sql writer
This commit is contained in:
@@ -18,7 +18,7 @@ type Database struct {
|
||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||
DatabaseType DatabaseType `json:"database_type,omitempty" yaml:"database_type,omitempty" xml:"database_type,omitempty"`
|
||||
DatabaseVersion string `json:"database_version,omitempty" yaml:"database_version,omitempty" xml:"database_version,omitempty"`
|
||||
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` //Source Format of the database.
|
||||
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
|
||||
}
|
||||
|
||||
// SQLNamer returns the database name in lowercase
|
||||
@@ -106,19 +106,19 @@ func (d *View) SQLName() string {
|
||||
|
||||
// Sequence represents a database sequence (auto-increment generator)
|
||||
type Sequence struct {
|
||||
Name string `json:"name" yaml:"name" xml:"name"`
|
||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||
Schema string `json:"schema" yaml:"schema" xml:"schema"`
|
||||
StartValue int64 `json:"start_value" yaml:"start_value" xml:"start_value"`
|
||||
MinValue int64 `json:"min_value,omitempty" yaml:"min_value,omitempty" xml:"min_value,omitempty"`
|
||||
MaxValue int64 `json:"max_value,omitempty" yaml:"max_value,omitempty" xml:"max_value,omitempty"`
|
||||
IncrementBy int64 `json:"increment_by" yaml:"increment_by" xml:"increment_by"`
|
||||
CacheSize int64 `json:"cache_size,omitempty" yaml:"cache_size,omitempty" xml:"cache_size,omitempty"`
|
||||
Cycle bool `json:"cycle" yaml:"cycle" xml:"cycle"`
|
||||
OwnedByTable string `json:"owned_by_table,omitempty" yaml:"owned_by_table,omitempty" xml:"owned_by_table,omitempty"`
|
||||
OwnedByColumn string `json:"owned_by_column,omitempty" yaml:"owned_by_column,omitempty" xml:"owned_by_column,omitempty"`
|
||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||
Name string `json:"name" yaml:"name" xml:"name"`
|
||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||
Schema string `json:"schema" yaml:"schema" xml:"schema"`
|
||||
StartValue int64 `json:"start_value" yaml:"start_value" xml:"start_value"`
|
||||
MinValue int64 `json:"min_value,omitempty" yaml:"min_value,omitempty" xml:"min_value,omitempty"`
|
||||
MaxValue int64 `json:"max_value,omitempty" yaml:"max_value,omitempty" xml:"max_value,omitempty"`
|
||||
IncrementBy int64 `json:"increment_by" yaml:"increment_by" xml:"increment_by"`
|
||||
CacheSize int64 `json:"cache_size,omitempty" yaml:"cache_size,omitempty" xml:"cache_size,omitempty"`
|
||||
Cycle bool `json:"cycle" yaml:"cycle" xml:"cycle"`
|
||||
OwnedByTable string `json:"owned_by_table,omitempty" yaml:"owned_by_table,omitempty" xml:"owned_by_table,omitempty"`
|
||||
OwnedByColumn string `json:"owned_by_column,omitempty" yaml:"owned_by_column,omitempty" xml:"owned_by_column,omitempty"`
|
||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||
RefSchema *Schema `json:"ref_schema,omitempty" yaml:"ref_schema,omitempty" xml:"ref_schema,omitempty"`
|
||||
}
|
||||
|
||||
|
||||
@@ -67,6 +67,15 @@ func (r *Reader) ReadTable() (*models.Table, error) {
|
||||
return schema.Tables[0], nil
|
||||
}
|
||||
|
||||
// stripQuotes removes surrounding quotes from an identifier
|
||||
func stripQuotes(s string) string {
|
||||
s = strings.TrimSpace(s)
|
||||
if len(s) >= 2 && ((s[0] == '"' && s[len(s)-1] == '"') || (s[0] == '\'' && s[len(s)-1] == '\'')) {
|
||||
return s[1 : len(s)-1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// parseDBML parses DBML content and returns a Database model
|
||||
func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
db := models.InitDatabase("database")
|
||||
@@ -79,13 +88,14 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
|
||||
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||
schemaMap := make(map[string]*models.Schema)
|
||||
pendingConstraints := []*models.Constraint{}
|
||||
|
||||
var currentTable *models.Table
|
||||
var currentSchema string
|
||||
var inIndexes bool
|
||||
var inTable bool
|
||||
|
||||
tableRegex := regexp.MustCompile(`^Table\s+([a-zA-Z0-9_.]+)\s*{`)
|
||||
tableRegex := regexp.MustCompile(`^Table\s+(.+?)\s*{`)
|
||||
refRegex := regexp.MustCompile(`^Ref:\s+(.+)`)
|
||||
|
||||
for scanner.Scan() {
|
||||
@@ -102,10 +112,11 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
parts := strings.Split(tableName, ".")
|
||||
|
||||
if len(parts) == 2 {
|
||||
currentSchema = parts[0]
|
||||
tableName = parts[1]
|
||||
currentSchema = stripQuotes(parts[0])
|
||||
tableName = stripQuotes(parts[1])
|
||||
} else {
|
||||
currentSchema = "public"
|
||||
tableName = stripQuotes(parts[0])
|
||||
}
|
||||
|
||||
// Ensure schema exists
|
||||
@@ -131,7 +142,7 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
}
|
||||
|
||||
// Parse indexes section
|
||||
if inTable && strings.HasPrefix(line, "indexes") {
|
||||
if inTable && (strings.HasPrefix(line, "Indexes {") || strings.HasPrefix(line, "indexes {")) {
|
||||
inIndexes = true
|
||||
continue
|
||||
}
|
||||
@@ -161,10 +172,14 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
|
||||
// Parse column definition
|
||||
if inTable && !inIndexes && currentTable != nil {
|
||||
column := r.parseColumn(line, currentTable.Name, currentSchema)
|
||||
column, constraint := r.parseColumn(line, currentTable.Name, currentSchema)
|
||||
if column != nil {
|
||||
currentTable.Columns[column.Name] = column
|
||||
}
|
||||
if constraint != nil {
|
||||
// Add to pending list - will assign to tables at the end
|
||||
pendingConstraints = append(pendingConstraints, constraint)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -186,6 +201,19 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Assign pending constraints to their respective tables
|
||||
for _, constraint := range pendingConstraints {
|
||||
// Find the table this constraint belongs to
|
||||
if schema, exists := schemaMap[constraint.Schema]; exists {
|
||||
for _, table := range schema.Tables {
|
||||
if table.Name == constraint.Table {
|
||||
table.Constraints[constraint.Name] = constraint
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add schemas to database
|
||||
for _, schema := range schemaMap {
|
||||
db.Schemas = append(db.Schemas, schema)
|
||||
@@ -195,19 +223,21 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
|
||||
}
|
||||
|
||||
// parseColumn parses a DBML column definition
|
||||
func (r *Reader) parseColumn(line, tableName, schemaName string) *models.Column {
|
||||
func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column, *models.Constraint) {
|
||||
// Format: column_name type [attributes] // comment
|
||||
parts := strings.Fields(line)
|
||||
if len(parts) < 2 {
|
||||
return nil
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
columnName := parts[0]
|
||||
columnType := parts[1]
|
||||
columnName := stripQuotes(parts[0])
|
||||
columnType := stripQuotes(parts[1])
|
||||
|
||||
column := models.InitColumn(columnName, tableName, schemaName)
|
||||
column.Type = columnType
|
||||
|
||||
var constraint *models.Constraint
|
||||
|
||||
// Parse attributes in brackets
|
||||
if strings.Contains(line, "[") && strings.Contains(line, "]") {
|
||||
attrStart := strings.Index(line, "[")
|
||||
@@ -230,7 +260,55 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) *models.Column
|
||||
defaultVal := strings.TrimSpace(strings.TrimPrefix(attr, "default:"))
|
||||
column.Default = strings.Trim(defaultVal, "'\"")
|
||||
} else if attr == "unique" {
|
||||
// Could create a unique constraint here
|
||||
// Create a unique constraint
|
||||
uniqueConstraint := models.InitConstraint(
|
||||
fmt.Sprintf("uq_%s", columnName),
|
||||
models.UniqueConstraint,
|
||||
)
|
||||
uniqueConstraint.Schema = schemaName
|
||||
uniqueConstraint.Table = tableName
|
||||
uniqueConstraint.Columns = []string{columnName}
|
||||
// Store it to be added later
|
||||
if constraint == nil {
|
||||
constraint = uniqueConstraint
|
||||
}
|
||||
} else if strings.HasPrefix(attr, "ref:") {
|
||||
// Parse inline reference
|
||||
// DBML semantics depend on context:
|
||||
// - On FK column: ref: < target means "this FK references target"
|
||||
// - On PK column: ref: < source means "source references this PK" (reverse notation)
|
||||
refStr := strings.TrimSpace(strings.TrimPrefix(attr, "ref:"))
|
||||
|
||||
// Check relationship direction operator
|
||||
refOp := strings.TrimSpace(refStr)
|
||||
var isReverse bool
|
||||
if strings.HasPrefix(refOp, "<") {
|
||||
isReverse = column.IsPrimaryKey // < on PK means "is referenced by" (reverse)
|
||||
} else if strings.HasPrefix(refOp, ">") {
|
||||
isReverse = !column.IsPrimaryKey // > on FK means reverse
|
||||
}
|
||||
|
||||
constraint = r.parseRef(refStr)
|
||||
if constraint != nil {
|
||||
if isReverse {
|
||||
// Reverse: parsed ref is SOURCE, current column is TARGET
|
||||
// Constraint should be ON the source table
|
||||
constraint.Schema = constraint.ReferencedSchema
|
||||
constraint.Table = constraint.ReferencedTable
|
||||
constraint.Columns = constraint.ReferencedColumns
|
||||
constraint.ReferencedSchema = schemaName
|
||||
constraint.ReferencedTable = tableName
|
||||
constraint.ReferencedColumns = []string{columnName}
|
||||
} else {
|
||||
// Forward: current column is SOURCE, parsed ref is TARGET
|
||||
// Standard FK: constraint is ON current table
|
||||
constraint.Schema = schemaName
|
||||
constraint.Table = tableName
|
||||
constraint.Columns = []string{columnName}
|
||||
}
|
||||
// Generate short constraint name based on the column
|
||||
constraint.Name = fmt.Sprintf("fk_%s", constraint.Columns[0])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -242,28 +320,41 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) *models.Column
|
||||
column.Comment = strings.TrimSpace(line[commentStart+2:])
|
||||
}
|
||||
|
||||
return column
|
||||
return column, constraint
|
||||
}
|
||||
|
||||
// parseIndex parses a DBML index definition
|
||||
func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
|
||||
// Format: (columns) [attributes]
|
||||
if !strings.Contains(line, "(") || !strings.Contains(line, ")") {
|
||||
return nil
|
||||
// Format: (columns) [attributes] OR columnname [attributes]
|
||||
var columns []string
|
||||
|
||||
if strings.Contains(line, "(") && strings.Contains(line, ")") {
|
||||
// Multi-column format: (col1, col2) [attributes]
|
||||
colStart := strings.Index(line, "(")
|
||||
colEnd := strings.Index(line, ")")
|
||||
if colStart >= colEnd {
|
||||
return nil
|
||||
}
|
||||
|
||||
columnsStr := line[colStart+1 : colEnd]
|
||||
for _, col := range strings.Split(columnsStr, ",") {
|
||||
columns = append(columns, stripQuotes(strings.TrimSpace(col)))
|
||||
}
|
||||
} else {
|
||||
// Single column format: columnname [attributes]
|
||||
// Extract column name before the bracket
|
||||
if strings.Contains(line, "[") {
|
||||
colName := strings.TrimSpace(line[:strings.Index(line, "[")])
|
||||
if colName != "" {
|
||||
columns = []string{stripQuotes(colName)}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
colStart := strings.Index(line, "(")
|
||||
colEnd := strings.Index(line, ")")
|
||||
if colStart >= colEnd {
|
||||
if len(columns) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
columnsStr := line[colStart+1 : colEnd]
|
||||
columns := strings.Split(columnsStr, ",")
|
||||
for i := range columns {
|
||||
columns[i] = strings.TrimSpace(columns[i])
|
||||
}
|
||||
|
||||
index := models.InitIndex("")
|
||||
index.Table = tableName
|
||||
index.Schema = schemaName
|
||||
@@ -304,9 +395,11 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
|
||||
// parseRef parses a DBML Ref (foreign key relationship)
|
||||
func (r *Reader) parseRef(refStr string) *models.Constraint {
|
||||
// Format: schema.table.(columns) > schema.table.(columns) [actions]
|
||||
// Or inline format: < schema.table.column (for inline column refs)
|
||||
|
||||
// Split by relationship operator (>, <, -, etc.)
|
||||
var fromPart, toPart string
|
||||
isInlineRef := false
|
||||
|
||||
for _, op := range []string{">", "<", "-"} {
|
||||
if strings.Contains(refStr, op) {
|
||||
@@ -314,30 +407,53 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
|
||||
if len(parts) == 2 {
|
||||
fromPart = strings.TrimSpace(parts[0])
|
||||
toPart = strings.TrimSpace(parts[1])
|
||||
// Check if this is an inline ref (operator at start)
|
||||
if fromPart == "" {
|
||||
isInlineRef = true
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if fromPart == "" || toPart == "" {
|
||||
// For inline refs, only toPart should be populated
|
||||
if isInlineRef {
|
||||
if toPart == "" {
|
||||
return nil
|
||||
}
|
||||
} else if fromPart == "" || toPart == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Remove actions part if present
|
||||
if strings.Contains(toPart, "[") {
|
||||
toPart = strings.TrimSpace(toPart[:strings.Index(toPart, "[")])
|
||||
if idx := strings.Index(toPart, "["); idx >= 0 {
|
||||
toPart = strings.TrimSpace(toPart[:idx])
|
||||
}
|
||||
|
||||
// Parse from table and column
|
||||
fromSchema, fromTable, fromColumns := r.parseTableRef(fromPart)
|
||||
// Parse references
|
||||
var fromSchema, fromTable string
|
||||
var fromColumns []string
|
||||
toSchema, toTable, toColumns := r.parseTableRef(toPart)
|
||||
|
||||
if fromTable == "" || toTable == "" {
|
||||
if !isInlineRef {
|
||||
fromSchema, fromTable, fromColumns = r.parseTableRef(fromPart)
|
||||
if fromTable == "" {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if toTable == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Generate short constraint name based on the source column
|
||||
constraintName := fmt.Sprintf("fk_%s_%s", fromTable, toTable)
|
||||
if len(fromColumns) > 0 {
|
||||
constraintName = fmt.Sprintf("fk_%s", fromColumns[0])
|
||||
}
|
||||
|
||||
constraint := models.InitConstraint(
|
||||
fmt.Sprintf("fk_%s_%s", fromTable, toTable),
|
||||
constraintName,
|
||||
models.ForeignKeyConstraint,
|
||||
)
|
||||
|
||||
@@ -371,29 +487,48 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
|
||||
return constraint
|
||||
}
|
||||
|
||||
// parseTableRef parses a table reference like "schema.table.(column1, column2)"
|
||||
// parseTableRef parses a table reference like "schema.table.(column1, column2)" or "schema"."table"."column"
|
||||
func (r *Reader) parseTableRef(ref string) (schema, table string, columns []string) {
|
||||
// Extract columns if present
|
||||
// Extract columns if present in parentheses format
|
||||
hasParentheses := false
|
||||
if strings.Contains(ref, "(") && strings.Contains(ref, ")") {
|
||||
colStart := strings.Index(ref, "(")
|
||||
colEnd := strings.Index(ref, ")")
|
||||
if colStart < colEnd {
|
||||
columnsStr := ref[colStart+1 : colEnd]
|
||||
for _, col := range strings.Split(columnsStr, ",") {
|
||||
columns = append(columns, strings.TrimSpace(col))
|
||||
columns = append(columns, stripQuotes(strings.TrimSpace(col)))
|
||||
}
|
||||
hasParentheses = true
|
||||
}
|
||||
ref = ref[:colStart]
|
||||
}
|
||||
|
||||
// Parse schema and table
|
||||
// Parse schema, table, and optionally column
|
||||
parts := strings.Split(strings.TrimSpace(ref), ".")
|
||||
if len(parts) == 2 {
|
||||
schema = parts[0]
|
||||
table = parts[1]
|
||||
if len(parts) == 3 {
|
||||
// Format: "schema"."table"."column"
|
||||
schema = stripQuotes(parts[0])
|
||||
table = stripQuotes(parts[1])
|
||||
if !hasParentheses {
|
||||
columns = []string{stripQuotes(parts[2])}
|
||||
}
|
||||
} else if len(parts) == 2 {
|
||||
// Could be "schema"."table" or "table"."column"
|
||||
// If columns are already extracted from parentheses, this is schema.table
|
||||
// If no parentheses, this is table.column
|
||||
if hasParentheses {
|
||||
schema = stripQuotes(parts[0])
|
||||
table = stripQuotes(parts[1])
|
||||
} else {
|
||||
schema = "public"
|
||||
table = stripQuotes(parts[0])
|
||||
columns = []string{stripQuotes(parts[1])}
|
||||
}
|
||||
} else if len(parts) == 1 {
|
||||
// Format: "table"
|
||||
schema = "public"
|
||||
table = parts[0]
|
||||
table = stripQuotes(parts[0])
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
@@ -80,14 +80,15 @@ func (r *Reader) convertToDatabase(dctx *DCTXDictionary) (*models.Database, erro
|
||||
schema := models.InitSchema("public")
|
||||
|
||||
// Create GUID mappings for tables and keys
|
||||
tableGuidMap := make(map[string]string) // GUID -> table name
|
||||
keyGuidMap := make(map[string]*DCTXKey) // GUID -> key definition
|
||||
keyTableMap := make(map[string]string) // key GUID -> table name
|
||||
tableGuidMap := make(map[string]string) // GUID -> table name
|
||||
keyGuidMap := make(map[string]*DCTXKey) // GUID -> key definition
|
||||
keyTableMap := make(map[string]string) // key GUID -> table name
|
||||
fieldGuidMaps := make(map[string]map[string]string) // table name -> field GUID -> field name
|
||||
|
||||
// First pass: build GUID mappings
|
||||
for _, dctxTable := range dctx.Tables {
|
||||
if !r.hasSQLOption(&dctxTable) {
|
||||
for i := range dctx.Tables {
|
||||
dctxTable := &dctx.Tables[i]
|
||||
if !r.hasSQLOption(dctxTable) {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -102,12 +103,13 @@ func (r *Reader) convertToDatabase(dctx *DCTXDictionary) (*models.Database, erro
|
||||
}
|
||||
|
||||
// Process tables - only include tables with SQL option enabled
|
||||
for _, dctxTable := range dctx.Tables {
|
||||
if !r.hasSQLOption(&dctxTable) {
|
||||
for i := range dctx.Tables {
|
||||
dctxTable := &dctx.Tables[i]
|
||||
if !r.hasSQLOption(dctxTable) {
|
||||
continue
|
||||
}
|
||||
|
||||
table, fieldGuidMap, err := r.convertTable(&dctxTable)
|
||||
table, fieldGuidMap, err := r.convertTable(dctxTable)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to convert table %s: %w", dctxTable.Name, err)
|
||||
}
|
||||
@@ -116,7 +118,7 @@ func (r *Reader) convertToDatabase(dctx *DCTXDictionary) (*models.Database, erro
|
||||
schema.Tables = append(schema.Tables, table)
|
||||
|
||||
// Process keys (indexes, primary keys)
|
||||
err = r.processKeys(&dctxTable, table, fieldGuidMap)
|
||||
err = r.processKeys(dctxTable, table, fieldGuidMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process keys for table %s: %w", dctxTable.Name, err)
|
||||
}
|
||||
@@ -208,7 +210,7 @@ func (r *Reader) convertField(dctxField *DCTXField, tableName string) ([]*models
|
||||
}
|
||||
|
||||
// mapDataType maps Clarion data types to SQL types
|
||||
func (r *Reader) mapDataType(clarionType string, size int) (string, int) {
|
||||
func (r *Reader) mapDataType(clarionType string, size int) (sqlType string, precision int) {
|
||||
switch strings.ToUpper(clarionType) {
|
||||
case "LONG":
|
||||
if size == 8 {
|
||||
@@ -360,7 +362,8 @@ func (r *Reader) convertKey(dctxKey *DCTXKey, table *models.Table, fieldGuidMap
|
||||
|
||||
// processRelations processes DCTX relations and creates foreign keys
|
||||
func (r *Reader) processRelations(dctx *DCTXDictionary, schema *models.Schema, tableGuidMap map[string]string, keyGuidMap map[string]*DCTXKey, fieldGuidMaps map[string]map[string]string) error {
|
||||
for _, relation := range dctx.Relations {
|
||||
for i := range dctx.Relations {
|
||||
relation := &dctx.Relations[i]
|
||||
// Get table names from GUIDs
|
||||
primaryTableName := tableGuidMap[relation.PrimaryTable]
|
||||
foreignTableName := tableGuidMap[relation.ForeignTable]
|
||||
|
||||
@@ -357,15 +357,15 @@ func (r *Reader) queryForeignKeys(schemaName string) (map[string][]*models.Const
|
||||
|
||||
// First pass: collect all FK data
|
||||
type fkData struct {
|
||||
schema string
|
||||
tableName string
|
||||
constraintName string
|
||||
foreignColumns []string
|
||||
referencedSchema string
|
||||
referencedTable string
|
||||
referencedColumns []string
|
||||
updateRule string
|
||||
deleteRule string
|
||||
schema string
|
||||
tableName string
|
||||
constraintName string
|
||||
foreignColumns []string
|
||||
referencedSchema string
|
||||
referencedTable string
|
||||
referencedColumns []string
|
||||
updateRule string
|
||||
deleteRule string
|
||||
}
|
||||
|
||||
fkMap := make(map[string]*fkData)
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
@@ -261,47 +262,47 @@ func (r *Reader) close() {
|
||||
func (r *Reader) mapDataType(pgType, udtName string) string {
|
||||
// Map common PostgreSQL types
|
||||
typeMap := map[string]string{
|
||||
"integer": "int",
|
||||
"bigint": "int64",
|
||||
"smallint": "int16",
|
||||
"int": "int",
|
||||
"int2": "int16",
|
||||
"int4": "int",
|
||||
"int8": "int64",
|
||||
"serial": "int",
|
||||
"bigserial": "int64",
|
||||
"smallserial": "int16",
|
||||
"numeric": "decimal",
|
||||
"decimal": "decimal",
|
||||
"real": "float32",
|
||||
"double precision": "float64",
|
||||
"float4": "float32",
|
||||
"float8": "float64",
|
||||
"money": "decimal",
|
||||
"character varying": "string",
|
||||
"varchar": "string",
|
||||
"character": "string",
|
||||
"char": "string",
|
||||
"text": "string",
|
||||
"boolean": "bool",
|
||||
"bool": "bool",
|
||||
"date": "date",
|
||||
"time": "time",
|
||||
"time without time zone": "time",
|
||||
"time with time zone": "timetz",
|
||||
"timestamp": "timestamp",
|
||||
"integer": "int",
|
||||
"bigint": "int64",
|
||||
"smallint": "int16",
|
||||
"int": "int",
|
||||
"int2": "int16",
|
||||
"int4": "int",
|
||||
"int8": "int64",
|
||||
"serial": "int",
|
||||
"bigserial": "int64",
|
||||
"smallserial": "int16",
|
||||
"numeric": "decimal",
|
||||
"decimal": "decimal",
|
||||
"real": "float32",
|
||||
"double precision": "float64",
|
||||
"float4": "float32",
|
||||
"float8": "float64",
|
||||
"money": "decimal",
|
||||
"character varying": "string",
|
||||
"varchar": "string",
|
||||
"character": "string",
|
||||
"char": "string",
|
||||
"text": "string",
|
||||
"boolean": "bool",
|
||||
"bool": "bool",
|
||||
"date": "date",
|
||||
"time": "time",
|
||||
"time without time zone": "time",
|
||||
"time with time zone": "timetz",
|
||||
"timestamp": "timestamp",
|
||||
"timestamp without time zone": "timestamp",
|
||||
"timestamp with time zone": "timestamptz",
|
||||
"timestamptz": "timestamptz",
|
||||
"interval": "interval",
|
||||
"uuid": "uuid",
|
||||
"json": "json",
|
||||
"jsonb": "jsonb",
|
||||
"bytea": "bytea",
|
||||
"inet": "inet",
|
||||
"cidr": "cidr",
|
||||
"macaddr": "macaddr",
|
||||
"xml": "xml",
|
||||
"timestamp with time zone": "timestamptz",
|
||||
"timestamptz": "timestamptz",
|
||||
"interval": "interval",
|
||||
"uuid": "uuid",
|
||||
"json": "json",
|
||||
"jsonb": "jsonb",
|
||||
"bytea": "bytea",
|
||||
"inet": "inet",
|
||||
"cidr": "cidr",
|
||||
"macaddr": "macaddr",
|
||||
"xml": "xml",
|
||||
}
|
||||
|
||||
// Try mapped type first
|
||||
|
||||
@@ -62,7 +62,7 @@ func PascalCaseToSnakeCase(s string) string {
|
||||
// Add underscore before uppercase letter if:
|
||||
// 1. Previous char was lowercase, OR
|
||||
// 2. Next char is lowercase (end of acronym)
|
||||
if !prevUpper || (nextUpper == false && i+1 < len(runes)) {
|
||||
if !prevUpper || (!nextUpper && i+1 < len(runes)) {
|
||||
result.WriteRune('_')
|
||||
}
|
||||
}
|
||||
@@ -84,20 +84,20 @@ func capitalize(s string) string {
|
||||
|
||||
// Handle common acronyms
|
||||
acronyms := map[string]bool{
|
||||
"ID": true,
|
||||
"UUID": true,
|
||||
"GUID": true,
|
||||
"URL": true,
|
||||
"URI": true,
|
||||
"HTTP": true,
|
||||
"ID": true,
|
||||
"UUID": true,
|
||||
"GUID": true,
|
||||
"URL": true,
|
||||
"URI": true,
|
||||
"HTTP": true,
|
||||
"HTTPS": true,
|
||||
"API": true,
|
||||
"JSON": true,
|
||||
"XML": true,
|
||||
"SQL": true,
|
||||
"HTML": true,
|
||||
"CSS": true,
|
||||
"RID": true,
|
||||
"API": true,
|
||||
"JSON": true,
|
||||
"XML": true,
|
||||
"SQL": true,
|
||||
"HTML": true,
|
||||
"CSS": true,
|
||||
"RID": true,
|
||||
}
|
||||
|
||||
if acronyms[upper] {
|
||||
@@ -146,8 +146,8 @@ func Pluralize(s string) string {
|
||||
|
||||
// Words ending in s, x, z, ch, sh
|
||||
if strings.HasSuffix(s, "s") || strings.HasSuffix(s, "x") ||
|
||||
strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
|
||||
strings.HasSuffix(s, "sh") {
|
||||
strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
|
||||
strings.HasSuffix(s, "sh") {
|
||||
return s + "es"
|
||||
}
|
||||
|
||||
@@ -220,8 +220,8 @@ func Singularize(s string) string {
|
||||
|
||||
// Words ending in ses, xes, zes, ches, shes
|
||||
if strings.HasSuffix(s, "ses") || strings.HasSuffix(s, "xes") ||
|
||||
strings.HasSuffix(s, "zes") || strings.HasSuffix(s, "ches") ||
|
||||
strings.HasSuffix(s, "shes") {
|
||||
strings.HasSuffix(s, "zes") || strings.HasSuffix(s, "ches") ||
|
||||
strings.HasSuffix(s, "shes") {
|
||||
return s[:len(s)-2]
|
||||
}
|
||||
|
||||
|
||||
@@ -17,17 +17,17 @@ type TemplateData struct {
|
||||
|
||||
// ModelData represents a single model/struct in the template
|
||||
type ModelData struct {
|
||||
Name string
|
||||
TableName string // schema.table format
|
||||
SchemaName string
|
||||
TableNameOnly string // just table name without schema
|
||||
Comment string
|
||||
Fields []*FieldData
|
||||
Config *MethodConfig
|
||||
PrimaryKeyField string // Name of the primary key field
|
||||
PrimaryKeyIsSQL bool // Whether PK uses SQL type (needs .Int64() call)
|
||||
IDColumnName string // Name of the ID column in database
|
||||
Prefix string // 3-letter prefix
|
||||
Name string
|
||||
TableName string // schema.table format
|
||||
SchemaName string
|
||||
TableNameOnly string // just table name without schema
|
||||
Comment string
|
||||
Fields []*FieldData
|
||||
Config *MethodConfig
|
||||
PrimaryKeyField string // Name of the primary key field
|
||||
PrimaryKeyIsSQL bool // Whether PK uses SQL type (needs .Int64() call)
|
||||
IDColumnName string // Name of the ID column in database
|
||||
Prefix string // 3-letter prefix
|
||||
}
|
||||
|
||||
// FieldData represents a single field in a struct
|
||||
|
||||
@@ -91,6 +91,7 @@ func (w *Writer) databaseToDrawDB(d *models.Database) *DrawDBSchema {
|
||||
note += "\n"
|
||||
}
|
||||
note += schemaModel.Comment
|
||||
_ = note // TODO: Add note/description field to DrawDBArea when supported
|
||||
|
||||
area := &DrawDBArea{
|
||||
ID: areaID,
|
||||
@@ -242,12 +243,12 @@ func (w *Writer) tableToDrawDB(table *models.Table) *DrawDBSchema {
|
||||
}
|
||||
|
||||
// convertTableToDrawDB converts a table to DrawDB format and returns the table and next field ID
|
||||
func (w *Writer) convertTableToDrawDB(table *models.Table, schemaName string, tableID, fieldID, tableIndex, tablesPerRow, gridX, gridY, colWidth, rowHeight, colorIndex int) (*DrawDBTable, int) {
|
||||
func (w *Writer) convertTableToDrawDB(table *models.Table, schemaName string, tableID, fieldID, tableIndex, tablesPerRow, gridX, gridY, colWidth, rowHeight, colorIndex int) (drawTable *DrawDBTable, nextFieldID int) {
|
||||
// Calculate position
|
||||
x := gridX + (tableIndex%tablesPerRow)*colWidth
|
||||
y := gridY + (tableIndex/tablesPerRow)*rowHeight
|
||||
|
||||
drawTable := &DrawDBTable{
|
||||
drawTable = &DrawDBTable{
|
||||
ID: tableID,
|
||||
Name: table.Name,
|
||||
Schema: schemaName,
|
||||
|
||||
@@ -62,7 +62,7 @@ func PascalCaseToSnakeCase(s string) string {
|
||||
// Add underscore before uppercase letter if:
|
||||
// 1. Previous char was lowercase, OR
|
||||
// 2. Next char is lowercase (end of acronym)
|
||||
if !prevUpper || (nextUpper == false && i+1 < len(runes)) {
|
||||
if !prevUpper || (!nextUpper && i+1 < len(runes)) {
|
||||
result.WriteRune('_')
|
||||
}
|
||||
}
|
||||
@@ -84,20 +84,20 @@ func capitalize(s string) string {
|
||||
|
||||
// Handle common acronyms
|
||||
acronyms := map[string]bool{
|
||||
"ID": true,
|
||||
"UUID": true,
|
||||
"GUID": true,
|
||||
"URL": true,
|
||||
"URI": true,
|
||||
"HTTP": true,
|
||||
"ID": true,
|
||||
"UUID": true,
|
||||
"GUID": true,
|
||||
"URL": true,
|
||||
"URI": true,
|
||||
"HTTP": true,
|
||||
"HTTPS": true,
|
||||
"API": true,
|
||||
"JSON": true,
|
||||
"XML": true,
|
||||
"SQL": true,
|
||||
"HTML": true,
|
||||
"CSS": true,
|
||||
"RID": true,
|
||||
"API": true,
|
||||
"JSON": true,
|
||||
"XML": true,
|
||||
"SQL": true,
|
||||
"HTML": true,
|
||||
"CSS": true,
|
||||
"RID": true,
|
||||
}
|
||||
|
||||
if acronyms[upper] {
|
||||
@@ -146,8 +146,8 @@ func Pluralize(s string) string {
|
||||
|
||||
// Words ending in s, x, z, ch, sh
|
||||
if strings.HasSuffix(s, "s") || strings.HasSuffix(s, "x") ||
|
||||
strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
|
||||
strings.HasSuffix(s, "sh") {
|
||||
strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
|
||||
strings.HasSuffix(s, "sh") {
|
||||
return s + "es"
|
||||
}
|
||||
|
||||
@@ -220,8 +220,8 @@ func Singularize(s string) string {
|
||||
|
||||
// Words ending in ses, xes, zes, ches, shes
|
||||
if strings.HasSuffix(s, "ses") || strings.HasSuffix(s, "xes") ||
|
||||
strings.HasSuffix(s, "zes") || strings.HasSuffix(s, "ches") ||
|
||||
strings.HasSuffix(s, "shes") {
|
||||
strings.HasSuffix(s, "zes") || strings.HasSuffix(s, "ches") ||
|
||||
strings.HasSuffix(s, "shes") {
|
||||
return s[:len(s)-2]
|
||||
}
|
||||
|
||||
|
||||
@@ -17,15 +17,15 @@ type TemplateData struct {
|
||||
// ModelData represents a single model/struct in the template
|
||||
type ModelData struct {
|
||||
Name string
|
||||
TableName string // schema.table format
|
||||
TableName string // schema.table format
|
||||
SchemaName string
|
||||
TableNameOnly string // just table name without schema
|
||||
TableNameOnly string // just table name without schema
|
||||
Comment string
|
||||
Fields []*FieldData
|
||||
Config *MethodConfig
|
||||
PrimaryKeyField string // Name of the primary key field
|
||||
IDColumnName string // Name of the ID column in database
|
||||
Prefix string // 3-letter prefix
|
||||
PrimaryKeyField string // Name of the primary key field
|
||||
IDColumnName string // Name of the ID column in database
|
||||
Prefix string // 3-letter prefix
|
||||
}
|
||||
|
||||
// FieldData represents a single field in a struct
|
||||
|
||||
@@ -52,24 +52,24 @@ func (tm *TypeMapper) extractBaseType(sqlType string) string {
|
||||
func (tm *TypeMapper) baseGoType(sqlType string) string {
|
||||
typeMap := map[string]string{
|
||||
// Integer types
|
||||
"integer": "int32",
|
||||
"int": "int32",
|
||||
"int4": "int32",
|
||||
"smallint": "int16",
|
||||
"int2": "int16",
|
||||
"bigint": "int64",
|
||||
"int8": "int64",
|
||||
"serial": "int32",
|
||||
"bigserial": "int64",
|
||||
"integer": "int32",
|
||||
"int": "int32",
|
||||
"int4": "int32",
|
||||
"smallint": "int16",
|
||||
"int2": "int16",
|
||||
"bigint": "int64",
|
||||
"int8": "int64",
|
||||
"serial": "int32",
|
||||
"bigserial": "int64",
|
||||
"smallserial": "int16",
|
||||
|
||||
// String types
|
||||
"text": "string",
|
||||
"varchar": "string",
|
||||
"char": "string",
|
||||
"character": "string",
|
||||
"citext": "string",
|
||||
"bpchar": "string",
|
||||
"text": "string",
|
||||
"varchar": "string",
|
||||
"char": "string",
|
||||
"character": "string",
|
||||
"citext": "string",
|
||||
"bpchar": "string",
|
||||
|
||||
// Boolean
|
||||
"boolean": "bool",
|
||||
@@ -84,15 +84,15 @@ func (tm *TypeMapper) baseGoType(sqlType string) string {
|
||||
"decimal": "float64",
|
||||
|
||||
// Date/Time types
|
||||
"timestamp": "time.Time",
|
||||
"timestamp without time zone": "time.Time",
|
||||
"timestamp with time zone": "time.Time",
|
||||
"timestamptz": "time.Time",
|
||||
"date": "time.Time",
|
||||
"time": "time.Time",
|
||||
"time without time zone": "time.Time",
|
||||
"time with time zone": "time.Time",
|
||||
"timetz": "time.Time",
|
||||
"timestamp": "time.Time",
|
||||
"timestamp without time zone": "time.Time",
|
||||
"timestamp with time zone": "time.Time",
|
||||
"timestamptz": "time.Time",
|
||||
"date": "time.Time",
|
||||
"time": "time.Time",
|
||||
"time without time zone": "time.Time",
|
||||
"time with time zone": "time.Time",
|
||||
"timetz": "time.Time",
|
||||
|
||||
// Binary
|
||||
"bytea": "[]byte",
|
||||
@@ -105,8 +105,8 @@ func (tm *TypeMapper) baseGoType(sqlType string) string {
|
||||
"jsonb": "string",
|
||||
|
||||
// Network
|
||||
"inet": "string",
|
||||
"cidr": "string",
|
||||
"inet": "string",
|
||||
"cidr": "string",
|
||||
"macaddr": "string",
|
||||
|
||||
// Other
|
||||
@@ -125,24 +125,24 @@ func (tm *TypeMapper) baseGoType(sqlType string) string {
|
||||
func (tm *TypeMapper) nullableGoType(sqlType string) string {
|
||||
typeMap := map[string]string{
|
||||
// Integer types
|
||||
"integer": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"int": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"int4": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"smallint": tm.sqlTypesAlias + ".SqlInt16",
|
||||
"int2": tm.sqlTypesAlias + ".SqlInt16",
|
||||
"bigint": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"int8": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"serial": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"bigserial": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"integer": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"int": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"int4": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"smallint": tm.sqlTypesAlias + ".SqlInt16",
|
||||
"int2": tm.sqlTypesAlias + ".SqlInt16",
|
||||
"bigint": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"int8": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"serial": tm.sqlTypesAlias + ".SqlInt32",
|
||||
"bigserial": tm.sqlTypesAlias + ".SqlInt64",
|
||||
"smallserial": tm.sqlTypesAlias + ".SqlInt16",
|
||||
|
||||
// String types
|
||||
"text": tm.sqlTypesAlias + ".SqlString",
|
||||
"varchar": tm.sqlTypesAlias + ".SqlString",
|
||||
"char": tm.sqlTypesAlias + ".SqlString",
|
||||
"character": tm.sqlTypesAlias + ".SqlString",
|
||||
"citext": tm.sqlTypesAlias + ".SqlString",
|
||||
"bpchar": tm.sqlTypesAlias + ".SqlString",
|
||||
"text": tm.sqlTypesAlias + ".SqlString",
|
||||
"varchar": tm.sqlTypesAlias + ".SqlString",
|
||||
"char": tm.sqlTypesAlias + ".SqlString",
|
||||
"character": tm.sqlTypesAlias + ".SqlString",
|
||||
"citext": tm.sqlTypesAlias + ".SqlString",
|
||||
"bpchar": tm.sqlTypesAlias + ".SqlString",
|
||||
|
||||
// Boolean
|
||||
"boolean": tm.sqlTypesAlias + ".SqlBool",
|
||||
@@ -157,15 +157,15 @@ func (tm *TypeMapper) nullableGoType(sqlType string) string {
|
||||
"decimal": tm.sqlTypesAlias + ".SqlFloat64",
|
||||
|
||||
// Date/Time types
|
||||
"timestamp": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamp without time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamp with time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamptz": tm.sqlTypesAlias + ".SqlTime",
|
||||
"date": tm.sqlTypesAlias + ".SqlDate",
|
||||
"time": tm.sqlTypesAlias + ".SqlTime",
|
||||
"time without time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"time with time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timetz": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamp": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamp without time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamp with time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timestamptz": tm.sqlTypesAlias + ".SqlTime",
|
||||
"date": tm.sqlTypesAlias + ".SqlDate",
|
||||
"time": tm.sqlTypesAlias + ".SqlTime",
|
||||
"time without time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"time with time zone": tm.sqlTypesAlias + ".SqlTime",
|
||||
"timetz": tm.sqlTypesAlias + ".SqlTime",
|
||||
|
||||
// Binary
|
||||
"bytea": "[]byte", // No nullable version needed
|
||||
@@ -178,8 +178,8 @@ func (tm *TypeMapper) nullableGoType(sqlType string) string {
|
||||
"jsonb": tm.sqlTypesAlias + ".SqlString",
|
||||
|
||||
// Network
|
||||
"inet": tm.sqlTypesAlias + ".SqlString",
|
||||
"cidr": tm.sqlTypesAlias + ".SqlString",
|
||||
"inet": tm.sqlTypesAlias + ".SqlString",
|
||||
"cidr": tm.sqlTypesAlias + ".SqlString",
|
||||
"macaddr": tm.sqlTypesAlias + ".SqlString",
|
||||
|
||||
// Other
|
||||
|
||||
@@ -1,258 +0,0 @@
|
||||
# PostgreSQL Migration Writer
|
||||
|
||||
## Overview
|
||||
|
||||
The PostgreSQL Migration Writer implements database schema inspection and differential migration generation, following the same approach as the `pgsql_meta_upgrade` migration system. It compares a desired model (target schema) against the current database state and generates the necessary SQL migration scripts.
|
||||
|
||||
## Migration Phases
|
||||
|
||||
The migration writer follows a phased approach with specific priorities to ensure proper execution order:
|
||||
|
||||
### Phase 1: Drops (Priority 11-50)
|
||||
- Drop changed constraints (Priority 11)
|
||||
- Drop changed indexes (Priority 20)
|
||||
- Drop changed foreign keys (Priority 50)
|
||||
|
||||
### Phase 2: Renames (Priority 60-90)
|
||||
- Rename tables (Priority 60)
|
||||
- Rename columns (Priority 90)
|
||||
- *Note: Currently requires manual handling or metadata for rename detection*
|
||||
|
||||
### Phase 3: Tables & Columns (Priority 100-145)
|
||||
- Create new tables (Priority 100)
|
||||
- Add new columns (Priority 120)
|
||||
- Alter column types (Priority 120)
|
||||
- Alter column defaults (Priority 145)
|
||||
|
||||
### Phase 4: Indexes (Priority 160-180)
|
||||
- Create primary keys (Priority 160)
|
||||
- Create indexes (Priority 180)
|
||||
|
||||
### Phase 5: Foreign Keys (Priority 195)
|
||||
- Create foreign key constraints
|
||||
|
||||
### Phase 6: Comments (Priority 200+)
|
||||
- Add table and column comments
|
||||
|
||||
## Usage
|
||||
|
||||
### 1. Inspect Current Database
|
||||
|
||||
```go
|
||||
import (
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||
)
|
||||
|
||||
// Create reader with connection string
|
||||
options := &readers.ReaderOptions{
|
||||
ConnectionString: "host=localhost port=5432 dbname=mydb user=postgres password=secret",
|
||||
}
|
||||
|
||||
reader := pgsql.NewReader(options)
|
||||
|
||||
// Read current database state
|
||||
currentDB, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Define Desired Model
|
||||
|
||||
```go
|
||||
import "git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
|
||||
// Create desired model (could be loaded from DBML, JSON, etc.)
|
||||
modelDB := models.InitDatabase("mydb")
|
||||
schema := models.InitSchema("public")
|
||||
|
||||
// Define table
|
||||
table := models.InitTable("users", "public")
|
||||
table.Description = "User accounts"
|
||||
|
||||
// Add columns
|
||||
idCol := models.InitColumn("id", "users", "public")
|
||||
idCol.Type = "integer"
|
||||
idCol.NotNull = true
|
||||
idCol.IsPrimaryKey = true
|
||||
table.Columns["id"] = idCol
|
||||
|
||||
nameCol := models.InitColumn("name", "users", "public")
|
||||
nameCol.Type = "text"
|
||||
nameCol.NotNull = true
|
||||
table.Columns["name"] = nameCol
|
||||
|
||||
emailCol := models.InitColumn("email", "users", "public")
|
||||
emailCol.Type = "text"
|
||||
table.Columns["email"] = emailCol
|
||||
|
||||
// Add primary key constraint
|
||||
pkConstraint := &models.Constraint{
|
||||
Name: "pk_users",
|
||||
Type: models.PrimaryKeyConstraint,
|
||||
Columns: []string{"id"},
|
||||
}
|
||||
table.Constraints["pk_users"] = pkConstraint
|
||||
|
||||
// Add unique index
|
||||
emailIndex := &models.Index{
|
||||
Name: "uk_users_email",
|
||||
Unique: true,
|
||||
Columns: []string{"email"},
|
||||
}
|
||||
table.Indexes["uk_users_email"] = emailIndex
|
||||
|
||||
schema.Tables = append(schema.Tables, table)
|
||||
modelDB.Schemas = append(modelDB.Schemas, schema)
|
||||
```
|
||||
|
||||
### 3. Generate Migration
|
||||
|
||||
```go
|
||||
import (
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||
)
|
||||
|
||||
// Create migration writer
|
||||
writerOptions := &writers.WriterOptions{
|
||||
OutputPath: "migration_001.sql",
|
||||
}
|
||||
|
||||
migrationWriter := pgsql.NewMigrationWriter(writerOptions)
|
||||
|
||||
// Generate migration comparing model vs current
|
||||
err = migrationWriter.WriteMigration(modelDB, currentDB)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
```
|
||||
|
||||
## Example Migration Output
|
||||
|
||||
```sql
|
||||
-- PostgreSQL Migration Script
|
||||
-- Generated by RelSpec
|
||||
-- Source: mydb -> mydb
|
||||
|
||||
-- Priority: 11 | Type: drop constraint | Object: public.users.old_constraint
|
||||
ALTER TABLE public.users DROP CONSTRAINT IF EXISTS old_constraint;
|
||||
|
||||
-- Priority: 100 | Type: create table | Object: public.orders
|
||||
CREATE TABLE IF NOT EXISTS public.orders (
|
||||
id integer NOT NULL,
|
||||
user_id integer,
|
||||
total numeric(10,2) DEFAULT 0.00,
|
||||
created_at timestamp DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Priority: 120 | Type: create column | Object: public.users.phone
|
||||
ALTER TABLE public.users
|
||||
ADD COLUMN IF NOT EXISTS phone text;
|
||||
|
||||
-- Priority: 120 | Type: alter column type | Object: public.users.age
|
||||
ALTER TABLE public.users
|
||||
ALTER COLUMN age TYPE integer;
|
||||
|
||||
-- Priority: 160 | Type: create primary key | Object: public.orders.pk_orders
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.table_constraints
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'orders'
|
||||
AND constraint_name = 'pk_orders'
|
||||
) THEN
|
||||
ALTER TABLE public.orders
|
||||
ADD CONSTRAINT pk_orders PRIMARY KEY (id);
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
|
||||
-- Priority: 180 | Type: create index | Object: public.users.idx_users_email
|
||||
CREATE INDEX IF NOT EXISTS idx_users_email
|
||||
ON public.users USING btree (email);
|
||||
|
||||
-- Priority: 195 | Type: create foreign key | Object: public.orders.fk_orders_users
|
||||
ALTER TABLE public.orders
|
||||
DROP CONSTRAINT IF EXISTS fk_orders_users;
|
||||
|
||||
ALTER TABLE public.orders
|
||||
ADD CONSTRAINT fk_orders_users
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES public.users (id)
|
||||
ON DELETE CASCADE
|
||||
ON UPDATE CASCADE
|
||||
DEFERRABLE;
|
||||
|
||||
-- Priority: 200 | Type: comment on table | Object: public.users
|
||||
COMMENT ON TABLE public.users IS 'User accounts';
|
||||
|
||||
-- Priority: 200 | Type: comment on column | Object: public.users.email
|
||||
COMMENT ON COLUMN public.users.email IS 'User email address';
|
||||
```
|
||||
|
||||
## Migration Script Structure
|
||||
|
||||
Each migration script includes:
|
||||
|
||||
- **ObjectName**: Fully qualified name of the object being modified
|
||||
- **ObjectType**: Type of operation (create table, alter column, etc.)
|
||||
- **Schema**: Schema name
|
||||
- **Priority**: Execution order priority (lower runs first)
|
||||
- **Sequence**: Sub-ordering within same priority
|
||||
- **Body**: The actual SQL statement
|
||||
|
||||
## Comparison Logic
|
||||
|
||||
The migration writer compares objects using:
|
||||
|
||||
### Tables
|
||||
- Existence check by name (case-insensitive)
|
||||
- New tables generate CREATE TABLE statements
|
||||
|
||||
### Columns
|
||||
- Existence check within tables
|
||||
- Type changes generate ALTER COLUMN TYPE
|
||||
- Default value changes generate SET/DROP DEFAULT
|
||||
- New columns generate ADD COLUMN
|
||||
|
||||
### Constraints
|
||||
- Compared by type, columns, and referenced objects
|
||||
- Changed constraints are dropped and recreated
|
||||
|
||||
### Indexes
|
||||
- Compared by uniqueness and column list
|
||||
- Changed indexes are dropped and recreated
|
||||
|
||||
### Foreign Keys
|
||||
- Compared by columns, referenced table/columns, and actions
|
||||
- Changed foreign keys are dropped and recreated
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Always Review Generated Migrations**: Manually review SQL before execution
|
||||
2. **Test on Non-Production First**: Apply migrations to development/staging environments first
|
||||
3. **Backup Before Migration**: Create database backup before running migrations
|
||||
4. **Use Transactions**: Wrap migrations in transactions when possible
|
||||
5. **Handle Renames Carefully**: Column/table renames may appear as DROP + CREATE without metadata
|
||||
6. **Consider Data Migration**: Generated SQL handles structure only; data migration may be needed
|
||||
|
||||
## Limitations
|
||||
|
||||
1. **Rename Detection**: Automatic rename detection not implemented; requires GUID or metadata matching
|
||||
2. **Data Type Conversions**: Some type changes may require custom USING clauses
|
||||
3. **Complex Constraints**: CHECK constraints with complex expressions may need manual handling
|
||||
4. **Sequence Values**: Current sequence values not automatically synced
|
||||
5. **Permissions**: Schema and object permissions not included in migrations
|
||||
|
||||
## Integration with Migration System
|
||||
|
||||
This implementation follows the same logic as the SQL migration system in `examples/pgsql_meta_upgrade`:
|
||||
|
||||
- `migration_inspect.sql` → Reader (pkg/readers/pgsql)
|
||||
- `migration_build.sql` → MigrationWriter (pkg/writers/pgsql)
|
||||
- `migration_run.sql` → External execution (psql, application code)
|
||||
|
||||
The phases, priorities, and script generation logic match the original migration system to ensure compatibility and consistency.
|
||||
696
pkg/writers/pgsql/TEMPLATES.md
Normal file
696
pkg/writers/pgsql/TEMPLATES.md
Normal file
@@ -0,0 +1,696 @@
|
||||
# PostgreSQL Migration Templates
|
||||
|
||||
## Overview
|
||||
|
||||
The PostgreSQL migration writer uses Go text templates to generate SQL, making the code much more maintainable and customizable than hardcoded string concatenation.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
pkg/writers/pgsql/
|
||||
├── templates/ # Template files
|
||||
│ ├── create_table.tmpl # CREATE TABLE
|
||||
│ ├── add_column.tmpl # ALTER TABLE ADD COLUMN
|
||||
│ ├── alter_column_type.tmpl # ALTER TABLE ALTER COLUMN TYPE
|
||||
│ ├── alter_column_default.tmpl # ALTER TABLE ALTER COLUMN DEFAULT
|
||||
│ ├── create_primary_key.tmpl # ADD CONSTRAINT PRIMARY KEY
|
||||
│ ├── create_index.tmpl # CREATE INDEX
|
||||
│ ├── create_foreign_key.tmpl # ADD CONSTRAINT FOREIGN KEY
|
||||
│ ├── drop_constraint.tmpl # DROP CONSTRAINT
|
||||
│ ├── drop_index.tmpl # DROP INDEX
|
||||
│ ├── comment_table.tmpl # COMMENT ON TABLE
|
||||
│ ├── comment_column.tmpl # COMMENT ON COLUMN
|
||||
│ ├── audit_tables.tmpl # CREATE audit tables
|
||||
│ ├── audit_function.tmpl # CREATE audit function
|
||||
│ └── audit_trigger.tmpl # CREATE audit trigger
|
||||
├── templates.go # Template executor and data structures
|
||||
└── migration_writer_templated.go # Templated migration writer
|
||||
```
|
||||
|
||||
## Using Templates
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```go
|
||||
// Create template executor
|
||||
executor, err := pgsql.NewTemplateExecutor()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Prepare data
|
||||
data := pgsql.CreateTableData{
|
||||
SchemaName: "public",
|
||||
TableName: "users",
|
||||
Columns: []pgsql.ColumnData{
|
||||
{Name: "id", Type: "integer", NotNull: true},
|
||||
{Name: "name", Type: "text"},
|
||||
},
|
||||
}
|
||||
|
||||
// Execute template
|
||||
sql, err := executor.ExecuteCreateTable(data)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
fmt.Println(sql)
|
||||
```
|
||||
|
||||
### Using Templated Migration Writer
|
||||
|
||||
```go
|
||||
// Create templated migration writer
|
||||
writer, err := pgsql.NewTemplatedMigrationWriter(&writers.WriterOptions{
|
||||
OutputPath: "migration.sql",
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Generate migration (uses templates internally)
|
||||
err = writer.WriteMigration(modelDB, currentDB)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
```
|
||||
|
||||
## Template Data Structures
|
||||
|
||||
### CreateTableData
|
||||
|
||||
For `create_table.tmpl`:
|
||||
|
||||
```go
|
||||
type CreateTableData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
Columns []ColumnData
|
||||
}
|
||||
|
||||
type ColumnData struct {
|
||||
Name string
|
||||
Type string
|
||||
Default string
|
||||
NotNull bool
|
||||
}
|
||||
```
|
||||
|
||||
Example:
|
||||
```go
|
||||
data := CreateTableData{
|
||||
SchemaName: "public",
|
||||
TableName: "products",
|
||||
Columns: []ColumnData{
|
||||
{Name: "id", Type: "serial", NotNull: true},
|
||||
{Name: "name", Type: "text", NotNull: true},
|
||||
{Name: "price", Type: "numeric(10,2)", Default: "0.00"},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### AddColumnData
|
||||
|
||||
For `add_column.tmpl`:
|
||||
|
||||
```go
|
||||
type AddColumnData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
ColumnName string
|
||||
ColumnType string
|
||||
Default string
|
||||
NotNull bool
|
||||
}
|
||||
```
|
||||
|
||||
### CreateIndexData
|
||||
|
||||
For `create_index.tmpl`:
|
||||
|
||||
```go
|
||||
type CreateIndexData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
IndexName string
|
||||
IndexType string // btree, hash, gin, gist
|
||||
Columns string // comma-separated
|
||||
Unique bool
|
||||
}
|
||||
```
|
||||
|
||||
### CreateForeignKeyData
|
||||
|
||||
For `create_foreign_key.tmpl`:
|
||||
|
||||
```go
|
||||
type CreateForeignKeyData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
ConstraintName string
|
||||
SourceColumns string // comma-separated
|
||||
TargetSchema string
|
||||
TargetTable string
|
||||
TargetColumns string // comma-separated
|
||||
OnDelete string // CASCADE, SET NULL, etc.
|
||||
OnUpdate string
|
||||
}
|
||||
```
|
||||
|
||||
### AuditFunctionData
|
||||
|
||||
For `audit_function.tmpl`:
|
||||
|
||||
```go
|
||||
type AuditFunctionData struct {
|
||||
SchemaName string
|
||||
FunctionName string
|
||||
TableName string
|
||||
TablePrefix string
|
||||
PrimaryKey string
|
||||
AuditSchema string
|
||||
UserFunction string
|
||||
AuditInsert bool
|
||||
AuditUpdate bool
|
||||
AuditDelete bool
|
||||
UpdateCondition string
|
||||
UpdateColumns []AuditColumnData
|
||||
DeleteColumns []AuditColumnData
|
||||
}
|
||||
|
||||
type AuditColumnData struct {
|
||||
Name string
|
||||
OldValue string // SQL expression for old value
|
||||
NewValue string // SQL expression for new value
|
||||
}
|
||||
```
|
||||
|
||||
## Customizing Templates
|
||||
|
||||
### Modifying Existing Templates
|
||||
|
||||
Templates are embedded in the binary but can be modified at compile time:
|
||||
|
||||
1. **Edit template file** in `pkg/writers/pgsql/templates/`:
|
||||
|
||||
```go
|
||||
// templates/create_table.tmpl
|
||||
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
|
||||
{{- range $i, $col := .Columns}}
|
||||
{{- if $i}},{{end}}
|
||||
{{$col.Name}} {{$col.Type}}
|
||||
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
|
||||
{{- if $col.NotNull}} NOT NULL{{end}}
|
||||
{{- end}}
|
||||
);
|
||||
|
||||
-- Custom comment
|
||||
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}} IS 'Auto-generated by RelSpec';
|
||||
```
|
||||
|
||||
2. **Rebuild** the application:
|
||||
|
||||
```bash
|
||||
go build ./cmd/relspec
|
||||
```
|
||||
|
||||
The new template is automatically embedded.
|
||||
|
||||
### Template Syntax Reference
|
||||
|
||||
#### Variables
|
||||
|
||||
```go
|
||||
{{.FieldName}} // Access field
|
||||
{{.SchemaName}} // String field
|
||||
{{.NotNull}} // Boolean field
|
||||
```
|
||||
|
||||
#### Conditionals
|
||||
|
||||
```go
|
||||
{{if .NotNull}}
|
||||
NOT NULL
|
||||
{{end}}
|
||||
|
||||
{{if .Default}}
|
||||
DEFAULT {{.Default}}
|
||||
{{else}}
|
||||
-- No default
|
||||
{{end}}
|
||||
```
|
||||
|
||||
#### Loops
|
||||
|
||||
```go
|
||||
{{range $i, $col := .Columns}}
|
||||
Column: {{$col.Name}} Type: {{$col.Type}}
|
||||
{{end}}
|
||||
```
|
||||
|
||||
#### Functions
|
||||
|
||||
```go
|
||||
{{if eq .Type "CASCADE"}}
|
||||
ON DELETE CASCADE
|
||||
{{end}}
|
||||
|
||||
{{join .Columns ", "}} // Join string slice
|
||||
```
|
||||
|
||||
### Creating New Templates
|
||||
|
||||
1. **Create template file** in `pkg/writers/pgsql/templates/`:
|
||||
|
||||
```go
|
||||
// templates/custom_operation.tmpl
|
||||
-- Custom operation for {{.TableName}}
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
{{.CustomOperation}};
|
||||
```
|
||||
|
||||
2. **Define data structure** in `templates.go`:
|
||||
|
||||
```go
|
||||
type CustomOperationData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
CustomOperation string
|
||||
}
|
||||
```
|
||||
|
||||
3. **Add executor method** in `templates.go`:
|
||||
|
||||
```go
|
||||
func (te *TemplateExecutor) ExecuteCustomOperation(data CustomOperationData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "custom_operation.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute custom_operation template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
```
|
||||
|
||||
4. **Use in migration writer**:
|
||||
|
||||
```go
|
||||
sql, err := w.executor.ExecuteCustomOperation(CustomOperationData{
|
||||
SchemaName: "public",
|
||||
TableName: "users",
|
||||
CustomOperation: "ADD COLUMN custom_field text",
|
||||
})
|
||||
```
|
||||
|
||||
## Template Examples
|
||||
|
||||
### Example 1: Custom Table Creation
|
||||
|
||||
Modify `create_table.tmpl` to add table options:
|
||||
|
||||
```sql
|
||||
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
|
||||
{{- range $i, $col := .Columns}}
|
||||
{{- if $i}},{{end}}
|
||||
{{$col.Name}} {{$col.Type}}
|
||||
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
|
||||
{{- if $col.NotNull}} NOT NULL{{end}}
|
||||
{{- end}}
|
||||
) WITH (fillfactor = 90);
|
||||
|
||||
-- Add automatic comment
|
||||
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}}
|
||||
IS 'Created: {{.CreatedDate}} | Version: {{.Version}}';
|
||||
```
|
||||
|
||||
### Example 2: Custom Index with WHERE Clause
|
||||
|
||||
Add to `create_index.tmpl`:
|
||||
|
||||
```sql
|
||||
CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{.IndexName}}
|
||||
ON {{.SchemaName}}.{{.TableName}}
|
||||
USING {{.IndexType}} ({{.Columns}})
|
||||
{{- if .Where}}
|
||||
WHERE {{.Where}}
|
||||
{{- end}}
|
||||
{{- if .Include}}
|
||||
INCLUDE ({{.Include}})
|
||||
{{- end}};
|
||||
```
|
||||
|
||||
Update data structure:
|
||||
|
||||
```go
|
||||
type CreateIndexData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
IndexName string
|
||||
IndexType string
|
||||
Columns string
|
||||
Unique bool
|
||||
Where string // New field for partial indexes
|
||||
Include string // New field for covering indexes
|
||||
}
|
||||
```
|
||||
|
||||
### Example 3: Enhanced Audit Function
|
||||
|
||||
Modify `audit_function.tmpl` to add custom logging:
|
||||
|
||||
```sql
|
||||
CREATE OR REPLACE FUNCTION {{.SchemaName}}.{{.FunctionName}}()
|
||||
RETURNS trigger AS
|
||||
$body$
|
||||
DECLARE
|
||||
m_funcname text = '{{.FunctionName}}';
|
||||
m_user text;
|
||||
m_atevent integer;
|
||||
m_application_name text;
|
||||
BEGIN
|
||||
-- Get current user and application
|
||||
m_user := {{.UserFunction}}::text;
|
||||
m_application_name := current_setting('application_name', true);
|
||||
|
||||
-- Custom logging
|
||||
RAISE NOTICE 'Audit: % on %.% by % from %',
|
||||
TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME, m_user, m_application_name;
|
||||
|
||||
-- Rest of function...
|
||||
...
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### 1. Keep Templates Simple
|
||||
|
||||
Templates should focus on SQL generation. Complex logic belongs in Go code:
|
||||
|
||||
**Good:**
|
||||
```go
|
||||
// In Go code
|
||||
columns := buildColumnList(table)
|
||||
|
||||
// In template
|
||||
{{range .Columns}}
|
||||
{{.Name}} {{.Type}}
|
||||
{{end}}
|
||||
```
|
||||
|
||||
**Bad:**
|
||||
```go
|
||||
// Don't do complex transformations in templates
|
||||
{{range .Columns}}
|
||||
{{if eq .Type "integer"}}
|
||||
{{.Name}} serial
|
||||
{{else}}
|
||||
{{.Name}} {{.Type}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
```
|
||||
|
||||
### 2. Use Descriptive Field Names
|
||||
|
||||
```go
|
||||
// Good
|
||||
type CreateTableData struct {
|
||||
SchemaName string
|
||||
TableName string
|
||||
}
|
||||
|
||||
// Bad
|
||||
type CreateTableData struct {
|
||||
S string // What is S?
|
||||
T string // What is T?
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Document Template Data
|
||||
|
||||
Always document what data a template expects:
|
||||
|
||||
```go
|
||||
// CreateTableData contains data for create table template.
|
||||
// Used by templates/create_table.tmpl
|
||||
type CreateTableData struct {
|
||||
SchemaName string // Schema where table will be created
|
||||
TableName string // Name of the table
|
||||
Columns []ColumnData // List of columns to create
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Handle SQL Injection
|
||||
|
||||
Always escape user input:
|
||||
|
||||
```go
|
||||
// In Go code - escape before passing to template
|
||||
data := CommentTableData{
|
||||
SchemaName: schema,
|
||||
TableName: table,
|
||||
Comment: escapeQuote(userComment), // Escape quotes
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Test Templates Thoroughly
|
||||
|
||||
```go
|
||||
func TestTemplate_CreateTable(t *testing.T) {
|
||||
executor, _ := NewTemplateExecutor()
|
||||
|
||||
data := CreateTableData{
|
||||
SchemaName: "public",
|
||||
TableName: "test",
|
||||
Columns: []ColumnData{{Name: "id", Type: "integer"}},
|
||||
}
|
||||
|
||||
sql, err := executor.ExecuteCreateTable(data)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Verify expected SQL patterns
|
||||
if !strings.Contains(sql, "CREATE TABLE") {
|
||||
t.Error("Missing CREATE TABLE")
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Benefits of Template-Based Approach
|
||||
|
||||
### Maintainability
|
||||
|
||||
**Before (string concatenation):**
|
||||
```go
|
||||
sql := fmt.Sprintf(`CREATE TABLE %s.%s (
|
||||
%s %s%s%s
|
||||
);`, schema, table, col, typ,
|
||||
func() string {
|
||||
if def != "" {
|
||||
return " DEFAULT " + def
|
||||
}
|
||||
return ""
|
||||
}(),
|
||||
func() string {
|
||||
if notNull {
|
||||
return " NOT NULL"
|
||||
}
|
||||
return ""
|
||||
}(),
|
||||
)
|
||||
```
|
||||
|
||||
**After (templates):**
|
||||
```go
|
||||
sql, _ := executor.ExecuteCreateTable(CreateTableData{
|
||||
SchemaName: schema,
|
||||
TableName: table,
|
||||
Columns: columns,
|
||||
})
|
||||
```
|
||||
|
||||
### Customization
|
||||
|
||||
Users can modify templates without changing Go code:
|
||||
- Edit template file
|
||||
- Rebuild application
|
||||
- New SQL generation logic active
|
||||
|
||||
### Testing
|
||||
|
||||
Templates can be tested independently:
|
||||
```go
|
||||
func TestAuditTemplate(t *testing.T) {
|
||||
executor, _ := NewTemplateExecutor()
|
||||
|
||||
// Test with various data
|
||||
for _, testCase := range testCases {
|
||||
sql, err := executor.ExecuteAuditFunction(testCase.data)
|
||||
// Verify output
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Readability
|
||||
|
||||
SQL templates are easier to read and review than Go string building code.
|
||||
|
||||
## Migration from Old Writer
|
||||
|
||||
To migrate from the old string-based writer to templates:
|
||||
|
||||
### Option 1: Use TemplatedMigrationWriter
|
||||
|
||||
```go
|
||||
// Old
|
||||
writer := pgsql.NewMigrationWriter(options)
|
||||
|
||||
// New
|
||||
writer, err := pgsql.NewTemplatedMigrationWriter(options)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Same interface
|
||||
writer.WriteMigration(model, current)
|
||||
```
|
||||
|
||||
### Option 2: Keep Both
|
||||
|
||||
Both writers are available:
|
||||
- `MigrationWriter` - Original string-based
|
||||
- `TemplatedMigrationWriter` - New template-based
|
||||
|
||||
Choose based on your needs.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Template Not Found
|
||||
|
||||
```
|
||||
Error: template: "my_template.tmpl" not defined
|
||||
```
|
||||
|
||||
Solution: Ensure template file exists in `templates/` directory and rebuild.
|
||||
|
||||
### Template Execution Error
|
||||
|
||||
```
|
||||
Error: template: create_table.tmpl:5:10: executing "create_table.tmpl"
|
||||
at <.InvalidField>: can't evaluate field InvalidField
|
||||
```
|
||||
|
||||
Solution: Check data structure has all fields used in template.
|
||||
|
||||
### Embedded Files Not Updating
|
||||
|
||||
If template changes aren't reflected:
|
||||
|
||||
1. Clean build cache: `go clean -cache`
|
||||
2. Rebuild: `go build ./cmd/relspec`
|
||||
3. Verify template file is in `templates/` directory
|
||||
|
||||
## Custom Template Functions
|
||||
|
||||
RelSpec provides a comprehensive library of template functions for SQL generation:
|
||||
|
||||
### String Manipulation
|
||||
- `upper`, `lower` - Case conversion
|
||||
- `snake_case`, `camelCase` - Naming convention conversion
|
||||
- Usage: `{{upper .TableName}}` → `USERS`
|
||||
|
||||
### SQL Formatting
|
||||
- `indent(spaces, text)` - Indent text
|
||||
- `quote(string)` - Quote for SQL with escaping
|
||||
- `escape(string)` - Escape special characters
|
||||
- `safe_identifier(string)` - Make SQL-safe identifier
|
||||
- Usage: `{{quote "O'Brien"}}` → `'O''Brien'`
|
||||
|
||||
### Type Conversion
|
||||
- `goTypeToSQL(type)` - Convert Go type to PostgreSQL type
|
||||
- `sqlTypeToGo(type)` - Convert PostgreSQL type to Go type
|
||||
- `isNumeric(type)`, `isText(type)` - Type checking
|
||||
- Usage: `{{goTypeToSQL "int64"}}` → `bigint`
|
||||
|
||||
### Collection Helpers
|
||||
- `first(slice)`, `last(slice)` - Get elements
|
||||
- `join_with(slice, sep)` - Join with custom separator
|
||||
- Usage: `{{join_with .Columns ", "}}` → `id, name, email`
|
||||
|
||||
See [template_functions.go](template_functions.go) for full documentation.
|
||||
|
||||
## Template Inheritance and Composition
|
||||
|
||||
RelSpec supports Go template inheritance using `{{template}}` and `{{block}}`:
|
||||
|
||||
### Base Templates
|
||||
- `base_ddl.tmpl` - Common DDL patterns
|
||||
- `base_constraint.tmpl` - Constraint operations
|
||||
- `fragments.tmpl` - Reusable fragments
|
||||
|
||||
### Using Fragments
|
||||
```gotmpl
|
||||
{{/* Use predefined fragments */}}
|
||||
CREATE TABLE {{template "qualified_table" .}} (
|
||||
{{range .Columns}}
|
||||
{{template "column_definition" .}}
|
||||
{{end}}
|
||||
);
|
||||
```
|
||||
|
||||
### Template Blocks
|
||||
```gotmpl
|
||||
{{/* Define with override capability */}}
|
||||
{{define "table_options"}}
|
||||
) {{block "storage_options" .}}WITH (fillfactor = 90){{end}};
|
||||
{{end}}
|
||||
```
|
||||
|
||||
See [TEMPLATE_INHERITANCE.md](TEMPLATE_INHERITANCE.md) for detailed guide.
|
||||
|
||||
## Visual Template Editor
|
||||
|
||||
A VS Code extension is available for visual template editing:
|
||||
|
||||
### Features
|
||||
- **Live Preview** - See rendered SQL as you type
|
||||
- **IntelliSense** - Auto-completion for functions
|
||||
- **Validation** - Syntax checking and error highlighting
|
||||
- **Scaffolding** - Quick template creation
|
||||
- **Function Browser** - Browse available functions
|
||||
|
||||
### Installation
|
||||
```bash
|
||||
cd vscode-extension
|
||||
npm install
|
||||
npm run compile
|
||||
code .
|
||||
# Press F5 to launch
|
||||
```
|
||||
|
||||
See [vscode-extension/README.md](../../vscode-extension/README.md) for full documentation.
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
Completed:
|
||||
- [x] Template inheritance/composition
|
||||
- [x] Custom template functions library
|
||||
- [x] Visual template editor (VS Code)
|
||||
|
||||
Potential future improvements:
|
||||
- [ ] Parameterized templates (load from config)
|
||||
- [ ] Template validation CLI tool
|
||||
- [ ] Template library/marketplace
|
||||
- [ ] Template versioning
|
||||
- [ ] Hot-reload during development
|
||||
|
||||
## Contributing Templates
|
||||
|
||||
When contributing new templates:
|
||||
|
||||
1. Place in `pkg/writers/pgsql/templates/`
|
||||
2. Use `.tmpl` extension
|
||||
3. Document data structure in `templates.go`
|
||||
4. Add executor method
|
||||
5. Write tests
|
||||
6. Update this documentation
|
||||
74
pkg/writers/pgsql/audit.go
Normal file
74
pkg/writers/pgsql/audit.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package pgsql
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// AuditConfig defines audit configuration for tables.
type AuditConfig struct {
	// EnabledTables maps lookup keys (see auditKey: "schema.table") to the
	// audit settings for that table.
	EnabledTables map[string]*TableAuditConfig
	// AuditSchema is where audit tables are created (default: "public").
	AuditSchema string
	// UserFunction is the SQL function used to resolve the acting user
	// (default: "current_user").
	UserFunction string
}

// TableAuditConfig defines audit settings for a specific table.
type TableAuditConfig struct {
	// TableName is the name of the table to audit.
	TableName string
	// SchemaName is the schema of the table.
	SchemaName string
	// TablePrefix for compatibility with the old audit system.
	TablePrefix string
	// AuditInsert tracks INSERT operations.
	AuditInsert bool
	// AuditUpdate tracks UPDATE operations.
	AuditUpdate bool
	// AuditDelete tracks DELETE operations.
	AuditDelete bool
	// ExcludedColumns are columns to skip from audit.
	ExcludedColumns []string
	// EncryptedColumns are columns to hide in audit (show as ***).
	EncryptedColumns []string
}

// auditKey builds the canonical "schema.table" key used to index
// AuditConfig.EnabledTables. Centralizing the format here keeps the three
// lookup sites below from drifting apart.
func auditKey(schemaName, tableName string) string {
	return fmt.Sprintf("%s.%s", schemaName, tableName)
}

// NewAuditConfig creates a default audit configuration: no tables enabled,
// audit objects created in "public", and current_user as the user source.
func NewAuditConfig() *AuditConfig {
	return &AuditConfig{
		EnabledTables: make(map[string]*TableAuditConfig),
		AuditSchema:   "public",
		UserFunction:  "current_user",
	}
}

// EnableTableAudit enables audit for a specific table with default settings
// (INSERT/UPDATE/DELETE all tracked; "updatecnt" and "prefix" columns
// excluded; nothing encrypted) and returns the per-table config so callers
// can customize it further.
func (ac *AuditConfig) EnableTableAudit(schemaName, tableName string) *TableAuditConfig {
	config := &TableAuditConfig{
		TableName:        tableName,
		SchemaName:       schemaName,
		AuditInsert:      true,
		AuditUpdate:      true,
		AuditDelete:      true,
		ExcludedColumns:  []string{"updatecnt", "prefix"},
		EncryptedColumns: []string{},
	}
	ac.EnabledTables[auditKey(schemaName, tableName)] = config
	return config
}

// IsTableAudited checks if a table is configured for auditing.
func (ac *AuditConfig) IsTableAudited(schemaName, tableName string) bool {
	_, exists := ac.EnabledTables[auditKey(schemaName, tableName)]
	return exists
}

// GetTableConfig returns the audit config for a specific table, or nil if
// the table has not been enabled for auditing.
func (ac *AuditConfig) GetTableConfig(schemaName, tableName string) *TableAuditConfig {
	return ac.EnabledTables[auditKey(schemaName, tableName)]
}
|
||||
@@ -11,13 +11,7 @@ import (
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
)
|
||||
|
||||
// MigrationWriter generates differential migration SQL scripts
|
||||
type MigrationWriter struct {
|
||||
options *writers.WriterOptions
|
||||
writer io.Writer
|
||||
}
|
||||
|
||||
// MigrationScript represents a single migration script with priority and sequence
|
||||
// MigrationScript represents a single migration script with priority
|
||||
type MigrationScript struct {
|
||||
ObjectName string
|
||||
ObjectType string
|
||||
@@ -27,14 +21,27 @@ type MigrationScript struct {
|
||||
Body string
|
||||
}
|
||||
|
||||
// NewMigrationWriter creates a new migration writer
|
||||
func NewMigrationWriter(options *writers.WriterOptions) *MigrationWriter {
|
||||
return &MigrationWriter{
|
||||
options: options,
|
||||
}
|
||||
// MigrationWriter generates differential migration SQL scripts using templates
|
||||
type MigrationWriter struct {
|
||||
options *writers.WriterOptions
|
||||
writer io.Writer
|
||||
executor *TemplateExecutor
|
||||
}
|
||||
|
||||
// WriteMigration generates migration scripts by comparing model (desired) vs current (actual) database
|
||||
// NewMigrationWriter creates a new templated migration writer
|
||||
func NewMigrationWriter(options *writers.WriterOptions) (*MigrationWriter, error) {
|
||||
executor, err := NewTemplateExecutor()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create template executor: %w", err)
|
||||
}
|
||||
|
||||
return &MigrationWriter{
|
||||
options: options,
|
||||
executor: executor,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// WriteMigration generates migration scripts using templates
|
||||
func (w *MigrationWriter) WriteMigration(model *models.Database, current *models.Database) error {
|
||||
var writer io.Writer
|
||||
var file *os.File
|
||||
@@ -56,9 +63,26 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
|
||||
|
||||
w.writer = writer
|
||||
|
||||
// Check if audit is configured in metadata
|
||||
var auditConfig *AuditConfig
|
||||
if w.options.Metadata != nil {
|
||||
if ac, ok := w.options.Metadata["audit_config"].(*AuditConfig); ok {
|
||||
auditConfig = ac
|
||||
}
|
||||
}
|
||||
|
||||
// Generate all migration scripts
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Generate audit tables if needed (priority 90)
|
||||
if auditConfig != nil && len(auditConfig.EnabledTables) > 0 {
|
||||
auditTableScript, err := w.generateAuditTablesScript(auditConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate audit tables: %w", err)
|
||||
}
|
||||
scripts = append(scripts, auditTableScript...)
|
||||
}
|
||||
|
||||
// Process each schema in the model
|
||||
for _, modelSchema := range model.Schemas {
|
||||
// Find corresponding schema in current database
|
||||
@@ -71,8 +95,20 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
|
||||
}
|
||||
|
||||
// Generate schema-level scripts
|
||||
schemaScripts := w.generateSchemaScripts(modelSchema, currentSchema)
|
||||
schemaScripts, err := w.generateSchemaScripts(modelSchema, currentSchema)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate schema scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, schemaScripts...)
|
||||
|
||||
// Generate audit scripts for this schema (if configured)
|
||||
if auditConfig != nil {
|
||||
auditScripts, err := w.generateAuditScripts(modelSchema, auditConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate audit scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, auditScripts...)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort scripts by priority and sequence
|
||||
@@ -98,37 +134,52 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
|
||||
return nil
|
||||
}
|
||||
|
||||
// generateSchemaScripts generates migration scripts for a schema
|
||||
func (w *MigrationWriter) generateSchemaScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateSchemaScripts generates migration scripts for a schema using templates
|
||||
func (w *MigrationWriter) generateSchemaScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Phase 1: Drop constraints and indexes that changed (Priority 11-50)
|
||||
if current != nil {
|
||||
scripts = append(scripts, w.generateDropScripts(model, current)...)
|
||||
}
|
||||
|
||||
// Phase 2: Rename tables and columns (Priority 60-90)
|
||||
if current != nil {
|
||||
scripts = append(scripts, w.generateRenameScripts(model, current)...)
|
||||
dropScripts, err := w.generateDropScripts(model, current)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate drop scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, dropScripts...)
|
||||
}
|
||||
|
||||
// Phase 3: Create/Alter tables and columns (Priority 100-145)
|
||||
scripts = append(scripts, w.generateTableScripts(model, current)...)
|
||||
tableScripts, err := w.generateTableScripts(model, current)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate table scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, tableScripts...)
|
||||
|
||||
// Phase 4: Create indexes (Priority 160-180)
|
||||
scripts = append(scripts, w.generateIndexScripts(model, current)...)
|
||||
indexScripts, err := w.generateIndexScripts(model, current)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate index scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, indexScripts...)
|
||||
|
||||
// Phase 5: Create foreign keys (Priority 195)
|
||||
scripts = append(scripts, w.generateForeignKeyScripts(model, current)...)
|
||||
fkScripts, err := w.generateForeignKeyScripts(model, current)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate foreign key scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, fkScripts...)
|
||||
|
||||
// Phase 6: Add comments (Priority 200+)
|
||||
scripts = append(scripts, w.generateCommentScripts(model, current)...)
|
||||
commentScripts, err := w.generateCommentScripts(model, current)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate comment scripts: %w", err)
|
||||
}
|
||||
scripts = append(scripts, commentScripts...)
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateDropScripts generates DROP scripts for removed/changed objects
|
||||
func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateDropScripts generates DROP scripts using templates
|
||||
func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Build map of model tables for quick lookup
|
||||
@@ -142,35 +193,37 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
|
||||
modelTable, existsInModel := modelTables[strings.ToLower(currentTable.Name)]
|
||||
|
||||
if !existsInModel {
|
||||
// Table will be dropped, skip individual constraint drops
|
||||
continue
|
||||
}
|
||||
|
||||
// Check each constraint in current database
|
||||
for constraintName, currentConstraint := range currentTable.Constraints {
|
||||
// Check if constraint exists in model
|
||||
modelConstraint, existsInModel := modelTable.Constraints[constraintName]
|
||||
|
||||
shouldDrop := false
|
||||
|
||||
if !existsInModel {
|
||||
shouldDrop = true
|
||||
} else if !constraintsEqual(modelConstraint, currentConstraint) {
|
||||
// Constraint changed, drop and recreate
|
||||
shouldDrop = true
|
||||
}
|
||||
|
||||
if shouldDrop {
|
||||
sql, err := w.executor.ExecuteDropConstraint(DropConstraintData{
|
||||
SchemaName: current.Name,
|
||||
TableName: currentTable.Name,
|
||||
ConstraintName: constraintName,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", current.Name, currentTable.Name, constraintName),
|
||||
ObjectType: "drop constraint",
|
||||
Schema: current.Name,
|
||||
Priority: 11,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s DROP CONSTRAINT IF EXISTS %s;",
|
||||
current.Name, currentTable.Name, constraintName,
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
@@ -181,7 +234,6 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
|
||||
modelIndex, existsInModel := modelTable.Indexes[indexName]
|
||||
|
||||
shouldDrop := false
|
||||
|
||||
if !existsInModel {
|
||||
shouldDrop = true
|
||||
} else if !indexesEqual(modelIndex, currentIndex) {
|
||||
@@ -189,42 +241,32 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
|
||||
}
|
||||
|
||||
if shouldDrop {
|
||||
sql, err := w.executor.ExecuteDropIndex(DropIndexData{
|
||||
SchemaName: current.Name,
|
||||
IndexName: indexName,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", current.Name, currentTable.Name, indexName),
|
||||
ObjectType: "drop index",
|
||||
Schema: current.Name,
|
||||
Priority: 20,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"DROP INDEX IF EXISTS %s.%s CASCADE;",
|
||||
current.Name, indexName,
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateRenameScripts generates RENAME scripts for renamed objects.
//
// Currently a stub: it always returns an empty slice. Automatic rename
// detection would require GUID matching or similar heuristics, so renames
// must be handled manually or through metadata for now.
func (w *MigrationWriter) generateRenameScripts(model *models.Schema, current *models.Schema) []MigrationScript {
	scripts := make([]MigrationScript, 0)

	// For now, we don't attempt to detect renames automatically.
	// This would require GUID matching or other heuristics.
	// Users would need to handle renames manually or through metadata.

	// Suppress unused parameter warnings until rename detection is implemented.
	_ = model
	_ = current

	return scripts
}
|
||||
|
||||
// generateTableScripts generates CREATE/ALTER TABLE scripts
|
||||
func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateTableScripts generates CREATE/ALTER TABLE scripts using templates
|
||||
func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Build map of current tables
|
||||
@@ -241,59 +283,35 @@ func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *mo
|
||||
|
||||
if !exists {
|
||||
// Table doesn't exist, create it
|
||||
script := w.generateCreateTableScript(model, modelTable)
|
||||
sql, err := w.executor.ExecuteCreateTable(BuildCreateTableData(model.Name, modelTable))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s", model.Name, modelTable.Name),
|
||||
ObjectType: "create table",
|
||||
Schema: model.Name,
|
||||
Priority: 100,
|
||||
Sequence: len(scripts),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
} else {
|
||||
// Table exists, check for column changes
|
||||
alterScripts := w.generateAlterTableScripts(model, modelTable, currentTable)
|
||||
alterScripts, err := w.generateAlterTableScripts(model, modelTable, currentTable)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
scripts = append(scripts, alterScripts...)
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateCreateTableScript generates a CREATE TABLE script
|
||||
func (w *MigrationWriter) generateCreateTableScript(schema *models.Schema, table *models.Table) MigrationScript {
|
||||
var body strings.Builder
|
||||
|
||||
body.WriteString(fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s.%s (\n", schema.Name, table.Name))
|
||||
|
||||
// Get sorted columns
|
||||
columns := getSortedColumns(table.Columns)
|
||||
columnDefs := make([]string, 0, len(columns))
|
||||
|
||||
for _, col := range columns {
|
||||
colDef := fmt.Sprintf(" %s %s", col.Name, col.Type)
|
||||
|
||||
// Add default value if present
|
||||
if col.Default != nil {
|
||||
colDef += fmt.Sprintf(" DEFAULT %v", col.Default)
|
||||
}
|
||||
|
||||
// Add NOT NULL if needed
|
||||
if col.NotNull {
|
||||
colDef += " NOT NULL"
|
||||
}
|
||||
|
||||
columnDefs = append(columnDefs, colDef)
|
||||
}
|
||||
|
||||
body.WriteString(strings.Join(columnDefs, ",\n"))
|
||||
body.WriteString("\n);")
|
||||
|
||||
return MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s", schema.Name, table.Name),
|
||||
ObjectType: "create table",
|
||||
Schema: schema.Name,
|
||||
Priority: 100,
|
||||
Sequence: 0,
|
||||
Body: body.String(),
|
||||
}
|
||||
}
|
||||
|
||||
// generateAlterTableScripts generates ALTER TABLE scripts for column changes
|
||||
func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, modelTable *models.Table, currentTable *models.Table) []MigrationScript {
|
||||
// generateAlterTableScripts generates ALTER TABLE scripts using templates
|
||||
func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, modelTable *models.Table, currentTable *models.Table) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Build map of current columns
|
||||
@@ -308,85 +326,93 @@ func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, model
|
||||
|
||||
if !exists {
|
||||
// Column doesn't exist, add it
|
||||
defaultVal := ""
|
||||
if modelCol.Default != nil {
|
||||
defaultVal = fmt.Sprintf("%v", modelCol.Default)
|
||||
}
|
||||
|
||||
sql, err := w.executor.ExecuteAddColumn(AddColumnData{
|
||||
SchemaName: schema.Name,
|
||||
TableName: modelTable.Name,
|
||||
ColumnName: modelCol.Name,
|
||||
ColumnType: modelCol.Type,
|
||||
Default: defaultVal,
|
||||
NotNull: modelCol.NotNull,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
|
||||
ObjectType: "create column",
|
||||
Schema: schema.Name,
|
||||
Priority: 120,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s\n ADD COLUMN IF NOT EXISTS %s %s%s%s;",
|
||||
schema.Name, modelTable.Name, modelCol.Name, modelCol.Type,
|
||||
func() string {
|
||||
if modelCol.Default != nil {
|
||||
return fmt.Sprintf(" DEFAULT %v", modelCol.Default)
|
||||
}
|
||||
return ""
|
||||
}(),
|
||||
func() string {
|
||||
if modelCol.NotNull {
|
||||
return " NOT NULL"
|
||||
}
|
||||
return ""
|
||||
}(),
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
} else if !columnsEqual(modelCol, currentCol) {
|
||||
// Column exists but type or properties changed
|
||||
// Column exists but properties changed
|
||||
if modelCol.Type != currentCol.Type {
|
||||
sql, err := w.executor.ExecuteAlterColumnType(AlterColumnTypeData{
|
||||
SchemaName: schema.Name,
|
||||
TableName: modelTable.Name,
|
||||
ColumnName: modelCol.Name,
|
||||
NewType: modelCol.Type,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
|
||||
ObjectType: "alter column type",
|
||||
Schema: schema.Name,
|
||||
Priority: 120,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s\n ALTER COLUMN %s TYPE %s;",
|
||||
schema.Name, modelTable.Name, modelCol.Name, modelCol.Type,
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
|
||||
// Check default value changes
|
||||
if fmt.Sprintf("%v", modelCol.Default) != fmt.Sprintf("%v", currentCol.Default) {
|
||||
if modelCol.Default != nil {
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
|
||||
ObjectType: "alter column default",
|
||||
Schema: schema.Name,
|
||||
Priority: 145,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s\n ALTER COLUMN %s SET DEFAULT %v;",
|
||||
schema.Name, modelTable.Name, modelCol.Name, modelCol.Default,
|
||||
),
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
} else {
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
|
||||
ObjectType: "alter column default",
|
||||
Schema: schema.Name,
|
||||
Priority: 145,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s\n ALTER COLUMN %s DROP DEFAULT;",
|
||||
schema.Name, modelTable.Name, modelCol.Name,
|
||||
),
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
setDefault := modelCol.Default != nil
|
||||
defaultVal := ""
|
||||
if setDefault {
|
||||
defaultVal = fmt.Sprintf("%v", modelCol.Default)
|
||||
}
|
||||
|
||||
sql, err := w.executor.ExecuteAlterColumnDefault(AlterColumnDefaultData{
|
||||
SchemaName: schema.Name,
|
||||
TableName: modelTable.Name,
|
||||
ColumnName: modelCol.Name,
|
||||
SetDefault: setDefault,
|
||||
DefaultValue: defaultVal,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
|
||||
ObjectType: "alter column default",
|
||||
Schema: schema.Name,
|
||||
Priority: 145,
|
||||
Sequence: len(scripts),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateIndexScripts generates CREATE INDEX scripts
|
||||
func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateIndexScripts generates CREATE INDEX scripts using templates
|
||||
func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Build map of current tables
|
||||
@@ -401,47 +427,7 @@ func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *mo
|
||||
for _, modelTable := range model.Tables {
|
||||
currentTable := currentTables[strings.ToLower(modelTable.Name)]
|
||||
|
||||
// Process each index in model
|
||||
for indexName, modelIndex := range modelTable.Indexes {
|
||||
shouldCreate := true
|
||||
|
||||
// Check if index exists in current
|
||||
if currentTable != nil {
|
||||
if currentIndex, exists := currentTable.Indexes[indexName]; exists {
|
||||
if indexesEqual(modelIndex, currentIndex) {
|
||||
shouldCreate = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if shouldCreate {
|
||||
unique := ""
|
||||
if modelIndex.Unique {
|
||||
unique = "UNIQUE "
|
||||
}
|
||||
|
||||
indexType := "btree"
|
||||
if modelIndex.Type != "" {
|
||||
indexType = modelIndex.Type
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, indexName),
|
||||
ObjectType: "create index",
|
||||
Schema: model.Name,
|
||||
Priority: 180,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"CREATE %sINDEX IF NOT EXISTS %s\n ON %s.%s USING %s (%s);",
|
||||
unique, indexName, model.Name, modelTable.Name, indexType,
|
||||
strings.Join(modelIndex.Columns, ", "),
|
||||
),
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
|
||||
// Add primary key constraint if it exists
|
||||
// Process primary keys first
|
||||
for constraintName, constraint := range modelTable.Constraints {
|
||||
if constraint.Type == models.PrimaryKeyConstraint {
|
||||
shouldCreate := true
|
||||
@@ -455,39 +441,82 @@ func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *mo
|
||||
}
|
||||
|
||||
if shouldCreate {
|
||||
sql, err := w.executor.ExecuteCreatePrimaryKey(CreatePrimaryKeyData{
|
||||
SchemaName: model.Name,
|
||||
TableName: modelTable.Name,
|
||||
ConstraintName: constraintName,
|
||||
Columns: strings.Join(constraint.Columns, ", "),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, constraintName),
|
||||
ObjectType: "create primary key",
|
||||
Schema: model.Name,
|
||||
Priority: 160,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"DO $$\nBEGIN\n IF NOT EXISTS (\n"+
|
||||
" SELECT 1 FROM information_schema.table_constraints\n"+
|
||||
" WHERE table_schema = '%s'\n"+
|
||||
" AND table_name = '%s'\n"+
|
||||
" AND constraint_name = '%s'\n"+
|
||||
" ) THEN\n"+
|
||||
" ALTER TABLE %s.%s\n"+
|
||||
" ADD CONSTRAINT %s PRIMARY KEY (%s);\n"+
|
||||
" END IF;\n"+
|
||||
"END;\n$$;",
|
||||
model.Name, modelTable.Name, constraintName,
|
||||
model.Name, modelTable.Name, constraintName,
|
||||
strings.Join(constraint.Columns, ", "),
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process indexes
|
||||
for indexName, modelIndex := range modelTable.Indexes {
|
||||
// Skip primary key indexes
|
||||
if strings.HasPrefix(strings.ToLower(indexName), "pk_") {
|
||||
continue
|
||||
}
|
||||
|
||||
shouldCreate := true
|
||||
|
||||
if currentTable != nil {
|
||||
if currentIndex, exists := currentTable.Indexes[indexName]; exists {
|
||||
if indexesEqual(modelIndex, currentIndex) {
|
||||
shouldCreate = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if shouldCreate {
|
||||
indexType := "btree"
|
||||
if modelIndex.Type != "" {
|
||||
indexType = modelIndex.Type
|
||||
}
|
||||
|
||||
sql, err := w.executor.ExecuteCreateIndex(CreateIndexData{
|
||||
SchemaName: model.Name,
|
||||
TableName: modelTable.Name,
|
||||
IndexName: indexName,
|
||||
IndexType: indexType,
|
||||
Columns: strings.Join(modelIndex.Columns, ", "),
|
||||
Unique: modelIndex.Unique,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, indexName),
|
||||
ObjectType: "create index",
|
||||
Schema: model.Name,
|
||||
Priority: 180,
|
||||
Sequence: len(scripts),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateForeignKeyScripts generates ADD CONSTRAINT FOREIGN KEY scripts
|
||||
func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateForeignKeyScripts generates ADD CONSTRAINT FOREIGN KEY scripts using templates
|
||||
func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Build map of current tables
|
||||
@@ -510,7 +539,6 @@ func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, curren
|
||||
|
||||
shouldCreate := true
|
||||
|
||||
// Check if constraint exists in current
|
||||
if currentTable != nil {
|
||||
if currentConstraint, exists := currentTable.Constraints[constraintName]; exists {
|
||||
if constraintsEqual(constraint, currentConstraint) {
|
||||
@@ -530,59 +558,62 @@ func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, curren
|
||||
onUpdate = strings.ToUpper(constraint.OnUpdate)
|
||||
}
|
||||
|
||||
sql, err := w.executor.ExecuteCreateForeignKey(CreateForeignKeyData{
|
||||
SchemaName: model.Name,
|
||||
TableName: modelTable.Name,
|
||||
ConstraintName: constraintName,
|
||||
SourceColumns: strings.Join(constraint.Columns, ", "),
|
||||
TargetSchema: constraint.ReferencedSchema,
|
||||
TargetTable: constraint.ReferencedTable,
|
||||
TargetColumns: strings.Join(constraint.ReferencedColumns, ", "),
|
||||
OnDelete: onDelete,
|
||||
OnUpdate: onUpdate,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, constraintName),
|
||||
ObjectType: "create foreign key",
|
||||
Schema: model.Name,
|
||||
Priority: 195,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"ALTER TABLE %s.%s\n"+
|
||||
" DROP CONSTRAINT IF EXISTS %s;\n\n"+
|
||||
"ALTER TABLE %s.%s\n"+
|
||||
" ADD CONSTRAINT %s\n"+
|
||||
" FOREIGN KEY (%s)\n"+
|
||||
" REFERENCES %s.%s (%s)\n"+
|
||||
" ON DELETE %s\n"+
|
||||
" ON UPDATE %s\n"+
|
||||
" DEFERRABLE;",
|
||||
model.Name, modelTable.Name, constraintName,
|
||||
model.Name, modelTable.Name, constraintName,
|
||||
strings.Join(constraint.Columns, ", "),
|
||||
constraint.ReferencedSchema, constraint.ReferencedTable,
|
||||
strings.Join(constraint.ReferencedColumns, ", "),
|
||||
onDelete, onUpdate,
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateCommentScripts generates COMMENT ON scripts
|
||||
func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *models.Schema) []MigrationScript {
|
||||
// generateCommentScripts generates COMMENT ON scripts using templates
|
||||
func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Suppress unused parameter warning (current not used yet, could be used for diffing)
|
||||
_ = current
|
||||
_ = current // TODO: Compare with current schema to only add new/changed comments
|
||||
|
||||
// Process each model table
|
||||
for _, modelTable := range model.Tables {
|
||||
// Table comment
|
||||
if modelTable.Description != "" {
|
||||
sql, err := w.executor.ExecuteCommentTable(CommentTableData{
|
||||
SchemaName: model.Name,
|
||||
TableName: modelTable.Name,
|
||||
Comment: escapeQuote(modelTable.Description),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s", model.Name, modelTable.Name),
|
||||
ObjectType: "comment on table",
|
||||
Schema: model.Name,
|
||||
Priority: 200,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"COMMENT ON TABLE %s.%s IS '%s';",
|
||||
model.Name, modelTable.Name, escapeQuote(modelTable.Description),
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
@@ -590,79 +621,218 @@ func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *
|
||||
// Column comments
|
||||
for _, col := range modelTable.Columns {
|
||||
if col.Description != "" {
|
||||
sql, err := w.executor.ExecuteCommentColumn(CommentColumnData{
|
||||
SchemaName: model.Name,
|
||||
TableName: modelTable.Name,
|
||||
ColumnName: col.Name,
|
||||
Comment: escapeQuote(col.Description),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, col.Name),
|
||||
ObjectType: "comment on column",
|
||||
Schema: model.Name,
|
||||
Priority: 200,
|
||||
Sequence: len(scripts),
|
||||
Body: fmt.Sprintf(
|
||||
"COMMENT ON COLUMN %s.%s.%s IS '%s';",
|
||||
model.Name, modelTable.Name, col.Name, escapeQuote(col.Description),
|
||||
),
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scripts
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// Comparison helper functions
|
||||
// generateAuditTablesScript generates audit table creation scripts using templates
|
||||
func (w *MigrationWriter) generateAuditTablesScript(auditConfig *AuditConfig) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
func constraintsEqual(a, b *models.Constraint) bool {
|
||||
if a.Type != b.Type {
|
||||
auditSchema := auditConfig.AuditSchema
|
||||
if auditSchema == "" {
|
||||
auditSchema = "public"
|
||||
}
|
||||
|
||||
sql, err := w.executor.ExecuteAuditTables(AuditTablesData{
|
||||
AuditSchema: auditSchema,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
script := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.atevent+atdetail", auditSchema),
|
||||
ObjectType: "create audit tables",
|
||||
Schema: auditSchema,
|
||||
Priority: 90,
|
||||
Sequence: 0,
|
||||
Body: sql,
|
||||
}
|
||||
scripts = append(scripts, script)
|
||||
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// generateAuditScripts generates audit functions and triggers using templates
|
||||
func (w *MigrationWriter) generateAuditScripts(schema *models.Schema, auditConfig *AuditConfig) ([]MigrationScript, error) {
|
||||
scripts := make([]MigrationScript, 0)
|
||||
|
||||
// Process each table in the schema
|
||||
for _, table := range schema.Tables {
|
||||
if !auditConfig.IsTableAudited(schema.Name, table.Name) {
|
||||
continue
|
||||
}
|
||||
|
||||
config := auditConfig.GetTableConfig(schema.Name, table.Name)
|
||||
if config == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Find primary key
|
||||
pk := table.GetPrimaryKey()
|
||||
if pk == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
auditSchema := auditConfig.AuditSchema
|
||||
if auditSchema == "" {
|
||||
auditSchema = schema.Name
|
||||
}
|
||||
|
||||
// Generate audit function
|
||||
funcName := fmt.Sprintf("ft_audit_%s", table.Name)
|
||||
funcData := BuildAuditFunctionData(schema.Name, table, pk, config, auditSchema, auditConfig.UserFunction)
|
||||
|
||||
funcSQL, err := w.executor.ExecuteAuditFunction(funcData)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
functionScript := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s", schema.Name, funcName),
|
||||
ObjectType: "create audit function",
|
||||
Schema: schema.Name,
|
||||
Priority: 345,
|
||||
Sequence: len(scripts),
|
||||
Body: funcSQL,
|
||||
}
|
||||
scripts = append(scripts, functionScript)
|
||||
|
||||
// Generate audit trigger
|
||||
triggerName := fmt.Sprintf("t_audit_%s", table.Name)
|
||||
events := make([]string, 0)
|
||||
if config.AuditInsert {
|
||||
events = append(events, "INSERT")
|
||||
}
|
||||
if config.AuditUpdate {
|
||||
events = append(events, "UPDATE")
|
||||
}
|
||||
if config.AuditDelete {
|
||||
events = append(events, "DELETE")
|
||||
}
|
||||
|
||||
if len(events) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
triggerSQL, err := w.executor.ExecuteAuditTrigger(AuditTriggerData{
|
||||
SchemaName: schema.Name,
|
||||
TableName: table.Name,
|
||||
TriggerName: triggerName,
|
||||
FunctionName: funcName,
|
||||
Events: strings.Join(events, " OR "),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
triggerScript := MigrationScript{
|
||||
ObjectName: fmt.Sprintf("%s.%s", schema.Name, triggerName),
|
||||
ObjectType: "create audit trigger",
|
||||
Schema: schema.Name,
|
||||
Priority: 355,
|
||||
Sequence: len(scripts),
|
||||
Body: triggerSQL,
|
||||
}
|
||||
scripts = append(scripts, triggerScript)
|
||||
}
|
||||
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// Helper functions for comparing database objects
|
||||
|
||||
// columnsEqual checks if two columns have the same definition
|
||||
func columnsEqual(col1, col2 *models.Column) bool {
|
||||
if col1 == nil || col2 == nil {
|
||||
return false
|
||||
}
|
||||
if len(a.Columns) != len(b.Columns) {
|
||||
return strings.EqualFold(col1.Type, col2.Type) &&
|
||||
col1.NotNull == col2.NotNull &&
|
||||
fmt.Sprintf("%v", col1.Default) == fmt.Sprintf("%v", col2.Default)
|
||||
}
|
||||
|
||||
// constraintsEqual checks if two constraints are equal
|
||||
func constraintsEqual(c1, c2 *models.Constraint) bool {
|
||||
if c1 == nil || c2 == nil {
|
||||
return false
|
||||
}
|
||||
for i := range a.Columns {
|
||||
if !strings.EqualFold(a.Columns[i], b.Columns[i]) {
|
||||
if c1.Type != c2.Type {
|
||||
return false
|
||||
}
|
||||
|
||||
// Compare columns
|
||||
if len(c1.Columns) != len(c2.Columns) {
|
||||
return false
|
||||
}
|
||||
for i, col := range c1.Columns {
|
||||
if !strings.EqualFold(col, c2.Columns[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if a.Type == models.ForeignKeyConstraint {
|
||||
if a.ReferencedTable != b.ReferencedTable || a.ReferencedSchema != b.ReferencedSchema {
|
||||
|
||||
// For foreign keys, also compare referenced table and columns
|
||||
if c1.Type == models.ForeignKeyConstraint {
|
||||
if !strings.EqualFold(c1.ReferencedTable, c2.ReferencedTable) {
|
||||
return false
|
||||
}
|
||||
if len(a.ReferencedColumns) != len(b.ReferencedColumns) {
|
||||
if len(c1.ReferencedColumns) != len(c2.ReferencedColumns) {
|
||||
return false
|
||||
}
|
||||
for i := range a.ReferencedColumns {
|
||||
if !strings.EqualFold(a.ReferencedColumns[i], b.ReferencedColumns[i]) {
|
||||
for i, col := range c1.ReferencedColumns {
|
||||
if !strings.EqualFold(col, c2.ReferencedColumns[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if c1.OnDelete != c2.OnDelete || c1.OnUpdate != c2.OnUpdate {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func indexesEqual(a, b *models.Index) bool {
|
||||
if a.Unique != b.Unique {
|
||||
// indexesEqual checks if two indexes are equal
|
||||
func indexesEqual(idx1, idx2 *models.Index) bool {
|
||||
if idx1 == nil || idx2 == nil {
|
||||
return false
|
||||
}
|
||||
if len(a.Columns) != len(b.Columns) {
|
||||
if idx1.Unique != idx2.Unique {
|
||||
return false
|
||||
}
|
||||
for i := range a.Columns {
|
||||
if !strings.EqualFold(a.Columns[i], b.Columns[i]) {
|
||||
if !strings.EqualFold(idx1.Type, idx2.Type) {
|
||||
return false
|
||||
}
|
||||
if len(idx1.Columns) != len(idx2.Columns) {
|
||||
return false
|
||||
}
|
||||
for i, col := range idx1.Columns {
|
||||
if !strings.EqualFold(col, idx2.Columns[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func columnsEqual(a, b *models.Column) bool {
|
||||
if a.Type != b.Type {
|
||||
return false
|
||||
}
|
||||
if a.NotNull != b.NotNull {
|
||||
return false
|
||||
}
|
||||
if fmt.Sprintf("%v", a.Default) != fmt.Sprintf("%v", b.Default) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -34,10 +34,13 @@ func TestWriteMigration_NewTable(t *testing.T) {
|
||||
|
||||
// Generate migration
|
||||
var buf bytes.Buffer
|
||||
writer := NewMigrationWriter(&writers.WriterOptions{})
|
||||
writer, err := NewMigrationWriter(&writers.WriterOptions{})
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create writer: %v", err)
|
||||
}
|
||||
writer.writer = &buf
|
||||
|
||||
err := writer.WriteMigration(model, current)
|
||||
err = writer.WriteMigration(model, current)
|
||||
if err != nil {
|
||||
t.Fatalf("WriteMigration failed: %v", err)
|
||||
}
|
||||
@@ -54,234 +57,161 @@ func TestWriteMigration_NewTable(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteMigration_AddColumn(t *testing.T) {
|
||||
// Current database (with table but missing column)
|
||||
func TestWriteMigration_WithAudit(t *testing.T) {
|
||||
// Current database (empty)
|
||||
current := models.InitDatabase("testdb")
|
||||
currentSchema := models.InitSchema("public")
|
||||
currentTable := models.InitTable("users", "public")
|
||||
current.Schemas = append(current.Schemas, currentSchema)
|
||||
|
||||
// Model database (with table to audit)
|
||||
model := models.InitDatabase("testdb")
|
||||
modelSchema := models.InitSchema("public")
|
||||
|
||||
table := models.InitTable("users", "public")
|
||||
|
||||
idCol := models.InitColumn("id", "users", "public")
|
||||
idCol.Type = "integer"
|
||||
currentTable.Columns["id"] = idCol
|
||||
idCol.IsPrimaryKey = true
|
||||
table.Columns["id"] = idCol
|
||||
|
||||
currentSchema.Tables = append(currentSchema.Tables, currentTable)
|
||||
current.Schemas = append(current.Schemas, currentSchema)
|
||||
nameCol := models.InitColumn("name", "users", "public")
|
||||
nameCol.Type = "text"
|
||||
table.Columns["name"] = nameCol
|
||||
|
||||
// Model database (with additional column)
|
||||
model := models.InitDatabase("testdb")
|
||||
modelSchema := models.InitSchema("public")
|
||||
modelTable := models.InitTable("users", "public")
|
||||
passwordCol := models.InitColumn("password", "users", "public")
|
||||
passwordCol.Type = "text"
|
||||
table.Columns["password"] = passwordCol
|
||||
|
||||
idCol2 := models.InitColumn("id", "users", "public")
|
||||
idCol2.Type = "integer"
|
||||
modelTable.Columns["id"] = idCol2
|
||||
|
||||
emailCol := models.InitColumn("email", "users", "public")
|
||||
emailCol.Type = "text"
|
||||
modelTable.Columns["email"] = emailCol
|
||||
|
||||
modelSchema.Tables = append(modelSchema.Tables, modelTable)
|
||||
modelSchema.Tables = append(modelSchema.Tables, table)
|
||||
model.Schemas = append(model.Schemas, modelSchema)
|
||||
|
||||
// Generate migration
|
||||
// Configure audit
|
||||
auditConfig := NewAuditConfig()
|
||||
auditConfig.AuditSchema = "public"
|
||||
tableConfig := auditConfig.EnableTableAudit("public", "users")
|
||||
tableConfig.EncryptedColumns = []string{"password"}
|
||||
|
||||
// Generate migration with audit
|
||||
var buf bytes.Buffer
|
||||
writer := NewMigrationWriter(&writers.WriterOptions{})
|
||||
options := &writers.WriterOptions{
|
||||
Metadata: map[string]interface{}{
|
||||
"audit_config": auditConfig,
|
||||
},
|
||||
}
|
||||
writer, err := NewMigrationWriter(options)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create writer: %v", err)
|
||||
}
|
||||
writer.writer = &buf
|
||||
|
||||
err := writer.WriteMigration(model, current)
|
||||
err = writer.WriteMigration(model, current)
|
||||
if err != nil {
|
||||
t.Fatalf("WriteMigration failed: %v", err)
|
||||
}
|
||||
|
||||
output := buf.String()
|
||||
t.Logf("Generated migration:\n%s", output)
|
||||
t.Logf("Generated migration with audit:\n%s", output)
|
||||
|
||||
// Verify ADD COLUMN is present
|
||||
if !strings.Contains(output, "ADD COLUMN") {
|
||||
t.Error("Migration missing ADD COLUMN statement")
|
||||
// Verify audit tables
|
||||
if !strings.Contains(output, "CREATE TABLE IF NOT EXISTS public.atevent") {
|
||||
t.Error("Migration missing atevent table")
|
||||
}
|
||||
if !strings.Contains(output, "email") {
|
||||
t.Error("Migration missing column name 'email'")
|
||||
if !strings.Contains(output, "CREATE TABLE IF NOT EXISTS public.atdetail") {
|
||||
t.Error("Migration missing atdetail table")
|
||||
}
|
||||
|
||||
// Verify audit function
|
||||
if !strings.Contains(output, "CREATE OR REPLACE FUNCTION public.ft_audit_users()") {
|
||||
t.Error("Migration missing audit function")
|
||||
}
|
||||
|
||||
// Verify audit trigger
|
||||
if !strings.Contains(output, "CREATE TRIGGER t_audit_users") {
|
||||
t.Error("Migration missing audit trigger")
|
||||
}
|
||||
|
||||
// Verify encrypted column handling
|
||||
if !strings.Contains(output, "'****************'") {
|
||||
t.Error("Migration missing encrypted column handling")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteMigration_ChangeColumnType(t *testing.T) {
|
||||
// Current database (with integer column)
|
||||
current := models.InitDatabase("testdb")
|
||||
currentSchema := models.InitSchema("public")
|
||||
currentTable := models.InitTable("users", "public")
|
||||
|
||||
idCol := models.InitColumn("id", "users", "public")
|
||||
idCol.Type = "integer"
|
||||
currentTable.Columns["id"] = idCol
|
||||
|
||||
currentSchema.Tables = append(currentSchema.Tables, currentTable)
|
||||
current.Schemas = append(current.Schemas, currentSchema)
|
||||
|
||||
// Model database (changed to bigint)
|
||||
model := models.InitDatabase("testdb")
|
||||
modelSchema := models.InitSchema("public")
|
||||
modelTable := models.InitTable("users", "public")
|
||||
|
||||
idCol2 := models.InitColumn("id", "users", "public")
|
||||
idCol2.Type = "bigint"
|
||||
modelTable.Columns["id"] = idCol2
|
||||
|
||||
modelSchema.Tables = append(modelSchema.Tables, modelTable)
|
||||
model.Schemas = append(model.Schemas, modelSchema)
|
||||
|
||||
// Generate migration
|
||||
var buf bytes.Buffer
|
||||
writer := NewMigrationWriter(&writers.WriterOptions{})
|
||||
writer.writer = &buf
|
||||
|
||||
err := writer.WriteMigration(model, current)
|
||||
func TestTemplateExecutor_CreateTable(t *testing.T) {
|
||||
executor, err := NewTemplateExecutor()
|
||||
if err != nil {
|
||||
t.Fatalf("WriteMigration failed: %v", err)
|
||||
t.Fatalf("Failed to create executor: %v", err)
|
||||
}
|
||||
|
||||
output := buf.String()
|
||||
t.Logf("Generated migration:\n%s", output)
|
||||
|
||||
// Verify ALTER COLUMN TYPE is present
|
||||
if !strings.Contains(output, "ALTER COLUMN") {
|
||||
t.Error("Migration missing ALTER COLUMN statement")
|
||||
data := CreateTableData{
|
||||
SchemaName: "public",
|
||||
TableName: "test_table",
|
||||
Columns: []ColumnData{
|
||||
{Name: "id", Type: "integer", NotNull: true},
|
||||
{Name: "name", Type: "text", Default: "'unknown'"},
|
||||
},
|
||||
}
|
||||
if !strings.Contains(output, "TYPE bigint") {
|
||||
t.Error("Migration missing TYPE bigint")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteMigration_AddForeignKey(t *testing.T) {
|
||||
// Current database (two tables, no relationship)
|
||||
current := models.InitDatabase("testdb")
|
||||
currentSchema := models.InitSchema("public")
|
||||
|
||||
usersTable := models.InitTable("users", "public")
|
||||
idCol := models.InitColumn("id", "users", "public")
|
||||
idCol.Type = "integer"
|
||||
usersTable.Columns["id"] = idCol
|
||||
|
||||
postsTable := models.InitTable("posts", "public")
|
||||
postIdCol := models.InitColumn("id", "posts", "public")
|
||||
postIdCol.Type = "integer"
|
||||
postsTable.Columns["id"] = postIdCol
|
||||
|
||||
userIdCol := models.InitColumn("user_id", "posts", "public")
|
||||
userIdCol.Type = "integer"
|
||||
postsTable.Columns["user_id"] = userIdCol
|
||||
|
||||
currentSchema.Tables = append(currentSchema.Tables, usersTable, postsTable)
|
||||
current.Schemas = append(current.Schemas, currentSchema)
|
||||
|
||||
// Model database (with foreign key)
|
||||
model := models.InitDatabase("testdb")
|
||||
modelSchema := models.InitSchema("public")
|
||||
|
||||
modelUsersTable := models.InitTable("users", "public")
|
||||
modelIdCol := models.InitColumn("id", "users", "public")
|
||||
modelIdCol.Type = "integer"
|
||||
modelUsersTable.Columns["id"] = modelIdCol
|
||||
|
||||
modelPostsTable := models.InitTable("posts", "public")
|
||||
modelPostIdCol := models.InitColumn("id", "posts", "public")
|
||||
modelPostIdCol.Type = "integer"
|
||||
modelPostsTable.Columns["id"] = modelPostIdCol
|
||||
|
||||
modelUserIdCol := models.InitColumn("user_id", "posts", "public")
|
||||
modelUserIdCol.Type = "integer"
|
||||
modelPostsTable.Columns["user_id"] = modelUserIdCol
|
||||
|
||||
// Add foreign key constraint
|
||||
fkConstraint := &models.Constraint{
|
||||
Name: "fk_posts_users",
|
||||
Type: models.ForeignKeyConstraint,
|
||||
Columns: []string{"user_id"},
|
||||
ReferencedTable: "users",
|
||||
ReferencedSchema: "public",
|
||||
ReferencedColumns: []string{"id"},
|
||||
OnDelete: "CASCADE",
|
||||
OnUpdate: "CASCADE",
|
||||
}
|
||||
modelPostsTable.Constraints["fk_posts_users"] = fkConstraint
|
||||
|
||||
modelSchema.Tables = append(modelSchema.Tables, modelUsersTable, modelPostsTable)
|
||||
model.Schemas = append(model.Schemas, modelSchema)
|
||||
|
||||
// Generate migration
|
||||
var buf bytes.Buffer
|
||||
writer := NewMigrationWriter(&writers.WriterOptions{})
|
||||
writer.writer = &buf
|
||||
|
||||
err := writer.WriteMigration(model, current)
|
||||
sql, err := executor.ExecuteCreateTable(data)
|
||||
if err != nil {
|
||||
t.Fatalf("WriteMigration failed: %v", err)
|
||||
t.Fatalf("Failed to execute template: %v", err)
|
||||
}
|
||||
|
||||
output := buf.String()
|
||||
t.Logf("Generated migration:\n%s", output)
|
||||
t.Logf("Generated SQL:\n%s", sql)
|
||||
|
||||
// Verify FOREIGN KEY is present
|
||||
if !strings.Contains(output, "FOREIGN KEY") {
|
||||
t.Error("Migration missing FOREIGN KEY statement")
|
||||
if !strings.Contains(sql, "CREATE TABLE IF NOT EXISTS public.test_table") {
|
||||
t.Error("SQL missing CREATE TABLE statement")
|
||||
}
|
||||
if !strings.Contains(output, "ON DELETE CASCADE") {
|
||||
t.Error("Migration missing ON DELETE CASCADE")
|
||||
if !strings.Contains(sql, "id integer NOT NULL") {
|
||||
t.Error("SQL missing id column definition")
|
||||
}
|
||||
if !strings.Contains(sql, "name text DEFAULT 'unknown'") {
|
||||
t.Error("SQL missing name column definition")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteMigration_AddIndex(t *testing.T) {
|
||||
// Current database (table without index)
|
||||
current := models.InitDatabase("testdb")
|
||||
currentSchema := models.InitSchema("public")
|
||||
currentTable := models.InitTable("users", "public")
|
||||
|
||||
emailCol := models.InitColumn("email", "users", "public")
|
||||
emailCol.Type = "text"
|
||||
currentTable.Columns["email"] = emailCol
|
||||
|
||||
currentSchema.Tables = append(currentSchema.Tables, currentTable)
|
||||
current.Schemas = append(current.Schemas, currentSchema)
|
||||
|
||||
// Model database (with unique index)
|
||||
model := models.InitDatabase("testdb")
|
||||
modelSchema := models.InitSchema("public")
|
||||
modelTable := models.InitTable("users", "public")
|
||||
|
||||
modelEmailCol := models.InitColumn("email", "users", "public")
|
||||
modelEmailCol.Type = "text"
|
||||
modelTable.Columns["email"] = modelEmailCol
|
||||
|
||||
// Add unique index
|
||||
index := &models.Index{
|
||||
Name: "uk_users_email",
|
||||
Unique: true,
|
||||
Columns: []string{"email"},
|
||||
Type: "btree",
|
||||
}
|
||||
modelTable.Indexes["uk_users_email"] = index
|
||||
|
||||
modelSchema.Tables = append(modelSchema.Tables, modelTable)
|
||||
model.Schemas = append(model.Schemas, modelSchema)
|
||||
|
||||
// Generate migration
|
||||
var buf bytes.Buffer
|
||||
writer := NewMigrationWriter(&writers.WriterOptions{})
|
||||
writer.writer = &buf
|
||||
|
||||
err := writer.WriteMigration(model, current)
|
||||
func TestTemplateExecutor_AuditFunction(t *testing.T) {
|
||||
executor, err := NewTemplateExecutor()
|
||||
if err != nil {
|
||||
t.Fatalf("WriteMigration failed: %v", err)
|
||||
t.Fatalf("Failed to create executor: %v", err)
|
||||
}
|
||||
|
||||
output := buf.String()
|
||||
t.Logf("Generated migration:\n%s", output)
|
||||
|
||||
// Verify CREATE UNIQUE INDEX is present
|
||||
if !strings.Contains(output, "CREATE UNIQUE INDEX") {
|
||||
t.Error("Migration missing CREATE UNIQUE INDEX statement")
|
||||
data := AuditFunctionData{
|
||||
SchemaName: "public",
|
||||
FunctionName: "ft_audit_users",
|
||||
TableName: "users",
|
||||
TablePrefix: "NULL",
|
||||
PrimaryKey: "id",
|
||||
AuditSchema: "public",
|
||||
UserFunction: "current_user",
|
||||
AuditInsert: true,
|
||||
AuditUpdate: true,
|
||||
AuditDelete: true,
|
||||
UpdateCondition: "old.name IS DISTINCT FROM new.name",
|
||||
UpdateColumns: []AuditColumnData{
|
||||
{Name: "name", OldValue: "old.name::text", NewValue: "new.name::text"},
|
||||
},
|
||||
DeleteColumns: []AuditColumnData{
|
||||
{Name: "name", OldValue: "old.name::text"},
|
||||
},
|
||||
}
|
||||
if !strings.Contains(output, "uk_users_email") {
|
||||
t.Error("Migration missing index name")
|
||||
|
||||
sql, err := executor.ExecuteAuditFunction(data)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to execute template: %v", err)
|
||||
}
|
||||
|
||||
t.Logf("Generated SQL:\n%s", sql)
|
||||
|
||||
if !strings.Contains(sql, "CREATE OR REPLACE FUNCTION public.ft_audit_users()") {
|
||||
t.Error("SQL missing function definition")
|
||||
}
|
||||
if !strings.Contains(sql, "IF TG_OP = 'INSERT'") {
|
||||
t.Error("SQL missing INSERT handling")
|
||||
}
|
||||
if !strings.Contains(sql, "ELSIF TG_OP = 'UPDATE'") {
|
||||
t.Error("SQL missing UPDATE handling")
|
||||
}
|
||||
if !strings.Contains(sql, "ELSIF TG_OP = 'DELETE'") {
|
||||
t.Error("SQL missing DELETE handling")
|
||||
}
|
||||
}
|
||||
|
||||
285
pkg/writers/pgsql/template_functions.go
Normal file
285
pkg/writers/pgsql/template_functions.go
Normal file
@@ -0,0 +1,285 @@
|
||||
package pgsql
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// TemplateFunctions returns a map of custom template functions
|
||||
func TemplateFunctions() map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
// String manipulation
|
||||
"upper": strings.ToUpper,
|
||||
"lower": strings.ToLower,
|
||||
"snake_case": toSnakeCase,
|
||||
"camelCase": toCamelCase,
|
||||
|
||||
// SQL formatting
|
||||
"indent": indent,
|
||||
"quote": quote,
|
||||
"escape": escape,
|
||||
"safe_identifier": safeIdentifier,
|
||||
|
||||
// Type conversion
|
||||
"goTypeToSQL": goTypeToSQL,
|
||||
"sqlTypeToGo": sqlTypeToGo,
|
||||
"isNumeric": isNumeric,
|
||||
"isText": isText,
|
||||
|
||||
// Collection helpers
|
||||
"first": first,
|
||||
"last": last,
|
||||
"filter": filter,
|
||||
"mapFunc": mapFunc,
|
||||
"join_with": joinWith,
|
||||
|
||||
// Built-in Go template function (for convenience)
|
||||
"join": strings.Join,
|
||||
}
|
||||
}
|
||||
|
||||
// String manipulation functions
|
||||
|
||||
// toSnakeCase converts a string to snake_case: each uppercase rune is
// lowered and, except at the start, prefixed with an underscore.
// Note that runs of capitals split per letter ("UserID" -> "user_i_d").
func toSnakeCase(s string) string {
	var b strings.Builder
	for i, r := range s {
		if !unicode.IsUpper(r) {
			b.WriteRune(r)
			continue
		}
		if i > 0 {
			b.WriteByte('_')
		}
		b.WriteRune(unicode.ToLower(r))
	}
	return b.String()
}
|
||||
|
||||
// toCamelCase converts a snake_case string to camelCase: the first
// underscore-separated segment is lowercased and every later segment
// is capitalized (rest lowercased).
func toCamelCase(s string) string {
	parts := strings.Split(s, "_")
	if len(parts) == 0 {
		return s
	}

	var b strings.Builder
	b.WriteString(strings.ToLower(parts[0]))
	for _, part := range parts[1:] {
		if part == "" {
			continue
		}
		b.WriteString(strings.ToUpper(part[:1]))
		b.WriteString(strings.ToLower(part[1:]))
	}
	return b.String()
}
|
||||
|
||||
// SQL formatting functions
|
||||
|
||||
// indent prefixes every non-empty line of text with the given number of
// spaces; blank lines are left untouched.
func indent(spaces int, text string) string {
	pad := strings.Repeat(" ", spaces)
	out := strings.Split(text, "\n")
	for i := range out {
		if out[i] == "" {
			continue
		}
		out[i] = pad + out[i]
	}
	return strings.Join(out, "\n")
}
|
||||
|
||||
// quote wraps s in single quotes for use as a SQL string literal,
// doubling any embedded single quotes.
func quote(s string) string {
	escaped := strings.ReplaceAll(s, "'", "''")
	return "'" + escaped + "'"
}
|
||||
|
||||
// escape escapes a string for SQL by doubling backslashes and single
// quotes. A single-pass Replacer is equivalent to the sequential
// ReplaceAll calls here, since neither replacement produces the other's
// search text.
func escape(s string) string {
	replacer := strings.NewReplacer("\\", "\\\\", "'", "''")
	return replacer.Replace(s)
}
|
||||
|
||||
// safeIdentRe matches every character that is not legal in a SQL
// identifier (anything outside letters, digits, and underscore).
// Compiled once at package scope instead of on every call.
var safeIdentRe = regexp.MustCompile(`[^a-zA-Z0-9_]`)

// safeIdentifier makes a string safe to use as a SQL identifier:
// disallowed characters become underscores, a leading digit is prefixed
// with an underscore, and the result is lowercased (PostgreSQL
// convention).
func safeIdentifier(s string) string {
	safe := safeIdentRe.ReplaceAllString(s, "_")

	// Identifiers must not start with a digit.
	if len(safe) > 0 && unicode.IsDigit(rune(safe[0])) {
		safe = "_" + safe
	}

	return strings.ToLower(safe)
}
|
||||
|
||||
// Type conversion functions
|
||||
|
||||
// goTypeToSQL converts a Go type name to its PostgreSQL equivalent.
// Unknown types fall back to "text". A switch avoids the per-call map
// allocation of the previous implementation.
func goTypeToSQL(goType string) string {
	switch goType {
	case "string":
		return "text"
	case "int", "int32":
		return "integer"
	case "int64":
		return "bigint"
	case "float32":
		return "real"
	case "float64":
		return "double precision"
	case "bool":
		return "boolean"
	case "time.Time":
		return "timestamp"
	case "[]byte":
		return "bytea"
	default:
		return "text" // Default
	}
}
|
||||
|
||||
// sqlTypeToGo converts a PostgreSQL type name (case-insensitive) to its
// Go equivalent. Unknown types fall back to "string". A switch avoids
// rebuilding the lookup map on every call.
func sqlTypeToGo(sqlType string) string {
	switch strings.ToLower(sqlType) {
	case "text", "varchar", "char", "uuid":
		return "string"
	case "integer", "int", "serial":
		return "int"
	case "bigint", "bigserial":
		return "int64"
	case "smallint":
		return "int16"
	case "real":
		return "float32"
	case "double precision", "numeric", "decimal":
		return "float64"
	case "boolean":
		return "bool"
	case "timestamp", "timestamptz", "date", "time":
		return "time.Time"
	case "bytea":
		return "[]byte"
	case "json", "jsonb":
		return "json.RawMessage"
	default:
		return "string" // Default
	}
}
|
||||
|
||||
// isNumeric reports whether a SQL type is numeric. Matching is by
// case-insensitive substring, so parameterized forms such as
// "numeric(10,2)" are detected as well.
func isNumeric(sqlType string) bool {
	lowered := strings.ToLower(sqlType)
	for _, candidate := range []string{
		"integer", "int", "bigint", "smallint", "serial", "bigserial",
		"real", "double precision", "numeric", "decimal", "float",
	} {
		if strings.Contains(lowered, candidate) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// isText reports whether a SQL type is text-based. Matching is by
// case-insensitive substring, so forms like "varchar(255)" and
// "character varying" are detected as well.
func isText(sqlType string) bool {
	lowered := strings.ToLower(sqlType)
	for _, candidate := range []string{
		"text", "varchar", "char", "character", "string",
	} {
		if strings.Contains(lowered, candidate) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// Collection helper functions
|
||||
|
||||
// first returns the first element of a supported slice type
// ([]string, []int, []interface{}), or nil for an empty slice or an
// unsupported type.
func first(slice interface{}) interface{} {
	switch v := slice.(type) {
	case []string:
		if len(v) == 0 {
			return nil
		}
		return v[0]
	case []int:
		if len(v) == 0 {
			return nil
		}
		return v[0]
	case []interface{}:
		if len(v) == 0 {
			return nil
		}
		return v[0]
	default:
		return nil
	}
}
|
||||
|
||||
// last returns the last element of a supported slice type
// ([]string, []int, []interface{}), or nil for an empty slice or an
// unsupported type.
func last(slice interface{}) interface{} {
	switch v := slice.(type) {
	case []string:
		if len(v) == 0 {
			return nil
		}
		return v[len(v)-1]
	case []int:
		if len(v) == 0 {
			return nil
		}
		return v[len(v)-1]
	case []interface{}:
		if len(v) == 0 {
			return nil
		}
		return v[len(v)-1]
	default:
		return nil
	}
}
|
||||
|
||||
// filter is a documentation-only placeholder registered for templates:
// it returns the slice unchanged. Real filtering inside a template is
// expressed with {{range}}/{{if}} constructs instead.
func filter(slice interface{}, fieldName string) interface{} {
	_ = fieldName // unused; kept for the registered template signature
	return slice
}
|
||||
|
||||
// mapFunc is a documentation-only placeholder registered for templates:
// it returns the value unchanged. In a template the helper would be
// called directly, e.g. {{upper .Name}}.
func mapFunc(value interface{}, funcName string) interface{} {
	_ = funcName // unused; kept for the registered template signature
	return value
}
|
||||
|
||||
// joinWith joins the elements of slice with separator; it is a thin
// template-facing wrapper around strings.Join.
func joinWith(slice []string, separator string) string {
	return strings.Join(slice, separator)
}
|
||||
|
||||
// Additional helper functions
|
||||
|
||||
// formatType renders a SQL type with optional length and precision,
// e.g. ("numeric", 10, 2) -> "numeric(10,2)", ("varchar", 255, 0) ->
// "varchar(255)". Non-positive length returns the bare type.
func formatType(baseType string, length, precision int) string {
	switch {
	case length > 0 && precision > 0:
		return fmt.Sprintf("%s(%d,%d)", baseType, length, precision)
	case length > 0:
		return fmt.Sprintf("%s(%d)", baseType, length)
	default:
		return baseType
	}
}
|
||||
332
pkg/writers/pgsql/template_functions_test.go
Normal file
332
pkg/writers/pgsql/template_functions_test.go
Normal file
@@ -0,0 +1,332 @@
|
||||
package pgsql
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestToSnakeCase(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"UserId", "user_id"},
|
||||
{"UserID", "user_i_d"},
|
||||
{"HTTPResponse", "h_t_t_p_response"},
|
||||
{"already_snake", "already_snake"},
|
||||
{"", ""},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := toSnakeCase(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("toSnakeCase(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestToCamelCase(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"user_id", "userId"},
|
||||
{"user_name", "userName"},
|
||||
{"http_response", "httpResponse"},
|
||||
{"", ""},
|
||||
{"alreadycamel", "alreadycamel"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := toCamelCase(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("toCamelCase(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuote(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"hello", "'hello'"},
|
||||
{"O'Brien", "'O''Brien'"},
|
||||
{"", "''"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := quote(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("quote(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEscape(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"hello", "hello"},
|
||||
{"O'Brien", "O''Brien"},
|
||||
{"path\\to\\file", "path\\\\to\\\\file"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := escape(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("escape(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSafeIdentifier(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"User-Id", "user_id"},
|
||||
{"123column", "_123column"},
|
||||
{"valid_name", "valid_name"},
|
||||
{"Column@Name!", "column_name_"},
|
||||
{"UPPERCASE", "uppercase"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := safeIdentifier(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("safeIdentifier(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGoTypeToSQL(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"string", "text"},
|
||||
{"int", "integer"},
|
||||
{"int64", "bigint"},
|
||||
{"bool", "boolean"},
|
||||
{"time.Time", "timestamp"},
|
||||
{"unknown", "text"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := goTypeToSQL(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("goTypeToSQL(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSQLTypeToGo(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"text", "string"},
|
||||
{"integer", "int"},
|
||||
{"bigint", "int64"},
|
||||
{"boolean", "bool"},
|
||||
{"timestamp", "time.Time"},
|
||||
{"unknown", "string"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := sqlTypeToGo(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("sqlTypeToGo(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsNumeric(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected bool
|
||||
}{
|
||||
{"integer", true},
|
||||
{"bigint", true},
|
||||
{"numeric(10,2)", true},
|
||||
{"text", false},
|
||||
{"varchar", false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := isNumeric(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("isNumeric(%q) = %v, want %v", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsText(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected bool
|
||||
}{
|
||||
{"text", true},
|
||||
{"varchar(255)", true},
|
||||
{"character varying", true},
|
||||
{"integer", false},
|
||||
{"bigint", false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := isText(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("isText(%q) = %v, want %v", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndent(t *testing.T) {
|
||||
input := "line1\nline2\nline3"
|
||||
expected := " line1\n line2\n line3"
|
||||
result := indent(2, input)
|
||||
if result != expected {
|
||||
t.Errorf("indent(2, %q) = %q, want %q", input, result, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFirst(t *testing.T) {
|
||||
tests := []struct {
|
||||
input interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{[]string{"a", "b", "c"}, "a"},
|
||||
{[]string{}, nil},
|
||||
{[]int{1, 2, 3}, 1},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := first(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("first(%v) = %v, want %v", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestLast(t *testing.T) {
|
||||
tests := []struct {
|
||||
input interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{[]string{"a", "b", "c"}, "c"},
|
||||
{[]string{}, nil},
|
||||
{[]int{1, 2, 3}, 3},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := last(tt.input)
|
||||
if result != tt.expected {
|
||||
t.Errorf("last(%v) = %v, want %v", tt.input, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestJoinWith(t *testing.T) {
|
||||
input := []string{"a", "b", "c"}
|
||||
expected := "a, b, c"
|
||||
result := joinWith(input, ", ")
|
||||
if result != expected {
|
||||
t.Errorf("joinWith(%v, \", \") = %q, want %q", input, result, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateFunctions(t *testing.T) {
|
||||
funcs := TemplateFunctions()
|
||||
|
||||
// Check that all expected functions are registered
|
||||
expectedFuncs := []string{
|
||||
"upper", "lower", "snake_case", "camelCase",
|
||||
"indent", "quote", "escape", "safe_identifier",
|
||||
"goTypeToSQL", "sqlTypeToGo", "isNumeric", "isText",
|
||||
"first", "last", "filter", "mapFunc", "join_with",
|
||||
"join",
|
||||
}
|
||||
|
||||
for _, name := range expectedFuncs {
|
||||
if _, ok := funcs[name]; !ok {
|
||||
t.Errorf("Expected function %q not found in TemplateFunctions()", name)
|
||||
}
|
||||
}
|
||||
|
||||
// Test that they're callable
|
||||
if upperFunc, ok := funcs["upper"].(func(string) string); ok {
|
||||
result := upperFunc("hello")
|
||||
if result != "HELLO" {
|
||||
t.Errorf("upper function not working correctly")
|
||||
}
|
||||
} else {
|
||||
t.Error("upper function has wrong type")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatType(t *testing.T) {
|
||||
tests := []struct {
|
||||
baseType string
|
||||
length int
|
||||
precision int
|
||||
expected string
|
||||
}{
|
||||
{"varchar", 255, 0, "varchar(255)"},
|
||||
{"numeric", 10, 2, "numeric(10,2)"},
|
||||
{"integer", 0, 0, "integer"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
result := formatType(tt.baseType, tt.length, tt.precision)
|
||||
if result != tt.expected {
|
||||
t.Errorf("formatType(%q, %d, %d) = %q, want %q",
|
||||
tt.baseType, tt.length, tt.precision, result, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test that template functions work in actual templates
|
||||
func TestTemplateFunctionsInTemplate(t *testing.T) {
|
||||
executor, err := NewTemplateExecutor()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create executor: %v", err)
|
||||
}
|
||||
|
||||
// Create a simple test template
|
||||
tmpl, err := executor.templates.New("test").Parse(`
|
||||
{{- upper .Name -}}
|
||||
{{- lower .Type -}}
|
||||
{{- snake_case .CamelName -}}
|
||||
{{- safe_identifier .UnsafeName -}}
|
||||
`)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse test template: %v", err)
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Name string
|
||||
Type string
|
||||
CamelName string
|
||||
UnsafeName string
|
||||
}{
|
||||
Name: "hello",
|
||||
Type: "TEXT",
|
||||
CamelName: "UserId",
|
||||
UnsafeName: "user-id!",
|
||||
}
|
||||
|
||||
var buf strings.Builder
|
||||
err = tmpl.Execute(&buf, data)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to execute template: %v", err)
|
||||
}
|
||||
|
||||
result := buf.String()
|
||||
expected := "HELLOtextuser_iduser_id_"
|
||||
|
||||
if result != expected {
|
||||
t.Errorf("Template output = %q, want %q", result, expected)
|
||||
}
|
||||
}
|
||||
457
pkg/writers/pgsql/templates.go
Normal file
457
pkg/writers/pgsql/templates.go
Normal file
@@ -0,0 +1,457 @@
|
||||
package pgsql
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"embed"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
)
|
||||
|
||||
//go:embed templates/*.tmpl
|
||||
var templateFS embed.FS
|
||||
|
||||
// TemplateExecutor manages and executes SQL templates.
// The template set is parsed once from the embedded templates/*.tmpl files
// by NewTemplateExecutor and rendered via the Execute* methods.
type TemplateExecutor struct {
	templates *template.Template // parsed set of all embedded templates
}
|
||||
|
||||
// NewTemplateExecutor creates a new template executor
|
||||
func NewTemplateExecutor() (*TemplateExecutor, error) {
|
||||
// Create template with custom functions
|
||||
funcMap := make(template.FuncMap)
|
||||
for k, v := range TemplateFunctions() {
|
||||
funcMap[k] = v
|
||||
}
|
||||
|
||||
tmpl, err := template.New("").Funcs(funcMap).ParseFS(templateFS, "templates/*.tmpl")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse templates: %w", err)
|
||||
}
|
||||
|
||||
return &TemplateExecutor{
|
||||
templates: tmpl,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Template data structures
|
||||
|
||||
// CreateTableData contains data for create table template
// (templates/create_table.tmpl).
type CreateTableData struct {
	SchemaName string       // schema the table is created in
	TableName  string       // name of the table to create
	Columns    []ColumnData // column definitions, rendered in order
}

// ColumnData represents column information for DDL templates.
type ColumnData struct {
	Name    string // column name, inserted verbatim into the DDL
	Type    string // SQL type text, inserted verbatim
	Default string // DEFAULT expression; empty omits the clause
	NotNull bool   // emit NOT NULL when true
}
|
||||
|
||||
// AddColumnData contains data for add column template
// (templates/add_column.tmpl).
type AddColumnData struct {
	SchemaName string // schema of the target table
	TableName  string // table receiving the new column
	ColumnName string // name of the column to add
	ColumnType string // SQL type of the new column
	Default    string // DEFAULT expression; empty omits the clause
	NotNull    bool   // emit NOT NULL when true
}

// AlterColumnTypeData contains data for alter column type template
// (templates/alter_column_type.tmpl).
type AlterColumnTypeData struct {
	SchemaName string
	TableName  string
	ColumnName string
	NewType    string // new SQL type for the column
}

// AlterColumnDefaultData contains data for alter column default template
// (templates/alter_column_default.tmpl).
type AlterColumnDefaultData struct {
	SchemaName string
	TableName  string
	ColumnName string
	SetDefault bool // true emits SET DEFAULT DefaultValue; false emits DROP DEFAULT
	// DefaultValue is only used when SetDefault is true.
	DefaultValue string
}
|
||||
|
||||
// CreatePrimaryKeyData contains data for create primary key template
// (templates/create_primary_key.tmpl).
type CreatePrimaryKeyData struct {
	SchemaName     string
	TableName      string
	ConstraintName string // constraint name, also used for the existence check
	Columns        string // comma-separated key columns, inserted verbatim
}

// CreateIndexData contains data for create index template
// (templates/create_index.tmpl).
type CreateIndexData struct {
	SchemaName string
	TableName  string
	IndexName  string
	IndexType  string // index method for the USING clause
	Columns    string // comma-separated column list, inserted verbatim
	Unique     bool   // emit UNIQUE when true
}

// CreateForeignKeyData contains data for create foreign key template
// (templates/create_foreign_key.tmpl). The template drops any existing
// constraint of the same name before re-adding it.
type CreateForeignKeyData struct {
	SchemaName     string
	TableName      string
	ConstraintName string
	SourceColumns  string // referencing columns, comma-separated
	TargetSchema   string
	TargetTable    string
	TargetColumns  string // referenced columns, comma-separated
	OnDelete       string // ON DELETE action text, inserted verbatim
	OnUpdate       string // ON UPDATE action text, inserted verbatim
}
|
||||
|
||||
// DropConstraintData contains data for drop constraint template.
type DropConstraintData struct {
	SchemaName     string
	TableName      string
	ConstraintName string // constraint to drop
}

// DropIndexData contains data for drop index template.
// Note: no TableName — indexes are addressed by schema and index name.
type DropIndexData struct {
	SchemaName string
	IndexName  string
}

// CommentTableData contains data for table comment template
// (templates/comment_table.tmpl).
type CommentTableData struct {
	SchemaName string
	TableName  string
	// Comment is inserted into a single-quoted SQL literal; embedded single
	// quotes must already be escaped by the caller.
	Comment string
}

// CommentColumnData contains data for column comment template
// (templates/comment_column.tmpl).
type CommentColumnData struct {
	SchemaName string
	TableName  string
	ColumnName string
	// Comment is inserted into a single-quoted SQL literal; embedded single
	// quotes must already be escaped by the caller.
	Comment string
}
|
||||
|
||||
// AuditTablesData contains data for audit tables template
// (templates/audit_tables.tmpl).
type AuditTablesData struct {
	AuditSchema string // schema that holds the atevent/atdetail audit tables
}

// AuditColumnData represents a column in audit template output.
type AuditColumnData struct {
	Name     string // column name as it appears in the audited table
	OldValue string // SQL expression for the pre-change value (masked literal for encrypted columns)
	NewValue string // SQL expression for the post-change value; unused for delete records
}

// AuditFunctionData contains data for audit function template
// (templates/audit_function.tmpl).
type AuditFunctionData struct {
	SchemaName      string            // schema of the audited table and trigger function
	FunctionName    string            // generated trigger-function name (ft_audit_<table>)
	TableName       string            // audited table name
	TablePrefix     string            // SQL literal or the string "NULL", inserted verbatim
	PrimaryKey      string            // primary key column recorded as rid_parent
	AuditSchema     string            // schema holding the atevent/atdetail tables
	UserFunction    string            // SQL expression yielding the acting user
	AuditInsert     bool              // record INSERT events when true
	AuditUpdate     bool              // record UPDATE events when true
	AuditDelete     bool              // record DELETE events when true
	UpdateCondition string            // OR-joined "IS DISTINCT FROM" comparisons over audited columns
	UpdateColumns   []AuditColumnData // columns recorded on UPDATE (old and new values)
	DeleteColumns   []AuditColumnData // columns recorded on DELETE (old values only)
}

// AuditTriggerData contains data for audit trigger template
// (templates/audit_trigger.tmpl).
type AuditTriggerData struct {
	SchemaName   string
	TableName    string
	TriggerName  string
	FunctionName string // trigger function to invoke, resolved in SchemaName
	Events       string // event list inserted after AFTER (e.g. joined with OR — confirm against callers)
}
|
||||
|
||||
// Execute methods for each template
|
||||
|
||||
// ExecuteCreateTable executes the create table template
|
||||
func (te *TemplateExecutor) ExecuteCreateTable(data CreateTableData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "create_table.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute create_table template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAddColumn executes the add column template
|
||||
func (te *TemplateExecutor) ExecuteAddColumn(data AddColumnData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "add_column.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute add_column template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAlterColumnType executes the alter column type template
|
||||
func (te *TemplateExecutor) ExecuteAlterColumnType(data AlterColumnTypeData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "alter_column_type.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute alter_column_type template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAlterColumnDefault executes the alter column default template
|
||||
func (te *TemplateExecutor) ExecuteAlterColumnDefault(data AlterColumnDefaultData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "alter_column_default.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute alter_column_default template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteCreatePrimaryKey executes the create primary key template
|
||||
func (te *TemplateExecutor) ExecuteCreatePrimaryKey(data CreatePrimaryKeyData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "create_primary_key.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute create_primary_key template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteCreateIndex executes the create index template
|
||||
func (te *TemplateExecutor) ExecuteCreateIndex(data CreateIndexData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "create_index.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute create_index template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteCreateForeignKey executes the create foreign key template
|
||||
func (te *TemplateExecutor) ExecuteCreateForeignKey(data CreateForeignKeyData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "create_foreign_key.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute create_foreign_key template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteDropConstraint executes the drop constraint template
|
||||
func (te *TemplateExecutor) ExecuteDropConstraint(data DropConstraintData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "drop_constraint.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute drop_constraint template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteDropIndex executes the drop index template
|
||||
func (te *TemplateExecutor) ExecuteDropIndex(data DropIndexData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "drop_index.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute drop_index template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteCommentTable executes the table comment template
|
||||
func (te *TemplateExecutor) ExecuteCommentTable(data CommentTableData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "comment_table.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute comment_table template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteCommentColumn executes the column comment template
|
||||
func (te *TemplateExecutor) ExecuteCommentColumn(data CommentColumnData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "comment_column.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute comment_column template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAuditTables executes the audit tables template
|
||||
func (te *TemplateExecutor) ExecuteAuditTables(data AuditTablesData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "audit_tables.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute audit_tables template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAuditFunction executes the audit function template
|
||||
func (te *TemplateExecutor) ExecuteAuditFunction(data AuditFunctionData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "audit_function.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute audit_function template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// ExecuteAuditTrigger executes the audit trigger template
|
||||
func (te *TemplateExecutor) ExecuteAuditTrigger(data AuditTriggerData) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := te.templates.ExecuteTemplate(&buf, "audit_trigger.tmpl", data)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to execute audit_trigger template: %w", err)
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// Helper functions to build template data from models
|
||||
|
||||
// BuildCreateTableData builds CreateTableData from a models.Table
|
||||
func BuildCreateTableData(schemaName string, table *models.Table) CreateTableData {
|
||||
columns := make([]ColumnData, 0, len(table.Columns))
|
||||
|
||||
// Get sorted columns
|
||||
sortedCols := getSortedColumns(table.Columns)
|
||||
for _, col := range sortedCols {
|
||||
colData := ColumnData{
|
||||
Name: col.Name,
|
||||
Type: col.Type,
|
||||
NotNull: col.NotNull,
|
||||
}
|
||||
if col.Default != nil {
|
||||
colData.Default = fmt.Sprintf("%v", col.Default)
|
||||
}
|
||||
columns = append(columns, colData)
|
||||
}
|
||||
|
||||
return CreateTableData{
|
||||
SchemaName: schemaName,
|
||||
TableName: table.Name,
|
||||
Columns: columns,
|
||||
}
|
||||
}
|
||||
|
||||
// BuildAuditFunctionData builds AuditFunctionData from table and config
|
||||
func BuildAuditFunctionData(
|
||||
schemaName string,
|
||||
table *models.Table,
|
||||
pk *models.Column,
|
||||
config *TableAuditConfig,
|
||||
auditSchema string,
|
||||
userFunction string,
|
||||
) AuditFunctionData {
|
||||
funcName := fmt.Sprintf("ft_audit_%s", table.Name)
|
||||
|
||||
// Build list of audited columns
|
||||
auditedColumns := make([]*models.Column, 0)
|
||||
for _, col := range table.Columns {
|
||||
if col.Name == pk.Name {
|
||||
continue
|
||||
}
|
||||
|
||||
excluded := false
|
||||
for _, excl := range config.ExcludedColumns {
|
||||
if strings.EqualFold(col.Name, excl) {
|
||||
excluded = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if excluded {
|
||||
continue
|
||||
}
|
||||
|
||||
auditedColumns = append(auditedColumns, col)
|
||||
}
|
||||
|
||||
// Build update condition
|
||||
updateComparisons := make([]string, 0)
|
||||
for _, col := range auditedColumns {
|
||||
updateComparisons = append(updateComparisons,
|
||||
fmt.Sprintf("old.%s IS DISTINCT FROM new.%s", col.Name, col.Name))
|
||||
}
|
||||
updateCondition := strings.Join(updateComparisons, " OR ")
|
||||
|
||||
// Build update columns data
|
||||
updateColumns := make([]AuditColumnData, 0)
|
||||
for _, col := range auditedColumns {
|
||||
isEncrypted := false
|
||||
for _, enc := range config.EncryptedColumns {
|
||||
if strings.EqualFold(col.Name, enc) {
|
||||
isEncrypted = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
oldValue := fmt.Sprintf("old.%s::text", col.Name)
|
||||
newValue := fmt.Sprintf("new.%s::text", col.Name)
|
||||
|
||||
if isEncrypted {
|
||||
oldValue = "'****************'"
|
||||
newValue = "'****************'"
|
||||
}
|
||||
|
||||
updateColumns = append(updateColumns, AuditColumnData{
|
||||
Name: col.Name,
|
||||
OldValue: oldValue,
|
||||
NewValue: newValue,
|
||||
})
|
||||
}
|
||||
|
||||
// Build delete columns data (same as update but only old values)
|
||||
deleteColumns := make([]AuditColumnData, 0)
|
||||
for _, col := range auditedColumns {
|
||||
isEncrypted := false
|
||||
for _, enc := range config.EncryptedColumns {
|
||||
if strings.EqualFold(col.Name, enc) {
|
||||
isEncrypted = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
oldValue := fmt.Sprintf("old.%s::text", col.Name)
|
||||
if isEncrypted {
|
||||
oldValue = "'****************'"
|
||||
}
|
||||
|
||||
deleteColumns = append(deleteColumns, AuditColumnData{
|
||||
Name: col.Name,
|
||||
OldValue: oldValue,
|
||||
})
|
||||
}
|
||||
|
||||
tablePrefix := "NULL"
|
||||
if config.TablePrefix != "" {
|
||||
tablePrefix = fmt.Sprintf("'%s'", config.TablePrefix)
|
||||
}
|
||||
|
||||
return AuditFunctionData{
|
||||
SchemaName: schemaName,
|
||||
FunctionName: funcName,
|
||||
TableName: table.Name,
|
||||
TablePrefix: tablePrefix,
|
||||
PrimaryKey: pk.Name,
|
||||
AuditSchema: auditSchema,
|
||||
UserFunction: userFunction,
|
||||
AuditInsert: config.AuditInsert,
|
||||
AuditUpdate: config.AuditUpdate,
|
||||
AuditDelete: config.AuditDelete,
|
||||
UpdateCondition: updateCondition,
|
||||
UpdateColumns: updateColumns,
|
||||
DeleteColumns: deleteColumns,
|
||||
}
|
||||
}
|
||||
4
pkg/writers/pgsql/templates/add_column.tmpl
Normal file
4
pkg/writers/pgsql/templates/add_column.tmpl
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ADD COLUMN IF NOT EXISTS {{.ColumnName}} {{.ColumnType}}
|
||||
{{- if .Default}} DEFAULT {{.Default}}{{end}}
|
||||
{{- if .NotNull}} NOT NULL{{end}};
|
||||
7
pkg/writers/pgsql/templates/alter_column_default.tmpl
Normal file
7
pkg/writers/pgsql/templates/alter_column_default.tmpl
Normal file
@@ -0,0 +1,7 @@
|
||||
{{- if .SetDefault -}}
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ALTER COLUMN {{.ColumnName}} SET DEFAULT {{.DefaultValue}};
|
||||
{{- else -}}
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ALTER COLUMN {{.ColumnName}} DROP DEFAULT;
|
||||
{{- end -}}
|
||||
2
pkg/writers/pgsql/templates/alter_column_type.tmpl
Normal file
2
pkg/writers/pgsql/templates/alter_column_type.tmpl
Normal file
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ALTER COLUMN {{.ColumnName}} TYPE {{.NewType}};
|
||||
84
pkg/writers/pgsql/templates/audit_function.tmpl
Normal file
84
pkg/writers/pgsql/templates/audit_function.tmpl
Normal file
@@ -0,0 +1,84 @@
|
||||
CREATE OR REPLACE FUNCTION {{.SchemaName}}.{{.FunctionName}}()
|
||||
RETURNS trigger AS
|
||||
$body$
|
||||
DECLARE
|
||||
m_funcname text = '{{.FunctionName}}';
|
||||
m_user text;
|
||||
m_atevent integer;
|
||||
BEGIN
|
||||
-- Get current user
|
||||
m_user := {{.UserFunction}}::text;
|
||||
|
||||
-- Skip audit for specific users if needed
|
||||
IF m_user IN ('noaudit', 'importuser') THEN
|
||||
IF (TG_OP = 'DELETE') THEN
|
||||
RETURN OLD;
|
||||
ELSIF (TG_OP = 'UPDATE') THEN
|
||||
RETURN NEW;
|
||||
ELSIF (TG_OP = 'INSERT') THEN
|
||||
RETURN NEW;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
{{- if .AuditInsert}}
|
||||
IF TG_OP = 'INSERT' THEN
|
||||
-- Record INSERT
|
||||
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, changeuser, changedate, changetime, actionx)
|
||||
VALUES ('{{.TableName}}', {{.TablePrefix}}, new.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 1)
|
||||
RETURNING rid_atevent INTO m_atevent;
|
||||
{{- end}}
|
||||
|
||||
{{- if .AuditUpdate}}
|
||||
ELSIF TG_OP = 'UPDATE' THEN
|
||||
-- Check if any audited columns changed
|
||||
IF ({{.UpdateCondition}}) THEN
|
||||
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, changeuser, changedate, changetime, actionx)
|
||||
VALUES ('{{.TableName}}', {{.TablePrefix}}, new.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 2)
|
||||
RETURNING rid_atevent INTO m_atevent;
|
||||
|
||||
-- Record column changes
|
||||
{{- range .UpdateColumns}}
|
||||
IF (old.{{.Name}} IS DISTINCT FROM new.{{.Name}}) THEN
|
||||
INSERT INTO {{$.AuditSchema}}.atdetail(rid_atevent, datacolumn, changedfrom, changedto)
|
||||
VALUES (m_atevent, '{{.Name}}', substr({{.OldValue}}, 1, 1000), substr({{.NewValue}}, 1, 1000));
|
||||
END IF;
|
||||
{{- end}}
|
||||
END IF;
|
||||
{{- end}}
|
||||
|
||||
{{- if .AuditDelete}}
|
||||
ELSIF TG_OP = 'DELETE' THEN
|
||||
-- Record DELETE
|
||||
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, rid_deletedparent, changeuser, changedate, changetime, actionx)
|
||||
VALUES ('{{.TableName}}', {{.TablePrefix}}, old.{{.PrimaryKey}}, old.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 3)
|
||||
RETURNING rid_atevent INTO m_atevent;
|
||||
|
||||
-- Record deleted column values
|
||||
{{- range .DeleteColumns}}
|
||||
INSERT INTO {{$.AuditSchema}}.atdetail(rid_atevent, datacolumn, changedfrom, changedto)
|
||||
VALUES (m_atevent, '{{.Name}}', substr({{.OldValue}}, 1, 1000), NULL);
|
||||
{{- end}}
|
||||
{{- end}}
|
||||
END IF;
|
||||
|
||||
IF (TG_OP = 'DELETE') THEN
|
||||
RETURN OLD;
|
||||
ELSIF (TG_OP = 'UPDATE') THEN
|
||||
RETURN NEW;
|
||||
ELSIF (TG_OP = 'INSERT') THEN
|
||||
RETURN NEW;
|
||||
END IF;
|
||||
|
||||
RETURN NULL;
|
||||
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
RAISE WARNING 'Audit function % failed: %', m_funcname, SQLERRM;
|
||||
RETURN NULL;
|
||||
END;
|
||||
$body$
|
||||
LANGUAGE plpgsql
|
||||
VOLATILE
|
||||
SECURITY DEFINER;
|
||||
|
||||
COMMENT ON FUNCTION {{.SchemaName}}.{{.FunctionName}}() IS 'Audit trigger function for table {{.SchemaName}}.{{.TableName}}';
|
||||
49
pkg/writers/pgsql/templates/audit_tables.tmpl
Normal file
49
pkg/writers/pgsql/templates/audit_tables.tmpl
Normal file
@@ -0,0 +1,49 @@
|
||||
-- Audit Event Header Table
|
||||
CREATE TABLE IF NOT EXISTS {{.AuditSchema}}.atevent (
|
||||
rid_atevent serial PRIMARY KEY,
|
||||
tablename text NOT NULL,
|
||||
tableprefix text,
|
||||
rid_parent integer NOT NULL,
|
||||
rid_deletedparent integer,
|
||||
changeuser text NOT NULL,
|
||||
changedate date NOT NULL,
|
||||
changetime time NOT NULL,
|
||||
actionx smallint NOT NULL,
|
||||
CONSTRAINT ck_atevent_action CHECK (actionx IN (1, 2, 3))
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_atevent_tablename ON {{.AuditSchema}}.atevent(tablename);
|
||||
CREATE INDEX IF NOT EXISTS idx_atevent_rid_parent ON {{.AuditSchema}}.atevent(rid_parent);
|
||||
CREATE INDEX IF NOT EXISTS idx_atevent_changedate ON {{.AuditSchema}}.atevent(changedate);
|
||||
CREATE INDEX IF NOT EXISTS idx_atevent_changeuser ON {{.AuditSchema}}.atevent(changeuser);
|
||||
|
||||
COMMENT ON TABLE {{.AuditSchema}}.atevent IS 'Audit trail header table - tracks all data changes';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_atevent IS 'Audit event ID';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.tablename IS 'Name of the table that was modified';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_parent IS 'Primary key value of the modified record';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_deletedparent IS 'Parent reference for deleted records';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changeuser IS 'User who made the change';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changedate IS 'Date of change';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changetime IS 'Time of change';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atevent.actionx IS 'Action type: 1=INSERT, 2=UPDATE, 3=DELETE';
|
||||
|
||||
-- Audit Event Detail Table
|
||||
CREATE TABLE IF NOT EXISTS {{.AuditSchema}}.atdetail (
|
||||
rid_atdetail serial PRIMARY KEY,
|
||||
rid_atevent integer NOT NULL,
|
||||
datacolumn text NOT NULL,
|
||||
changedfrom text,
|
||||
changedto text,
|
||||
CONSTRAINT fk_atdetail_atevent FOREIGN KEY (rid_atevent)
|
||||
REFERENCES {{.AuditSchema}}.atevent(rid_atevent) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_atdetail_rid_atevent ON {{.AuditSchema}}.atdetail(rid_atevent);
|
||||
CREATE INDEX IF NOT EXISTS idx_atdetail_datacolumn ON {{.AuditSchema}}.atdetail(datacolumn);
|
||||
|
||||
COMMENT ON TABLE {{.AuditSchema}}.atdetail IS 'Audit trail detail table - stores individual column changes';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.rid_atdetail IS 'Audit detail ID';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.rid_atevent IS 'Reference to audit event';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.datacolumn IS 'Name of the column that changed';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.changedfrom IS 'Old value before change';
|
||||
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.changedto IS 'New value after change';
|
||||
16
pkg/writers/pgsql/templates/audit_trigger.tmpl
Normal file
16
pkg/writers/pgsql/templates/audit_trigger.tmpl
Normal file
@@ -0,0 +1,16 @@
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM pg_trigger
|
||||
WHERE tgname = '{{.TriggerName}}'
|
||||
AND tgrelid = '{{.SchemaName}}.{{.TableName}}'::regclass
|
||||
) THEN
|
||||
CREATE TRIGGER {{.TriggerName}}
|
||||
AFTER {{.Events}}
|
||||
ON {{.SchemaName}}.{{.TableName}}
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION {{.SchemaName}}.{{.FunctionName}}();
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
39
pkg/writers/pgsql/templates/base_constraint.tmpl
Normal file
39
pkg/writers/pgsql/templates/base_constraint.tmpl
Normal file
@@ -0,0 +1,39 @@
|
||||
{{/* Base constraint template */}}
|
||||
{{- define "constraint_base" -}}
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ADD CONSTRAINT {{.ConstraintName}}
|
||||
{{block "constraint_definition" .}}{{end}};
|
||||
{{- end -}}
|
||||
|
||||
{{/* Drop constraint with check */}}
|
||||
{{- define "drop_constraint_safe" -}}
|
||||
DO $$
|
||||
BEGIN
|
||||
IF EXISTS (
|
||||
SELECT 1 FROM information_schema.table_constraints
|
||||
WHERE table_schema = '{{.SchemaName}}'
|
||||
AND table_name = '{{.TableName}}'
|
||||
AND constraint_name = '{{.ConstraintName}}'
|
||||
) THEN
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
DROP CONSTRAINT {{.ConstraintName}};
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
{{- end -}}
|
||||
|
||||
{{/* Add constraint with existence check */}}
|
||||
{{- define "add_constraint_safe" -}}
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.table_constraints
|
||||
WHERE table_schema = '{{.SchemaName}}'
|
||||
AND table_name = '{{.TableName}}'
|
||||
AND constraint_name = '{{.ConstraintName}}'
|
||||
) THEN
|
||||
{{template "constraint_base" .}}
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
{{- end -}}
|
||||
34
pkg/writers/pgsql/templates/base_ddl.tmpl
Normal file
34
pkg/writers/pgsql/templates/base_ddl.tmpl
Normal file
@@ -0,0 +1,34 @@
|
||||
{{/* Base DDL template with common structure */}}
|
||||
{{- define "ddl_header" -}}
|
||||
-- DDL Operation: {{.Operation}}
|
||||
-- Schema: {{.Schema}}
|
||||
-- Object: {{.ObjectName}}
|
||||
{{- end -}}
|
||||
|
||||
{{- define "ddl_footer" -}}
|
||||
-- End of {{.Operation}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Base ALTER TABLE structure */}}
|
||||
{{- define "alter_table_base" -}}
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
{{block "alter_operation" .}}{{end}};
|
||||
{{- end -}}
|
||||
|
||||
{{/* Common existence check pattern */}}
|
||||
{{- define "exists_check" -}}
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
{{block "exists_query" .}}{{end}}
|
||||
) THEN
|
||||
{{block "create_statement" .}}{{end}}
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
{{- end -}}
|
||||
|
||||
{{/* Common drop pattern */}}
|
||||
{{- define "drop_if_exists" -}}
|
||||
{{block "drop_type" .}}{{end}} IF EXISTS {{.SchemaName}}.{{.ObjectName}};
|
||||
{{- end -}}
|
||||
1
pkg/writers/pgsql/templates/comment_column.tmpl
Normal file
1
pkg/writers/pgsql/templates/comment_column.tmpl
Normal file
@@ -0,0 +1 @@
|
||||
COMMENT ON COLUMN {{.SchemaName}}.{{.TableName}}.{{.ColumnName}} IS '{{.Comment}}';
|
||||
1
pkg/writers/pgsql/templates/comment_table.tmpl
Normal file
1
pkg/writers/pgsql/templates/comment_table.tmpl
Normal file
@@ -0,0 +1 @@
|
||||
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}} IS '{{.Comment}}';
|
||||
10
pkg/writers/pgsql/templates/create_foreign_key.tmpl
Normal file
10
pkg/writers/pgsql/templates/create_foreign_key.tmpl
Normal file
@@ -0,0 +1,10 @@
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
DROP CONSTRAINT IF EXISTS {{.ConstraintName}};
|
||||
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ADD CONSTRAINT {{.ConstraintName}}
|
||||
FOREIGN KEY ({{.SourceColumns}})
|
||||
REFERENCES {{.TargetSchema}}.{{.TargetTable}} ({{.TargetColumns}})
|
||||
ON DELETE {{.OnDelete}}
|
||||
ON UPDATE {{.OnUpdate}}
|
||||
DEFERRABLE;
|
||||
2
pkg/writers/pgsql/templates/create_index.tmpl
Normal file
2
pkg/writers/pgsql/templates/create_index.tmpl
Normal file
@@ -0,0 +1,2 @@
|
||||
CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{.IndexName}}
|
||||
ON {{.SchemaName}}.{{.TableName}} USING {{.IndexType}} ({{.Columns}});
|
||||
13
pkg/writers/pgsql/templates/create_primary_key.tmpl
Normal file
13
pkg/writers/pgsql/templates/create_primary_key.tmpl
Normal file
@@ -0,0 +1,13 @@
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.table_constraints
|
||||
WHERE table_schema = '{{.SchemaName}}'
|
||||
AND table_name = '{{.TableName}}'
|
||||
AND constraint_name = '{{.ConstraintName}}'
|
||||
) THEN
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}}
|
||||
ADD CONSTRAINT {{.ConstraintName}} PRIMARY KEY ({{.Columns}});
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
@@ -0,0 +1,4 @@
|
||||
{{/* Example of using template inheritance for primary key creation */}}
|
||||
{{/* This demonstrates how to use the base exists_check pattern */}}
|
||||
{{/* Note: This is an example and not used by the actual migration writer */}}
|
||||
{{/* The actual create_primary_key.tmpl is used instead */}}
|
||||
8
pkg/writers/pgsql/templates/create_table.tmpl
Normal file
8
pkg/writers/pgsql/templates/create_table.tmpl
Normal file
@@ -0,0 +1,8 @@
|
||||
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
|
||||
{{- range $i, $col := .Columns}}
|
||||
{{- if $i}},{{end}}
|
||||
{{$col.Name}} {{$col.Type}}
|
||||
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
|
||||
{{- if $col.NotNull}} NOT NULL{{end}}
|
||||
{{- end}}
|
||||
);
|
||||
9
pkg/writers/pgsql/templates/create_table_composed.tmpl
Normal file
9
pkg/writers/pgsql/templates/create_table_composed.tmpl
Normal file
@@ -0,0 +1,9 @@
|
||||
{{/* Example of table creation using composition */}}
|
||||
{{- define "create_table_composed" -}}
|
||||
CREATE TABLE IF NOT EXISTS {{template "qualified_table" .}} (
|
||||
{{- range $i, $col := .Columns}}
|
||||
{{- if $i}},{{end}}
|
||||
{{template "column_definition" $col}}
|
||||
{{- end}}
|
||||
);
|
||||
{{- end -}}
|
||||
1
pkg/writers/pgsql/templates/drop_constraint.tmpl
Normal file
1
pkg/writers/pgsql/templates/drop_constraint.tmpl
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE {{.SchemaName}}.{{.TableName}} DROP CONSTRAINT IF EXISTS {{.ConstraintName}};
|
||||
1
pkg/writers/pgsql/templates/drop_index.tmpl
Normal file
1
pkg/writers/pgsql/templates/drop_index.tmpl
Normal file
@@ -0,0 +1 @@
|
||||
DROP INDEX IF EXISTS {{.SchemaName}}.{{.IndexName}} CASCADE;
|
||||
45
pkg/writers/pgsql/templates/fragments.tmpl
Normal file
45
pkg/writers/pgsql/templates/fragments.tmpl
Normal file
@@ -0,0 +1,45 @@
|
||||
{{/* Reusable template fragments */}}
|
||||
|
||||
{{/* Column definition fragment */}}
|
||||
{{- define "column_definition" -}}
|
||||
{{.Name}} {{.Type}}
|
||||
{{- if .Default}} DEFAULT {{.Default}}{{end}}
|
||||
{{- if .NotNull}} NOT NULL{{end}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Comma-separated column list */}}
|
||||
{{- define "column_list" -}}
|
||||
{{- range $i, $col := . -}}
|
||||
{{- if $i}}, {{end}}{{$col}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Qualified table name */}}
|
||||
{{- define "qualified_table" -}}
|
||||
{{.SchemaName}}.{{.TableName}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Index method clause */}}
|
||||
{{- define "index_method" -}}
|
||||
{{- if .IndexType}}USING {{.IndexType}}{{end -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Uniqueness keyword */}}
|
||||
{{- define "unique_keyword" -}}
|
||||
{{- if .Unique}}UNIQUE {{end -}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Referential action clauses */}}
|
||||
{{- define "referential_actions" -}}
|
||||
{{- if .OnDelete}}
|
||||
ON DELETE {{.OnDelete}}
|
||||
{{- end}}
|
||||
{{- if .OnUpdate}}
|
||||
ON UPDATE {{.OnUpdate}}
|
||||
{{- end}}
|
||||
{{- end -}}
|
||||
|
||||
{{/* Comment statement */}}
|
||||
{{- define "comment_on" -}}
|
||||
COMMENT ON {{.ObjectType}} {{.ObjectName}} IS {{quote .Comment}};
|
||||
{{- end -}}
|
||||
Reference in New Issue
Block a user