sql writer
Some checks are pending
CI / Test (1.23) (push) Waiting to run
CI / Test (1.24) (push) Waiting to run
CI / Test (1.25) (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Build (push) Waiting to run

This commit is contained in:
2025-12-17 20:44:02 +02:00
parent 40bc0be1cb
commit 5e1448dcdb
48 changed files with 4592 additions and 950 deletions

View File

@@ -1,258 +0,0 @@
# PostgreSQL Migration Writer
## Overview
The PostgreSQL Migration Writer implements database schema inspection and differential migration generation, following the same approach as the `pgsql_meta_upgrade` migration system. It compares a desired model (target schema) against the current database state and generates the necessary SQL migration scripts.
## Migration Phases
The migration writer follows a phased approach with specific priorities to ensure proper execution order:
### Phase 1: Drops (Priority 11-50)
- Drop changed constraints (Priority 11)
- Drop changed indexes (Priority 20)
- Drop changed foreign keys (Priority 50)
### Phase 2: Renames (Priority 60-90)
- Rename tables (Priority 60)
- Rename columns (Priority 90)
- *Note: Currently requires manual handling or metadata for rename detection*
### Phase 3: Tables & Columns (Priority 100-145)
- Create new tables (Priority 100)
- Add new columns (Priority 120)
- Alter column types (Priority 120)
- Alter column defaults (Priority 145)
### Phase 4: Indexes (Priority 160-180)
- Create primary keys (Priority 160)
- Create indexes (Priority 180)
### Phase 5: Foreign Keys (Priority 195)
- Create foreign key constraints
### Phase 6: Comments (Priority 200+)
- Add table and column comments
## Usage
### 1. Inspect Current Database
```go
import (
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
)
// Create reader with connection string
options := &readers.ReaderOptions{
ConnectionString: "host=localhost port=5432 dbname=mydb user=postgres password=secret",
}
reader := pgsql.NewReader(options)
// Read current database state
currentDB, err := reader.ReadDatabase()
if err != nil {
log.Fatal(err)
}
```
### 2. Define Desired Model
```go
import "git.warky.dev/wdevs/relspecgo/pkg/models"
// Create desired model (could be loaded from DBML, JSON, etc.)
modelDB := models.InitDatabase("mydb")
schema := models.InitSchema("public")
// Define table
table := models.InitTable("users", "public")
table.Description = "User accounts"
// Add columns
idCol := models.InitColumn("id", "users", "public")
idCol.Type = "integer"
idCol.NotNull = true
idCol.IsPrimaryKey = true
table.Columns["id"] = idCol
nameCol := models.InitColumn("name", "users", "public")
nameCol.Type = "text"
nameCol.NotNull = true
table.Columns["name"] = nameCol
emailCol := models.InitColumn("email", "users", "public")
emailCol.Type = "text"
table.Columns["email"] = emailCol
// Add primary key constraint
pkConstraint := &models.Constraint{
Name: "pk_users",
Type: models.PrimaryKeyConstraint,
Columns: []string{"id"},
}
table.Constraints["pk_users"] = pkConstraint
// Add unique index
emailIndex := &models.Index{
Name: "uk_users_email",
Unique: true,
Columns: []string{"email"},
}
table.Indexes["uk_users_email"] = emailIndex
schema.Tables = append(schema.Tables, table)
modelDB.Schemas = append(modelDB.Schemas, schema)
```
### 3. Generate Migration
```go
import (
"git.warky.dev/wdevs/relspecgo/pkg/writers"
"git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
)
// Create migration writer
writerOptions := &writers.WriterOptions{
OutputPath: "migration_001.sql",
}
migrationWriter := pgsql.NewMigrationWriter(writerOptions)
// Generate migration comparing model vs current
err = migrationWriter.WriteMigration(modelDB, currentDB)
if err != nil {
log.Fatal(err)
}
```
## Example Migration Output
```sql
-- PostgreSQL Migration Script
-- Generated by RelSpec
-- Source: mydb -> mydb
-- Priority: 11 | Type: drop constraint | Object: public.users.old_constraint
ALTER TABLE public.users DROP CONSTRAINT IF EXISTS old_constraint;
-- Priority: 100 | Type: create table | Object: public.orders
CREATE TABLE IF NOT EXISTS public.orders (
id integer NOT NULL,
user_id integer,
total numeric(10,2) DEFAULT 0.00,
created_at timestamp DEFAULT CURRENT_TIMESTAMP
);
-- Priority: 120 | Type: create column | Object: public.users.phone
ALTER TABLE public.users
ADD COLUMN IF NOT EXISTS phone text;
-- Priority: 120 | Type: alter column type | Object: public.users.age
ALTER TABLE public.users
ALTER COLUMN age TYPE integer;
-- Priority: 160 | Type: create primary key | Object: public.orders.pk_orders
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = 'public'
AND table_name = 'orders'
AND constraint_name = 'pk_orders'
) THEN
ALTER TABLE public.orders
ADD CONSTRAINT pk_orders PRIMARY KEY (id);
END IF;
END;
$$;
-- Priority: 180 | Type: create index | Object: public.users.idx_users_email
CREATE INDEX IF NOT EXISTS idx_users_email
ON public.users USING btree (email);
-- Priority: 195 | Type: create foreign key | Object: public.orders.fk_orders_users
ALTER TABLE public.orders
DROP CONSTRAINT IF EXISTS fk_orders_users;
ALTER TABLE public.orders
ADD CONSTRAINT fk_orders_users
FOREIGN KEY (user_id)
REFERENCES public.users (id)
ON DELETE CASCADE
ON UPDATE CASCADE
DEFERRABLE;
-- Priority: 200 | Type: comment on table | Object: public.users
COMMENT ON TABLE public.users IS 'User accounts';
-- Priority: 200 | Type: comment on column | Object: public.users.email
COMMENT ON COLUMN public.users.email IS 'User email address';
```
## Migration Script Structure
Each migration script includes:
- **ObjectName**: Fully qualified name of the object being modified
- **ObjectType**: Type of operation (create table, alter column, etc.)
- **Schema**: Schema name
- **Priority**: Execution order priority (lower runs first)
- **Sequence**: Sub-ordering within same priority
- **Body**: The actual SQL statement
## Comparison Logic
The migration writer compares objects using:
### Tables
- Existence check by name (case-insensitive)
- New tables generate CREATE TABLE statements
### Columns
- Existence check within tables
- Type changes generate ALTER COLUMN TYPE
- Default value changes generate SET/DROP DEFAULT
- New columns generate ADD COLUMN
### Constraints
- Compared by type, columns, and referenced objects
- Changed constraints are dropped and recreated
### Indexes
- Compared by uniqueness and column list
- Changed indexes are dropped and recreated
### Foreign Keys
- Compared by columns, referenced table/columns, and actions
- Changed foreign keys are dropped and recreated
## Best Practices
1. **Always Review Generated Migrations**: Manually review SQL before execution
2. **Test on Non-Production First**: Apply migrations to development/staging environments first
3. **Backup Before Migration**: Create database backup before running migrations
4. **Use Transactions**: Wrap migrations in transactions when possible
5. **Handle Renames Carefully**: Column/table renames may appear as DROP + CREATE without metadata
6. **Consider Data Migration**: Generated SQL handles structure only; data migration may be needed
## Limitations
1. **Rename Detection**: Automatic rename detection not implemented; requires GUID or metadata matching
2. **Data Type Conversions**: Some type changes may require custom USING clauses
3. **Complex Constraints**: CHECK constraints with complex expressions may need manual handling
4. **Sequence Values**: Current sequence values not automatically synced
5. **Permissions**: Schema and object permissions not included in migrations
## Integration with Migration System
This implementation follows the same logic as the SQL migration system in `examples/pgsql_meta_upgrade`:
- `migration_inspect.sql` → Reader (pkg/readers/pgsql)
- `migration_build.sql` → MigrationWriter (pkg/writers/pgsql)
- `migration_run.sql` → External execution (psql, application code)
The phases, priorities, and script generation logic match the original migration system to ensure compatibility and consistency.

View File

@@ -0,0 +1,696 @@
# PostgreSQL Migration Templates
## Overview
The PostgreSQL migration writer uses Go text templates to generate SQL, making the code much more maintainable and customizable than hardcoded string concatenation.
## Architecture
```
pkg/writers/pgsql/
├── templates/ # Template files
│ ├── create_table.tmpl # CREATE TABLE
│ ├── add_column.tmpl # ALTER TABLE ADD COLUMN
│ ├── alter_column_type.tmpl # ALTER TABLE ALTER COLUMN TYPE
│ ├── alter_column_default.tmpl # ALTER TABLE ALTER COLUMN DEFAULT
│ ├── create_primary_key.tmpl # ADD CONSTRAINT PRIMARY KEY
│ ├── create_index.tmpl # CREATE INDEX
│ ├── create_foreign_key.tmpl # ADD CONSTRAINT FOREIGN KEY
│ ├── drop_constraint.tmpl # DROP CONSTRAINT
│ ├── drop_index.tmpl # DROP INDEX
│ ├── comment_table.tmpl # COMMENT ON TABLE
│ ├── comment_column.tmpl # COMMENT ON COLUMN
│ ├── audit_tables.tmpl # CREATE audit tables
│ ├── audit_function.tmpl # CREATE audit function
│ └── audit_trigger.tmpl # CREATE audit trigger
├── templates.go # Template executor and data structures
└── migration_writer_templated.go # Templated migration writer
```
## Using Templates
### Basic Usage
```go
// Create template executor
executor, err := pgsql.NewTemplateExecutor()
if err != nil {
log.Fatal(err)
}
// Prepare data
data := pgsql.CreateTableData{
SchemaName: "public",
TableName: "users",
Columns: []pgsql.ColumnData{
{Name: "id", Type: "integer", NotNull: true},
{Name: "name", Type: "text"},
},
}
// Execute template
sql, err := executor.ExecuteCreateTable(data)
if err != nil {
log.Fatal(err)
}
fmt.Println(sql)
```
### Using Templated Migration Writer
```go
// Create templated migration writer
writer, err := pgsql.NewTemplatedMigrationWriter(&writers.WriterOptions{
OutputPath: "migration.sql",
})
if err != nil {
log.Fatal(err)
}
// Generate migration (uses templates internally)
err = writer.WriteMigration(modelDB, currentDB)
if err != nil {
log.Fatal(err)
}
```
## Template Data Structures
### CreateTableData
For `create_table.tmpl`:
```go
type CreateTableData struct {
SchemaName string
TableName string
Columns []ColumnData
}
type ColumnData struct {
Name string
Type string
Default string
NotNull bool
}
```
Example:
```go
data := CreateTableData{
SchemaName: "public",
TableName: "products",
Columns: []ColumnData{
{Name: "id", Type: "serial", NotNull: true},
{Name: "name", Type: "text", NotNull: true},
{Name: "price", Type: "numeric(10,2)", Default: "0.00"},
},
}
```
### AddColumnData
For `add_column.tmpl`:
```go
type AddColumnData struct {
SchemaName string
TableName string
ColumnName string
ColumnType string
Default string
NotNull bool
}
```
### CreateIndexData
For `create_index.tmpl`:
```go
type CreateIndexData struct {
SchemaName string
TableName string
IndexName string
IndexType string // btree, hash, gin, gist
Columns string // comma-separated
Unique bool
}
```
### CreateForeignKeyData
For `create_foreign_key.tmpl`:
```go
type CreateForeignKeyData struct {
SchemaName string
TableName string
ConstraintName string
SourceColumns string // comma-separated
TargetSchema string
TargetTable string
TargetColumns string // comma-separated
OnDelete string // CASCADE, SET NULL, etc.
OnUpdate string
}
```
### AuditFunctionData
For `audit_function.tmpl`:
```go
type AuditFunctionData struct {
SchemaName string
FunctionName string
TableName string
TablePrefix string
PrimaryKey string
AuditSchema string
UserFunction string
AuditInsert bool
AuditUpdate bool
AuditDelete bool
UpdateCondition string
UpdateColumns []AuditColumnData
DeleteColumns []AuditColumnData
}
type AuditColumnData struct {
Name string
OldValue string // SQL expression for old value
NewValue string // SQL expression for new value
}
```
## Customizing Templates
### Modifying Existing Templates
Templates are embedded in the binary but can be modified at compile time:
1. **Edit template file** in `pkg/writers/pgsql/templates/`:
```go
// templates/create_table.tmpl
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
{{- range $i, $col := .Columns}}
{{- if $i}},{{end}}
{{$col.Name}} {{$col.Type}}
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
{{- if $col.NotNull}} NOT NULL{{end}}
{{- end}}
);
-- Custom comment
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}} IS 'Auto-generated by RelSpec';
```
2. **Rebuild** the application:
```bash
go build ./cmd/relspec
```
The new template is automatically embedded.
### Template Syntax Reference
#### Variables
```go
{{.FieldName}} // Access field
{{.SchemaName}} // String field
{{.NotNull}} // Boolean field
```
#### Conditionals
```go
{{if .NotNull}}
NOT NULL
{{end}}
{{if .Default}}
DEFAULT {{.Default}}
{{else}}
-- No default
{{end}}
```
#### Loops
```go
{{range $i, $col := .Columns}}
Column: {{$col.Name}} Type: {{$col.Type}}
{{end}}
```
#### Functions
```go
{{if eq .Type "CASCADE"}}
ON DELETE CASCADE
{{end}}
{{join .Columns ", "}} // Join string slice
```
### Creating New Templates
1. **Create template file** in `pkg/writers/pgsql/templates/`:
```go
// templates/custom_operation.tmpl
-- Custom operation for {{.TableName}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
{{.CustomOperation}};
```
2. **Define data structure** in `templates.go`:
```go
type CustomOperationData struct {
SchemaName string
TableName string
CustomOperation string
}
```
3. **Add executor method** in `templates.go`:
```go
func (te *TemplateExecutor) ExecuteCustomOperation(data CustomOperationData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "custom_operation.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute custom_operation template: %w", err)
}
return buf.String(), nil
}
```
4. **Use in migration writer**:
```go
sql, err := w.executor.ExecuteCustomOperation(CustomOperationData{
SchemaName: "public",
TableName: "users",
CustomOperation: "ADD COLUMN custom_field text",
})
```
## Template Examples
### Example 1: Custom Table Creation
Modify `create_table.tmpl` to add table options:
```sql
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
{{- range $i, $col := .Columns}}
{{- if $i}},{{end}}
{{$col.Name}} {{$col.Type}}
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
{{- if $col.NotNull}} NOT NULL{{end}}
{{- end}}
) WITH (fillfactor = 90);
-- Add automatic comment
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}}
IS 'Created: {{.CreatedDate}} | Version: {{.Version}}';
```
### Example 2: Custom Index with WHERE Clause
Add to `create_index.tmpl`:
```sql
CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{.IndexName}}
ON {{.SchemaName}}.{{.TableName}}
USING {{.IndexType}} ({{.Columns}})
{{- if .Where}}
WHERE {{.Where}}
{{- end}}
{{- if .Include}}
INCLUDE ({{.Include}})
{{- end}};
```
Update data structure:
```go
type CreateIndexData struct {
SchemaName string
TableName string
IndexName string
IndexType string
Columns string
Unique bool
Where string // New field for partial indexes
Include string // New field for covering indexes
}
```
### Example 3: Enhanced Audit Function
Modify `audit_function.tmpl` to add custom logging:
```sql
CREATE OR REPLACE FUNCTION {{.SchemaName}}.{{.FunctionName}}()
RETURNS trigger AS
$body$
DECLARE
m_funcname text = '{{.FunctionName}}';
m_user text;
m_atevent integer;
m_application_name text;
BEGIN
-- Get current user and application
m_user := {{.UserFunction}}::text;
m_application_name := current_setting('application_name', true);
-- Custom logging
RAISE NOTICE 'Audit: % on %.% by % from %',
TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME, m_user, m_application_name;
-- Rest of function...
...
```
## Best Practices
### 1. Keep Templates Simple
Templates should focus on SQL generation. Complex logic belongs in Go code:
**Good:**
```go
// In Go code
columns := buildColumnList(table)
// In template
{{range .Columns}}
{{.Name}} {{.Type}}
{{end}}
```
**Bad:**
```go
// Don't do complex transformations in templates
{{range .Columns}}
{{if eq .Type "integer"}}
{{.Name}} serial
{{else}}
{{.Name}} {{.Type}}
{{end}}
{{end}}
```
### 2. Use Descriptive Field Names
```go
// Good
type CreateTableData struct {
SchemaName string
TableName string
}
// Bad
type CreateTableData struct {
S string // What is S?
T string // What is T?
}
```
### 3. Document Template Data
Always document what data a template expects:
```go
// CreateTableData contains data for create table template.
// Used by templates/create_table.tmpl
type CreateTableData struct {
SchemaName string // Schema where table will be created
TableName string // Name of the table
Columns []ColumnData // List of columns to create
}
```
### 4. Handle SQL Injection
Always escape user input:
```go
// In Go code - escape before passing to template
data := CommentTableData{
SchemaName: schema,
TableName: table,
Comment: escapeQuote(userComment), // Escape quotes
}
```
### 5. Test Templates Thoroughly
```go
func TestTemplate_CreateTable(t *testing.T) {
executor, _ := NewTemplateExecutor()
data := CreateTableData{
SchemaName: "public",
TableName: "test",
Columns: []ColumnData{{Name: "id", Type: "integer"}},
}
sql, err := executor.ExecuteCreateTable(data)
if err != nil {
t.Fatal(err)
}
// Verify expected SQL patterns
if !strings.Contains(sql, "CREATE TABLE") {
t.Error("Missing CREATE TABLE")
}
}
```
## Benefits of Template-Based Approach
### Maintainability
**Before (string concatenation):**
```go
sql := fmt.Sprintf(`CREATE TABLE %s.%s (
%s %s%s%s
);`, schema, table, col, typ,
func() string {
if def != "" {
return " DEFAULT " + def
}
return ""
}(),
func() string {
if notNull {
return " NOT NULL"
}
return ""
}(),
)
```
**After (templates):**
```go
sql, _ := executor.ExecuteCreateTable(CreateTableData{
SchemaName: schema,
TableName: table,
Columns: columns,
})
```
### Customization
Users can modify templates without changing Go code:
- Edit template file
- Rebuild application
- New SQL generation logic active
### Testing
Templates can be tested independently:
```go
func TestAuditTemplate(t *testing.T) {
executor, _ := NewTemplateExecutor()
// Test with various data
for _, testCase := range testCases {
sql, err := executor.ExecuteAuditFunction(testCase.data)
// Verify output
}
}
```
### Readability
SQL templates are easier to read and review than Go string building code.
## Migration from Old Writer
To migrate from the old string-based writer to templates:
### Option 1: Use TemplatedMigrationWriter
```go
// Old
writer := pgsql.NewMigrationWriter(options)
// New
writer, err := pgsql.NewTemplatedMigrationWriter(options)
if err != nil {
log.Fatal(err)
}
// Same interface
writer.WriteMigration(model, current)
```
### Option 2: Keep Both
Both writers are available:
- `MigrationWriter` - Original string-based
- `TemplatedMigrationWriter` - New template-based
Choose based on your needs.
## Troubleshooting
### Template Not Found
```
Error: template: "my_template.tmpl" not defined
```
Solution: Ensure template file exists in `templates/` directory and rebuild.
### Template Execution Error
```
Error: template: create_table.tmpl:5:10: executing "create_table.tmpl"
at <.InvalidField>: can't evaluate field InvalidField
```
Solution: Check data structure has all fields used in template.
### Embedded Files Not Updating
If template changes aren't reflected:
1. Clean build cache: `go clean -cache`
2. Rebuild: `go build ./cmd/relspec`
3. Verify template file is in `templates/` directory
## Custom Template Functions
RelSpec provides a comprehensive library of template functions for SQL generation:
### String Manipulation
- `upper`, `lower` - Case conversion
- `snake_case`, `camelCase` - Naming convention conversion
- Usage: `{{upper .TableName}}` → `USERS`
### SQL Formatting
- `indent(spaces, text)` - Indent text
- `quote(string)` - Quote for SQL with escaping
- `escape(string)` - Escape special characters
- `safe_identifier(string)` - Make SQL-safe identifier
- Usage: `{{quote "O'Brien"}}` → `'O''Brien'`
### Type Conversion
- `goTypeToSQL(type)` - Convert Go type to PostgreSQL type
- `sqlTypeToGo(type)` - Convert PostgreSQL type to Go type
- `isNumeric(type)`, `isText(type)` - Type checking
- Usage: `{{goTypeToSQL "int64"}}` → `bigint`
### Collection Helpers
- `first(slice)`, `last(slice)` - Get elements
- `join_with(slice, sep)` - Join with custom separator
- Usage: `{{join_with .Columns ", "}}` → `id, name, email`
See [template_functions.go](template_functions.go) for full documentation.
## Template Inheritance and Composition
RelSpec supports Go template inheritance using `{{template}}` and `{{block}}`:
### Base Templates
- `base_ddl.tmpl` - Common DDL patterns
- `base_constraint.tmpl` - Constraint operations
- `fragments.tmpl` - Reusable fragments
### Using Fragments
```gotmpl
{{/* Use predefined fragments */}}
CREATE TABLE {{template "qualified_table" .}} (
{{range .Columns}}
{{template "column_definition" .}}
{{end}}
);
```
### Template Blocks
```gotmpl
{{/* Define with override capability */}}
{{define "table_options"}}
) {{block "storage_options" .}}WITH (fillfactor = 90){{end}};
{{end}}
```
See [TEMPLATE_INHERITANCE.md](TEMPLATE_INHERITANCE.md) for detailed guide.
## Visual Template Editor
A VS Code extension is available for visual template editing:
### Features
- **Live Preview** - See rendered SQL as you type
- **IntelliSense** - Auto-completion for functions
- **Validation** - Syntax checking and error highlighting
- **Scaffolding** - Quick template creation
- **Function Browser** - Browse available functions
### Installation
```bash
cd vscode-extension
npm install
npm run compile
code .
# Press F5 to launch
```
See [vscode-extension/README.md](../../vscode-extension/README.md) for full documentation.
## Future Enhancements
Completed:
- [x] Template inheritance/composition
- [x] Custom template functions library
- [x] Visual template editor (VS Code)
Potential future improvements:
- [ ] Parameterized templates (load from config)
- [ ] Template validation CLI tool
- [ ] Template library/marketplace
- [ ] Template versioning
- [ ] Hot-reload during development
## Contributing Templates
When contributing new templates:
1. Place in `pkg/writers/pgsql/templates/`
2. Use `.tmpl` extension
3. Document data structure in `templates.go`
4. Add executor method
5. Write tests
6. Update this documentation

View File

@@ -0,0 +1,74 @@
package pgsql
import (
"fmt"
)
// AuditConfig defines audit configuration for tables.
type AuditConfig struct {
	// EnabledTables maps table keys ("schema.table", or just "table" for
	// schema-independent entries) to per-table audit settings.
	EnabledTables map[string]*TableAuditConfig
	// AuditSchema is the schema where audit tables are created
	// (default: "public", set by NewAuditConfig).
	AuditSchema string
	// UserFunction is the SQL function used to resolve the current user
	// (default: current_user).
	UserFunction string
}

// TableAuditConfig defines audit settings for a specific table.
type TableAuditConfig struct {
	// TableName is the name of the table to audit.
	TableName string
	// SchemaName is the schema of the table.
	SchemaName string
	// TablePrefix is kept for compatibility with the old audit system.
	TablePrefix string
	// AuditInsert tracks INSERT operations.
	AuditInsert bool
	// AuditUpdate tracks UPDATE operations.
	AuditUpdate bool
	// AuditDelete tracks DELETE operations.
	AuditDelete bool
	// ExcludedColumns are columns to skip from audit.
	ExcludedColumns []string
	// EncryptedColumns are columns to hide in audit output (shown as ***).
	EncryptedColumns []string
}

// NewAuditConfig creates a default audit configuration with no tables
// enabled, audit objects placed in the "public" schema, and current_user
// as the user-resolution function.
func NewAuditConfig() *AuditConfig {
	return &AuditConfig{
		EnabledTables: make(map[string]*TableAuditConfig),
		AuditSchema:   "public",
		UserFunction:  "current_user",
	}
}

// EnableTableAudit enables auditing for schemaName.tableName with default
// settings (all of INSERT/UPDATE/DELETE tracked) and returns the per-table
// config so the caller can customize it further.
func (ac *AuditConfig) EnableTableAudit(schemaName, tableName string) *TableAuditConfig {
	config := &TableAuditConfig{
		TableName:   tableName,
		SchemaName:  schemaName,
		TablePrefix: "",
		AuditInsert: true,
		AuditUpdate: true,
		AuditDelete: true,
		// Bookkeeping columns that should never be audited by default.
		ExcludedColumns:  []string{"updatecnt", "prefix"},
		EncryptedColumns: []string{},
	}
	ac.EnabledTables[auditTableKey(schemaName, tableName)] = config
	return config
}

// IsTableAudited reports whether a table is configured for auditing,
// matching either the fully qualified "schema.table" key or a bare
// "table" key, as documented on EnabledTables.
func (ac *AuditConfig) IsTableAudited(schemaName, tableName string) bool {
	if _, ok := ac.EnabledTables[auditTableKey(schemaName, tableName)]; ok {
		return true
	}
	_, ok := ac.EnabledTables[tableName]
	return ok
}

// GetTableConfig returns the audit config for a specific table, or nil
// when the table is not configured. Lookup tries the qualified
// "schema.table" key first, then falls back to the bare table name.
func (ac *AuditConfig) GetTableConfig(schemaName, tableName string) *TableAuditConfig {
	if cfg, ok := ac.EnabledTables[auditTableKey(schemaName, tableName)]; ok {
		return cfg
	}
	return ac.EnabledTables[tableName]
}

// auditTableKey builds the canonical "schema.table" map key.
func auditTableKey(schemaName, tableName string) string {
	return fmt.Sprintf("%s.%s", schemaName, tableName)
}

View File

@@ -11,13 +11,7 @@ import (
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
// MigrationWriter generates differential migration SQL scripts
type MigrationWriter struct {
options *writers.WriterOptions
writer io.Writer
}
// MigrationScript represents a single migration script with priority and sequence
// MigrationScript represents a single migration script with priority
type MigrationScript struct {
ObjectName string
ObjectType string
@@ -27,14 +21,27 @@ type MigrationScript struct {
Body string
}
// NewMigrationWriter creates a new migration writer
func NewMigrationWriter(options *writers.WriterOptions) *MigrationWriter {
return &MigrationWriter{
options: options,
}
// MigrationWriter generates differential migration SQL scripts using templates
type MigrationWriter struct {
options *writers.WriterOptions
writer io.Writer
executor *TemplateExecutor
}
// WriteMigration generates migration scripts by comparing model (desired) vs current (actual) database
// NewMigrationWriter creates a new templated migration writer
func NewMigrationWriter(options *writers.WriterOptions) (*MigrationWriter, error) {
executor, err := NewTemplateExecutor()
if err != nil {
return nil, fmt.Errorf("failed to create template executor: %w", err)
}
return &MigrationWriter{
options: options,
executor: executor,
}, nil
}
// WriteMigration generates migration scripts using templates
func (w *MigrationWriter) WriteMigration(model *models.Database, current *models.Database) error {
var writer io.Writer
var file *os.File
@@ -56,9 +63,26 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
w.writer = writer
// Check if audit is configured in metadata
var auditConfig *AuditConfig
if w.options.Metadata != nil {
if ac, ok := w.options.Metadata["audit_config"].(*AuditConfig); ok {
auditConfig = ac
}
}
// Generate all migration scripts
scripts := make([]MigrationScript, 0)
// Generate audit tables if needed (priority 90)
if auditConfig != nil && len(auditConfig.EnabledTables) > 0 {
auditTableScript, err := w.generateAuditTablesScript(auditConfig)
if err != nil {
return fmt.Errorf("failed to generate audit tables: %w", err)
}
scripts = append(scripts, auditTableScript...)
}
// Process each schema in the model
for _, modelSchema := range model.Schemas {
// Find corresponding schema in current database
@@ -71,8 +95,20 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
}
// Generate schema-level scripts
schemaScripts := w.generateSchemaScripts(modelSchema, currentSchema)
schemaScripts, err := w.generateSchemaScripts(modelSchema, currentSchema)
if err != nil {
return fmt.Errorf("failed to generate schema scripts: %w", err)
}
scripts = append(scripts, schemaScripts...)
// Generate audit scripts for this schema (if configured)
if auditConfig != nil {
auditScripts, err := w.generateAuditScripts(modelSchema, auditConfig)
if err != nil {
return fmt.Errorf("failed to generate audit scripts: %w", err)
}
scripts = append(scripts, auditScripts...)
}
}
// Sort scripts by priority and sequence
@@ -98,37 +134,52 @@ func (w *MigrationWriter) WriteMigration(model *models.Database, current *models
return nil
}
// generateSchemaScripts generates migration scripts for a schema
func (w *MigrationWriter) generateSchemaScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateSchemaScripts generates migration scripts for a schema using templates
func (w *MigrationWriter) generateSchemaScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Phase 1: Drop constraints and indexes that changed (Priority 11-50)
if current != nil {
scripts = append(scripts, w.generateDropScripts(model, current)...)
}
// Phase 2: Rename tables and columns (Priority 60-90)
if current != nil {
scripts = append(scripts, w.generateRenameScripts(model, current)...)
dropScripts, err := w.generateDropScripts(model, current)
if err != nil {
return nil, fmt.Errorf("failed to generate drop scripts: %w", err)
}
scripts = append(scripts, dropScripts...)
}
// Phase 3: Create/Alter tables and columns (Priority 100-145)
scripts = append(scripts, w.generateTableScripts(model, current)...)
tableScripts, err := w.generateTableScripts(model, current)
if err != nil {
return nil, fmt.Errorf("failed to generate table scripts: %w", err)
}
scripts = append(scripts, tableScripts...)
// Phase 4: Create indexes (Priority 160-180)
scripts = append(scripts, w.generateIndexScripts(model, current)...)
indexScripts, err := w.generateIndexScripts(model, current)
if err != nil {
return nil, fmt.Errorf("failed to generate index scripts: %w", err)
}
scripts = append(scripts, indexScripts...)
// Phase 5: Create foreign keys (Priority 195)
scripts = append(scripts, w.generateForeignKeyScripts(model, current)...)
fkScripts, err := w.generateForeignKeyScripts(model, current)
if err != nil {
return nil, fmt.Errorf("failed to generate foreign key scripts: %w", err)
}
scripts = append(scripts, fkScripts...)
// Phase 6: Add comments (Priority 200+)
scripts = append(scripts, w.generateCommentScripts(model, current)...)
commentScripts, err := w.generateCommentScripts(model, current)
if err != nil {
return nil, fmt.Errorf("failed to generate comment scripts: %w", err)
}
scripts = append(scripts, commentScripts...)
return scripts
return scripts, nil
}
// generateDropScripts generates DROP scripts for removed/changed objects
func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateDropScripts generates DROP scripts using templates
func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Build map of model tables for quick lookup
@@ -142,35 +193,37 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
modelTable, existsInModel := modelTables[strings.ToLower(currentTable.Name)]
if !existsInModel {
// Table will be dropped, skip individual constraint drops
continue
}
// Check each constraint in current database
for constraintName, currentConstraint := range currentTable.Constraints {
// Check if constraint exists in model
modelConstraint, existsInModel := modelTable.Constraints[constraintName]
shouldDrop := false
if !existsInModel {
shouldDrop = true
} else if !constraintsEqual(modelConstraint, currentConstraint) {
// Constraint changed, drop and recreate
shouldDrop = true
}
if shouldDrop {
sql, err := w.executor.ExecuteDropConstraint(DropConstraintData{
SchemaName: current.Name,
TableName: currentTable.Name,
ConstraintName: constraintName,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", current.Name, currentTable.Name, constraintName),
ObjectType: "drop constraint",
Schema: current.Name,
Priority: 11,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s DROP CONSTRAINT IF EXISTS %s;",
current.Name, currentTable.Name, constraintName,
),
Body: sql,
}
scripts = append(scripts, script)
}
@@ -181,7 +234,6 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
modelIndex, existsInModel := modelTable.Indexes[indexName]
shouldDrop := false
if !existsInModel {
shouldDrop = true
} else if !indexesEqual(modelIndex, currentIndex) {
@@ -189,42 +241,32 @@ func (w *MigrationWriter) generateDropScripts(model *models.Schema, current *mod
}
if shouldDrop {
sql, err := w.executor.ExecuteDropIndex(DropIndexData{
SchemaName: current.Name,
IndexName: indexName,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", current.Name, currentTable.Name, indexName),
ObjectType: "drop index",
Schema: current.Name,
Priority: 20,
Sequence: len(scripts),
Body: fmt.Sprintf(
"DROP INDEX IF EXISTS %s.%s CASCADE;",
current.Name, indexName,
),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
return scripts
return scripts, nil
}
// generateRenameScripts generates RENAME scripts for renamed objects.
//
// Automatic rename detection is not implemented: telling a rename apart
// from a drop+create would require stable identifiers (GUID matching or
// similar metadata), so renames must currently be handled manually. An
// empty (but non-nil) slice is returned so callers can iterate/append
// safely.
func (w *MigrationWriter) generateRenameScripts(model *models.Schema, current *models.Schema) []MigrationScript {
	// Parameters are intentionally unused until rename detection exists;
	// the blank assignments keep linters quiet.
	_ = model
	_ = current
	return make([]MigrationScript, 0)
}
// generateTableScripts generates CREATE/ALTER TABLE scripts
func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateTableScripts generates CREATE/ALTER TABLE scripts using templates
func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Build map of current tables
@@ -241,59 +283,35 @@ func (w *MigrationWriter) generateTableScripts(model *models.Schema, current *mo
if !exists {
// Table doesn't exist, create it
script := w.generateCreateTableScript(model, modelTable)
sql, err := w.executor.ExecuteCreateTable(BuildCreateTableData(model.Name, modelTable))
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s", model.Name, modelTable.Name),
ObjectType: "create table",
Schema: model.Name,
Priority: 100,
Sequence: len(scripts),
Body: sql,
}
scripts = append(scripts, script)
} else {
// Table exists, check for column changes
alterScripts := w.generateAlterTableScripts(model, modelTable, currentTable)
alterScripts, err := w.generateAlterTableScripts(model, modelTable, currentTable)
if err != nil {
return nil, err
}
scripts = append(scripts, alterScripts...)
}
}
return scripts
return scripts, nil
}
// generateCreateTableScript builds a CREATE TABLE IF NOT EXISTS script
// for the given table, emitting one column definition per line with
// optional DEFAULT and NOT NULL clauses. Priority 100 places the script
// in the tables-and-columns phase of the migration.
func (w *MigrationWriter) generateCreateTableScript(schema *models.Schema, table *models.Table) MigrationScript {
	// Render each column as "  name type [DEFAULT v] [NOT NULL]" in the
	// deterministic order provided by getSortedColumns.
	defs := make([]string, 0, len(table.Columns))
	for _, column := range getSortedColumns(table.Columns) {
		def := fmt.Sprintf("  %s %s", column.Name, column.Type)
		if column.Default != nil {
			def += fmt.Sprintf(" DEFAULT %v", column.Default)
		}
		if column.NotNull {
			def += " NOT NULL"
		}
		defs = append(defs, def)
	}

	body := fmt.Sprintf(
		"CREATE TABLE IF NOT EXISTS %s.%s (\n%s\n);",
		schema.Name, table.Name, strings.Join(defs, ",\n"),
	)

	return MigrationScript{
		ObjectName: fmt.Sprintf("%s.%s", schema.Name, table.Name),
		ObjectType: "create table",
		Schema:     schema.Name,
		Priority:   100,
		Sequence:   0,
		Body:       body,
	}
}
// generateAlterTableScripts generates ALTER TABLE scripts for column changes
func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, modelTable *models.Table, currentTable *models.Table) []MigrationScript {
// generateAlterTableScripts generates ALTER TABLE scripts using templates
func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, modelTable *models.Table, currentTable *models.Table) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Build map of current columns
@@ -308,85 +326,93 @@ func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, model
if !exists {
// Column doesn't exist, add it
defaultVal := ""
if modelCol.Default != nil {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
}
sql, err := w.executor.ExecuteAddColumn(AddColumnData{
SchemaName: schema.Name,
TableName: modelTable.Name,
ColumnName: modelCol.Name,
ColumnType: modelCol.Type,
Default: defaultVal,
NotNull: modelCol.NotNull,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
ObjectType: "create column",
Schema: schema.Name,
Priority: 120,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s\n ADD COLUMN IF NOT EXISTS %s %s%s%s;",
schema.Name, modelTable.Name, modelCol.Name, modelCol.Type,
func() string {
if modelCol.Default != nil {
return fmt.Sprintf(" DEFAULT %v", modelCol.Default)
}
return ""
}(),
func() string {
if modelCol.NotNull {
return " NOT NULL"
}
return ""
}(),
),
Body: sql,
}
scripts = append(scripts, script)
} else if !columnsEqual(modelCol, currentCol) {
// Column exists but type or properties changed
// Column exists but properties changed
if modelCol.Type != currentCol.Type {
sql, err := w.executor.ExecuteAlterColumnType(AlterColumnTypeData{
SchemaName: schema.Name,
TableName: modelTable.Name,
ColumnName: modelCol.Name,
NewType: modelCol.Type,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
ObjectType: "alter column type",
Schema: schema.Name,
Priority: 120,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s\n ALTER COLUMN %s TYPE %s;",
schema.Name, modelTable.Name, modelCol.Name, modelCol.Type,
),
Body: sql,
}
scripts = append(scripts, script)
}
// Check default value changes
if fmt.Sprintf("%v", modelCol.Default) != fmt.Sprintf("%v", currentCol.Default) {
if modelCol.Default != nil {
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
ObjectType: "alter column default",
Schema: schema.Name,
Priority: 145,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s\n ALTER COLUMN %s SET DEFAULT %v;",
schema.Name, modelTable.Name, modelCol.Name, modelCol.Default,
),
}
scripts = append(scripts, script)
} else {
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
ObjectType: "alter column default",
Schema: schema.Name,
Priority: 145,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s\n ALTER COLUMN %s DROP DEFAULT;",
schema.Name, modelTable.Name, modelCol.Name,
),
}
scripts = append(scripts, script)
setDefault := modelCol.Default != nil
defaultVal := ""
if setDefault {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
}
sql, err := w.executor.ExecuteAlterColumnDefault(AlterColumnDefaultData{
SchemaName: schema.Name,
TableName: modelTable.Name,
ColumnName: modelCol.Name,
SetDefault: setDefault,
DefaultValue: defaultVal,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", schema.Name, modelTable.Name, modelCol.Name),
ObjectType: "alter column default",
Schema: schema.Name,
Priority: 145,
Sequence: len(scripts),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
return scripts
return scripts, nil
}
// generateIndexScripts generates CREATE INDEX scripts
func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateIndexScripts generates CREATE INDEX scripts using templates
func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Build map of current tables
@@ -401,47 +427,7 @@ func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *mo
for _, modelTable := range model.Tables {
currentTable := currentTables[strings.ToLower(modelTable.Name)]
// Process each index in model
for indexName, modelIndex := range modelTable.Indexes {
shouldCreate := true
// Check if index exists in current
if currentTable != nil {
if currentIndex, exists := currentTable.Indexes[indexName]; exists {
if indexesEqual(modelIndex, currentIndex) {
shouldCreate = false
}
}
}
if shouldCreate {
unique := ""
if modelIndex.Unique {
unique = "UNIQUE "
}
indexType := "btree"
if modelIndex.Type != "" {
indexType = modelIndex.Type
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, indexName),
ObjectType: "create index",
Schema: model.Name,
Priority: 180,
Sequence: len(scripts),
Body: fmt.Sprintf(
"CREATE %sINDEX IF NOT EXISTS %s\n ON %s.%s USING %s (%s);",
unique, indexName, model.Name, modelTable.Name, indexType,
strings.Join(modelIndex.Columns, ", "),
),
}
scripts = append(scripts, script)
}
}
// Add primary key constraint if it exists
// Process primary keys first
for constraintName, constraint := range modelTable.Constraints {
if constraint.Type == models.PrimaryKeyConstraint {
shouldCreate := true
@@ -455,39 +441,82 @@ func (w *MigrationWriter) generateIndexScripts(model *models.Schema, current *mo
}
if shouldCreate {
sql, err := w.executor.ExecuteCreatePrimaryKey(CreatePrimaryKeyData{
SchemaName: model.Name,
TableName: modelTable.Name,
ConstraintName: constraintName,
Columns: strings.Join(constraint.Columns, ", "),
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, constraintName),
ObjectType: "create primary key",
Schema: model.Name,
Priority: 160,
Sequence: len(scripts),
Body: fmt.Sprintf(
"DO $$\nBEGIN\n IF NOT EXISTS (\n"+
" SELECT 1 FROM information_schema.table_constraints\n"+
" WHERE table_schema = '%s'\n"+
" AND table_name = '%s'\n"+
" AND constraint_name = '%s'\n"+
" ) THEN\n"+
" ALTER TABLE %s.%s\n"+
" ADD CONSTRAINT %s PRIMARY KEY (%s);\n"+
" END IF;\n"+
"END;\n$$;",
model.Name, modelTable.Name, constraintName,
model.Name, modelTable.Name, constraintName,
strings.Join(constraint.Columns, ", "),
),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
// Process indexes
for indexName, modelIndex := range modelTable.Indexes {
// Skip primary key indexes
if strings.HasPrefix(strings.ToLower(indexName), "pk_") {
continue
}
shouldCreate := true
if currentTable != nil {
if currentIndex, exists := currentTable.Indexes[indexName]; exists {
if indexesEqual(modelIndex, currentIndex) {
shouldCreate = false
}
}
}
if shouldCreate {
indexType := "btree"
if modelIndex.Type != "" {
indexType = modelIndex.Type
}
sql, err := w.executor.ExecuteCreateIndex(CreateIndexData{
SchemaName: model.Name,
TableName: modelTable.Name,
IndexName: indexName,
IndexType: indexType,
Columns: strings.Join(modelIndex.Columns, ", "),
Unique: modelIndex.Unique,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, indexName),
ObjectType: "create index",
Schema: model.Name,
Priority: 180,
Sequence: len(scripts),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
return scripts
return scripts, nil
}
// generateForeignKeyScripts generates ADD CONSTRAINT FOREIGN KEY scripts
func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateForeignKeyScripts generates ADD CONSTRAINT FOREIGN KEY scripts using templates
func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Build map of current tables
@@ -510,7 +539,6 @@ func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, curren
shouldCreate := true
// Check if constraint exists in current
if currentTable != nil {
if currentConstraint, exists := currentTable.Constraints[constraintName]; exists {
if constraintsEqual(constraint, currentConstraint) {
@@ -530,59 +558,62 @@ func (w *MigrationWriter) generateForeignKeyScripts(model *models.Schema, curren
onUpdate = strings.ToUpper(constraint.OnUpdate)
}
sql, err := w.executor.ExecuteCreateForeignKey(CreateForeignKeyData{
SchemaName: model.Name,
TableName: modelTable.Name,
ConstraintName: constraintName,
SourceColumns: strings.Join(constraint.Columns, ", "),
TargetSchema: constraint.ReferencedSchema,
TargetTable: constraint.ReferencedTable,
TargetColumns: strings.Join(constraint.ReferencedColumns, ", "),
OnDelete: onDelete,
OnUpdate: onUpdate,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, constraintName),
ObjectType: "create foreign key",
Schema: model.Name,
Priority: 195,
Sequence: len(scripts),
Body: fmt.Sprintf(
"ALTER TABLE %s.%s\n"+
" DROP CONSTRAINT IF EXISTS %s;\n\n"+
"ALTER TABLE %s.%s\n"+
" ADD CONSTRAINT %s\n"+
" FOREIGN KEY (%s)\n"+
" REFERENCES %s.%s (%s)\n"+
" ON DELETE %s\n"+
" ON UPDATE %s\n"+
" DEFERRABLE;",
model.Name, modelTable.Name, constraintName,
model.Name, modelTable.Name, constraintName,
strings.Join(constraint.Columns, ", "),
constraint.ReferencedSchema, constraint.ReferencedTable,
strings.Join(constraint.ReferencedColumns, ", "),
onDelete, onUpdate,
),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
return scripts
return scripts, nil
}
// generateCommentScripts generates COMMENT ON scripts
func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *models.Schema) []MigrationScript {
// generateCommentScripts generates COMMENT ON scripts using templates
func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *models.Schema) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Suppress unused parameter warning (current not used yet, could be used for diffing)
_ = current
_ = current // TODO: Compare with current schema to only add new/changed comments
// Process each model table
for _, modelTable := range model.Tables {
// Table comment
if modelTable.Description != "" {
sql, err := w.executor.ExecuteCommentTable(CommentTableData{
SchemaName: model.Name,
TableName: modelTable.Name,
Comment: escapeQuote(modelTable.Description),
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s", model.Name, modelTable.Name),
ObjectType: "comment on table",
Schema: model.Name,
Priority: 200,
Sequence: len(scripts),
Body: fmt.Sprintf(
"COMMENT ON TABLE %s.%s IS '%s';",
model.Name, modelTable.Name, escapeQuote(modelTable.Description),
),
Body: sql,
}
scripts = append(scripts, script)
}
@@ -590,79 +621,218 @@ func (w *MigrationWriter) generateCommentScripts(model *models.Schema, current *
// Column comments
for _, col := range modelTable.Columns {
if col.Description != "" {
sql, err := w.executor.ExecuteCommentColumn(CommentColumnData{
SchemaName: model.Name,
TableName: modelTable.Name,
ColumnName: col.Name,
Comment: escapeQuote(col.Description),
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s.%s", model.Name, modelTable.Name, col.Name),
ObjectType: "comment on column",
Schema: model.Name,
Priority: 200,
Sequence: len(scripts),
Body: fmt.Sprintf(
"COMMENT ON COLUMN %s.%s.%s IS '%s';",
model.Name, modelTable.Name, col.Name, escapeQuote(col.Description),
),
Body: sql,
}
scripts = append(scripts, script)
}
}
}
return scripts
return scripts, nil
}
// Comparison helper functions
// generateAuditTablesScript generates audit table creation scripts using templates
func (w *MigrationWriter) generateAuditTablesScript(auditConfig *AuditConfig) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
func constraintsEqual(a, b *models.Constraint) bool {
if a.Type != b.Type {
auditSchema := auditConfig.AuditSchema
if auditSchema == "" {
auditSchema = "public"
}
sql, err := w.executor.ExecuteAuditTables(AuditTablesData{
AuditSchema: auditSchema,
})
if err != nil {
return nil, err
}
script := MigrationScript{
ObjectName: fmt.Sprintf("%s.atevent+atdetail", auditSchema),
ObjectType: "create audit tables",
Schema: auditSchema,
Priority: 90,
Sequence: 0,
Body: sql,
}
scripts = append(scripts, script)
return scripts, nil
}
// generateAuditScripts generates audit functions and triggers using templates
func (w *MigrationWriter) generateAuditScripts(schema *models.Schema, auditConfig *AuditConfig) ([]MigrationScript, error) {
scripts := make([]MigrationScript, 0)
// Process each table in the schema
for _, table := range schema.Tables {
if !auditConfig.IsTableAudited(schema.Name, table.Name) {
continue
}
config := auditConfig.GetTableConfig(schema.Name, table.Name)
if config == nil {
continue
}
// Find primary key
pk := table.GetPrimaryKey()
if pk == nil {
continue
}
auditSchema := auditConfig.AuditSchema
if auditSchema == "" {
auditSchema = schema.Name
}
// Generate audit function
funcName := fmt.Sprintf("ft_audit_%s", table.Name)
funcData := BuildAuditFunctionData(schema.Name, table, pk, config, auditSchema, auditConfig.UserFunction)
funcSQL, err := w.executor.ExecuteAuditFunction(funcData)
if err != nil {
return nil, err
}
functionScript := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s", schema.Name, funcName),
ObjectType: "create audit function",
Schema: schema.Name,
Priority: 345,
Sequence: len(scripts),
Body: funcSQL,
}
scripts = append(scripts, functionScript)
// Generate audit trigger
triggerName := fmt.Sprintf("t_audit_%s", table.Name)
events := make([]string, 0)
if config.AuditInsert {
events = append(events, "INSERT")
}
if config.AuditUpdate {
events = append(events, "UPDATE")
}
if config.AuditDelete {
events = append(events, "DELETE")
}
if len(events) == 0 {
continue
}
triggerSQL, err := w.executor.ExecuteAuditTrigger(AuditTriggerData{
SchemaName: schema.Name,
TableName: table.Name,
TriggerName: triggerName,
FunctionName: funcName,
Events: strings.Join(events, " OR "),
})
if err != nil {
return nil, err
}
triggerScript := MigrationScript{
ObjectName: fmt.Sprintf("%s.%s", schema.Name, triggerName),
ObjectType: "create audit trigger",
Schema: schema.Name,
Priority: 355,
Sequence: len(scripts),
Body: triggerSQL,
}
scripts = append(scripts, triggerScript)
}
return scripts, nil
}
// Helper functions for comparing database objects
// columnsEqual checks if two columns have the same definition
func columnsEqual(col1, col2 *models.Column) bool {
if col1 == nil || col2 == nil {
return false
}
if len(a.Columns) != len(b.Columns) {
return strings.EqualFold(col1.Type, col2.Type) &&
col1.NotNull == col2.NotNull &&
fmt.Sprintf("%v", col1.Default) == fmt.Sprintf("%v", col2.Default)
}
// constraintsEqual checks if two constraints are equal
func constraintsEqual(c1, c2 *models.Constraint) bool {
if c1 == nil || c2 == nil {
return false
}
for i := range a.Columns {
if !strings.EqualFold(a.Columns[i], b.Columns[i]) {
if c1.Type != c2.Type {
return false
}
// Compare columns
if len(c1.Columns) != len(c2.Columns) {
return false
}
for i, col := range c1.Columns {
if !strings.EqualFold(col, c2.Columns[i]) {
return false
}
}
if a.Type == models.ForeignKeyConstraint {
if a.ReferencedTable != b.ReferencedTable || a.ReferencedSchema != b.ReferencedSchema {
// For foreign keys, also compare referenced table and columns
if c1.Type == models.ForeignKeyConstraint {
if !strings.EqualFold(c1.ReferencedTable, c2.ReferencedTable) {
return false
}
if len(a.ReferencedColumns) != len(b.ReferencedColumns) {
if len(c1.ReferencedColumns) != len(c2.ReferencedColumns) {
return false
}
for i := range a.ReferencedColumns {
if !strings.EqualFold(a.ReferencedColumns[i], b.ReferencedColumns[i]) {
for i, col := range c1.ReferencedColumns {
if !strings.EqualFold(col, c2.ReferencedColumns[i]) {
return false
}
}
if c1.OnDelete != c2.OnDelete || c1.OnUpdate != c2.OnUpdate {
return false
}
}
return true
}
func indexesEqual(a, b *models.Index) bool {
if a.Unique != b.Unique {
// indexesEqual checks if two indexes are equal
func indexesEqual(idx1, idx2 *models.Index) bool {
if idx1 == nil || idx2 == nil {
return false
}
if len(a.Columns) != len(b.Columns) {
if idx1.Unique != idx2.Unique {
return false
}
for i := range a.Columns {
if !strings.EqualFold(a.Columns[i], b.Columns[i]) {
if !strings.EqualFold(idx1.Type, idx2.Type) {
return false
}
if len(idx1.Columns) != len(idx2.Columns) {
return false
}
for i, col := range idx1.Columns {
if !strings.EqualFold(col, idx2.Columns[i]) {
return false
}
}
return true
}
// columnsEqual reports whether two column definitions are equivalent for
// migration purposes: same SQL type, same NOT NULL flag, and the same
// default value (defaults are compared via their fmt representation,
// which tolerates differing underlying Go types).
//
// Type names are compared case-insensitively because PostgreSQL reports
// type identifiers in lowercase regardless of how the model spells them
// (e.g. "INTEGER" vs "integer"), matching the case-insensitive column
// comparisons used by the other *Equal helpers in this file. A nil
// column is never equal to anything, guarding against accidental nil
// dereference by callers.
func columnsEqual(a, b *models.Column) bool {
	if a == nil || b == nil {
		return false
	}
	if !strings.EqualFold(a.Type, b.Type) {
		return false
	}
	if a.NotNull != b.NotNull {
		return false
	}
	return fmt.Sprintf("%v", a.Default) == fmt.Sprintf("%v", b.Default)
}

View File

@@ -34,10 +34,13 @@ func TestWriteMigration_NewTable(t *testing.T) {
// Generate migration
var buf bytes.Buffer
writer := NewMigrationWriter(&writers.WriterOptions{})
writer, err := NewMigrationWriter(&writers.WriterOptions{})
if err != nil {
t.Fatalf("Failed to create writer: %v", err)
}
writer.writer = &buf
err := writer.WriteMigration(model, current)
err = writer.WriteMigration(model, current)
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
}
@@ -54,234 +57,161 @@ func TestWriteMigration_NewTable(t *testing.T) {
}
}
func TestWriteMigration_AddColumn(t *testing.T) {
// Current database (with table but missing column)
func TestWriteMigration_WithAudit(t *testing.T) {
// Current database (empty)
current := models.InitDatabase("testdb")
currentSchema := models.InitSchema("public")
currentTable := models.InitTable("users", "public")
current.Schemas = append(current.Schemas, currentSchema)
// Model database (with table to audit)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("public")
table := models.InitTable("users", "public")
idCol := models.InitColumn("id", "users", "public")
idCol.Type = "integer"
currentTable.Columns["id"] = idCol
idCol.IsPrimaryKey = true
table.Columns["id"] = idCol
currentSchema.Tables = append(currentSchema.Tables, currentTable)
current.Schemas = append(current.Schemas, currentSchema)
nameCol := models.InitColumn("name", "users", "public")
nameCol.Type = "text"
table.Columns["name"] = nameCol
// Model database (with additional column)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("public")
modelTable := models.InitTable("users", "public")
passwordCol := models.InitColumn("password", "users", "public")
passwordCol.Type = "text"
table.Columns["password"] = passwordCol
idCol2 := models.InitColumn("id", "users", "public")
idCol2.Type = "integer"
modelTable.Columns["id"] = idCol2
emailCol := models.InitColumn("email", "users", "public")
emailCol.Type = "text"
modelTable.Columns["email"] = emailCol
modelSchema.Tables = append(modelSchema.Tables, modelTable)
modelSchema.Tables = append(modelSchema.Tables, table)
model.Schemas = append(model.Schemas, modelSchema)
// Generate migration
// Configure audit
auditConfig := NewAuditConfig()
auditConfig.AuditSchema = "public"
tableConfig := auditConfig.EnableTableAudit("public", "users")
tableConfig.EncryptedColumns = []string{"password"}
// Generate migration with audit
var buf bytes.Buffer
writer := NewMigrationWriter(&writers.WriterOptions{})
options := &writers.WriterOptions{
Metadata: map[string]interface{}{
"audit_config": auditConfig,
},
}
writer, err := NewMigrationWriter(options)
if err != nil {
t.Fatalf("Failed to create writer: %v", err)
}
writer.writer = &buf
err := writer.WriteMigration(model, current)
err = writer.WriteMigration(model, current)
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
}
output := buf.String()
t.Logf("Generated migration:\n%s", output)
t.Logf("Generated migration with audit:\n%s", output)
// Verify ADD COLUMN is present
if !strings.Contains(output, "ADD COLUMN") {
t.Error("Migration missing ADD COLUMN statement")
// Verify audit tables
if !strings.Contains(output, "CREATE TABLE IF NOT EXISTS public.atevent") {
t.Error("Migration missing atevent table")
}
if !strings.Contains(output, "email") {
t.Error("Migration missing column name 'email'")
if !strings.Contains(output, "CREATE TABLE IF NOT EXISTS public.atdetail") {
t.Error("Migration missing atdetail table")
}
// Verify audit function
if !strings.Contains(output, "CREATE OR REPLACE FUNCTION public.ft_audit_users()") {
t.Error("Migration missing audit function")
}
// Verify audit trigger
if !strings.Contains(output, "CREATE TRIGGER t_audit_users") {
t.Error("Migration missing audit trigger")
}
// Verify encrypted column handling
if !strings.Contains(output, "'****************'") {
t.Error("Migration missing encrypted column handling")
}
}
func TestWriteMigration_ChangeColumnType(t *testing.T) {
// Current database (with integer column)
current := models.InitDatabase("testdb")
currentSchema := models.InitSchema("public")
currentTable := models.InitTable("users", "public")
idCol := models.InitColumn("id", "users", "public")
idCol.Type = "integer"
currentTable.Columns["id"] = idCol
currentSchema.Tables = append(currentSchema.Tables, currentTable)
current.Schemas = append(current.Schemas, currentSchema)
// Model database (changed to bigint)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("public")
modelTable := models.InitTable("users", "public")
idCol2 := models.InitColumn("id", "users", "public")
idCol2.Type = "bigint"
modelTable.Columns["id"] = idCol2
modelSchema.Tables = append(modelSchema.Tables, modelTable)
model.Schemas = append(model.Schemas, modelSchema)
// Generate migration
var buf bytes.Buffer
writer := NewMigrationWriter(&writers.WriterOptions{})
writer.writer = &buf
err := writer.WriteMigration(model, current)
func TestTemplateExecutor_CreateTable(t *testing.T) {
executor, err := NewTemplateExecutor()
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
t.Fatalf("Failed to create executor: %v", err)
}
output := buf.String()
t.Logf("Generated migration:\n%s", output)
// Verify ALTER COLUMN TYPE is present
if !strings.Contains(output, "ALTER COLUMN") {
t.Error("Migration missing ALTER COLUMN statement")
data := CreateTableData{
SchemaName: "public",
TableName: "test_table",
Columns: []ColumnData{
{Name: "id", Type: "integer", NotNull: true},
{Name: "name", Type: "text", Default: "'unknown'"},
},
}
if !strings.Contains(output, "TYPE bigint") {
t.Error("Migration missing TYPE bigint")
}
}
func TestWriteMigration_AddForeignKey(t *testing.T) {
// Current database (two tables, no relationship)
current := models.InitDatabase("testdb")
currentSchema := models.InitSchema("public")
usersTable := models.InitTable("users", "public")
idCol := models.InitColumn("id", "users", "public")
idCol.Type = "integer"
usersTable.Columns["id"] = idCol
postsTable := models.InitTable("posts", "public")
postIdCol := models.InitColumn("id", "posts", "public")
postIdCol.Type = "integer"
postsTable.Columns["id"] = postIdCol
userIdCol := models.InitColumn("user_id", "posts", "public")
userIdCol.Type = "integer"
postsTable.Columns["user_id"] = userIdCol
currentSchema.Tables = append(currentSchema.Tables, usersTable, postsTable)
current.Schemas = append(current.Schemas, currentSchema)
// Model database (with foreign key)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("public")
modelUsersTable := models.InitTable("users", "public")
modelIdCol := models.InitColumn("id", "users", "public")
modelIdCol.Type = "integer"
modelUsersTable.Columns["id"] = modelIdCol
modelPostsTable := models.InitTable("posts", "public")
modelPostIdCol := models.InitColumn("id", "posts", "public")
modelPostIdCol.Type = "integer"
modelPostsTable.Columns["id"] = modelPostIdCol
modelUserIdCol := models.InitColumn("user_id", "posts", "public")
modelUserIdCol.Type = "integer"
modelPostsTable.Columns["user_id"] = modelUserIdCol
// Add foreign key constraint
fkConstraint := &models.Constraint{
Name: "fk_posts_users",
Type: models.ForeignKeyConstraint,
Columns: []string{"user_id"},
ReferencedTable: "users",
ReferencedSchema: "public",
ReferencedColumns: []string{"id"},
OnDelete: "CASCADE",
OnUpdate: "CASCADE",
}
modelPostsTable.Constraints["fk_posts_users"] = fkConstraint
modelSchema.Tables = append(modelSchema.Tables, modelUsersTable, modelPostsTable)
model.Schemas = append(model.Schemas, modelSchema)
// Generate migration
var buf bytes.Buffer
writer := NewMigrationWriter(&writers.WriterOptions{})
writer.writer = &buf
err := writer.WriteMigration(model, current)
sql, err := executor.ExecuteCreateTable(data)
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
t.Fatalf("Failed to execute template: %v", err)
}
output := buf.String()
t.Logf("Generated migration:\n%s", output)
t.Logf("Generated SQL:\n%s", sql)
// Verify FOREIGN KEY is present
if !strings.Contains(output, "FOREIGN KEY") {
t.Error("Migration missing FOREIGN KEY statement")
if !strings.Contains(sql, "CREATE TABLE IF NOT EXISTS public.test_table") {
t.Error("SQL missing CREATE TABLE statement")
}
if !strings.Contains(output, "ON DELETE CASCADE") {
t.Error("Migration missing ON DELETE CASCADE")
if !strings.Contains(sql, "id integer NOT NULL") {
t.Error("SQL missing id column definition")
}
if !strings.Contains(sql, "name text DEFAULT 'unknown'") {
t.Error("SQL missing name column definition")
}
}
func TestWriteMigration_AddIndex(t *testing.T) {
// Current database (table without index)
current := models.InitDatabase("testdb")
currentSchema := models.InitSchema("public")
currentTable := models.InitTable("users", "public")
emailCol := models.InitColumn("email", "users", "public")
emailCol.Type = "text"
currentTable.Columns["email"] = emailCol
currentSchema.Tables = append(currentSchema.Tables, currentTable)
current.Schemas = append(current.Schemas, currentSchema)
// Model database (with unique index)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("public")
modelTable := models.InitTable("users", "public")
modelEmailCol := models.InitColumn("email", "users", "public")
modelEmailCol.Type = "text"
modelTable.Columns["email"] = modelEmailCol
// Add unique index
index := &models.Index{
Name: "uk_users_email",
Unique: true,
Columns: []string{"email"},
Type: "btree",
}
modelTable.Indexes["uk_users_email"] = index
modelSchema.Tables = append(modelSchema.Tables, modelTable)
model.Schemas = append(model.Schemas, modelSchema)
// Generate migration
var buf bytes.Buffer
writer := NewMigrationWriter(&writers.WriterOptions{})
writer.writer = &buf
err := writer.WriteMigration(model, current)
func TestTemplateExecutor_AuditFunction(t *testing.T) {
executor, err := NewTemplateExecutor()
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
t.Fatalf("Failed to create executor: %v", err)
}
output := buf.String()
t.Logf("Generated migration:\n%s", output)
// Verify CREATE UNIQUE INDEX is present
if !strings.Contains(output, "CREATE UNIQUE INDEX") {
t.Error("Migration missing CREATE UNIQUE INDEX statement")
data := AuditFunctionData{
SchemaName: "public",
FunctionName: "ft_audit_users",
TableName: "users",
TablePrefix: "NULL",
PrimaryKey: "id",
AuditSchema: "public",
UserFunction: "current_user",
AuditInsert: true,
AuditUpdate: true,
AuditDelete: true,
UpdateCondition: "old.name IS DISTINCT FROM new.name",
UpdateColumns: []AuditColumnData{
{Name: "name", OldValue: "old.name::text", NewValue: "new.name::text"},
},
DeleteColumns: []AuditColumnData{
{Name: "name", OldValue: "old.name::text"},
},
}
if !strings.Contains(output, "uk_users_email") {
t.Error("Migration missing index name")
sql, err := executor.ExecuteAuditFunction(data)
if err != nil {
t.Fatalf("Failed to execute template: %v", err)
}
t.Logf("Generated SQL:\n%s", sql)
if !strings.Contains(sql, "CREATE OR REPLACE FUNCTION public.ft_audit_users()") {
t.Error("SQL missing function definition")
}
if !strings.Contains(sql, "IF TG_OP = 'INSERT'") {
t.Error("SQL missing INSERT handling")
}
if !strings.Contains(sql, "ELSIF TG_OP = 'UPDATE'") {
t.Error("SQL missing UPDATE handling")
}
if !strings.Contains(sql, "ELSIF TG_OP = 'DELETE'") {
t.Error("SQL missing DELETE handling")
}
}

View File

@@ -0,0 +1,285 @@
package pgsql
import (
"fmt"
"regexp"
"strings"
"unicode"
)
// TemplateFunctions returns the map of custom helper functions that are
// registered with every SQL template (see NewTemplateExecutor).
func TemplateFunctions() map[string]interface{} {
	funcs := make(map[string]interface{}, 18)

	// String manipulation helpers.
	funcs["upper"] = strings.ToUpper
	funcs["lower"] = strings.ToLower
	funcs["snake_case"] = toSnakeCase
	funcs["camelCase"] = toCamelCase

	// SQL formatting helpers.
	funcs["indent"] = indent
	funcs["quote"] = quote
	funcs["escape"] = escape
	funcs["safe_identifier"] = safeIdentifier

	// Type conversion helpers.
	funcs["goTypeToSQL"] = goTypeToSQL
	funcs["sqlTypeToGo"] = sqlTypeToGo
	funcs["isNumeric"] = isNumeric
	funcs["isText"] = isText

	// Collection helpers.
	funcs["first"] = first
	funcs["last"] = last
	funcs["filter"] = filter
	funcs["mapFunc"] = mapFunc
	funcs["join_with"] = joinWith

	// Convenience passthrough to the standard library.
	funcs["join"] = strings.Join

	return funcs
}
// String manipulation functions
// toSnakeCase converts a string to snake_case by lowercasing every rune and
// inserting an underscore before each uppercase letter (except at index 0).
// Note: consecutive capitals are split individually ("UserID" -> "user_i_d").
func toSnakeCase(s string) string {
	out := make([]rune, 0, len(s)+len(s)/2)
	for i, r := range s {
		if !unicode.IsUpper(r) {
			out = append(out, r)
			continue
		}
		if i > 0 {
			out = append(out, '_')
		}
		out = append(out, unicode.ToLower(r))
	}
	return string(out)
}
// toCamelCase converts an underscore-separated string to camelCase: the first
// segment is lowercased as a whole, every following non-empty segment gets an
// uppercase first byte and a lowercase remainder.
func toCamelCase(s string) string {
	parts := strings.Split(s, "_")
	if len(parts) == 0 {
		return s
	}
	var b strings.Builder
	b.WriteString(strings.ToLower(parts[0]))
	for _, p := range parts[1:] {
		if p == "" {
			continue
		}
		b.WriteString(strings.ToUpper(p[:1]))
		b.WriteString(strings.ToLower(p[1:]))
	}
	return b.String()
}
// SQL formatting functions

// indent prefixes every non-empty line of text with the given number of
// spaces; empty lines are left untouched.
func indent(spaces int, text string) string {
	pad := strings.Repeat(" ", spaces)
	lines := strings.Split(text, "\n")
	out := make([]string, len(lines))
	for i, ln := range lines {
		if ln == "" {
			out[i] = ln
			continue
		}
		out[i] = pad + ln
	}
	return strings.Join(out, "\n")
}
// quote wraps s in single quotes for use as a SQL string literal, doubling
// any embedded single quotes.
func quote(s string) string {
	var b strings.Builder
	b.Grow(len(s) + 2)
	b.WriteByte('\'')
	b.WriteString(strings.ReplaceAll(s, "'", "''"))
	b.WriteByte('\'')
	return b.String()
}
// escape escapes backslashes and single quotes for embedding in SQL text.
// A single-pass Replacer is equivalent to replacing backslashes first and
// quotes second, since neither replacement re-introduces the other pattern.
func escape(s string) string {
	return strings.NewReplacer(`\`, `\\`, "'", "''").Replace(s)
}
// unsafeIdentChars matches any character that is not legal in a simple SQL
// identifier (letters, digits, underscore). Compiled once at package scope
// so safeIdentifier does not pay a regexp compile on every call.
var unsafeIdentChars = regexp.MustCompile(`[^a-zA-Z0-9_]`)

// safeIdentifier makes a string safe to use as a SQL identifier:
// disallowed characters become underscores, a leading digit gets an
// underscore prefix, and the result is lowercased (PostgreSQL convention).
func safeIdentifier(s string) string {
	safe := unsafeIdentChars.ReplaceAllString(s, "_")
	// Ensure it doesn't start with a number
	if len(safe) > 0 && unicode.IsDigit(rune(safe[0])) {
		safe = "_" + safe
	}
	return strings.ToLower(safe)
}
// Type conversion functions

// goToSQLTypes maps Go type names to PostgreSQL column types. Built once at
// package scope instead of being reallocated on every goTypeToSQL call.
var goToSQLTypes = map[string]string{
	"string":    "text",
	"int":       "integer",
	"int32":     "integer",
	"int64":     "bigint",
	"float32":   "real",
	"float64":   "double precision",
	"bool":      "boolean",
	"time.Time": "timestamp",
	"[]byte":    "bytea",
}

// goTypeToSQL converts a Go type name to its PostgreSQL type, falling back
// to "text" for unrecognized types.
func goTypeToSQL(goType string) string {
	if sqlType, ok := goToSQLTypes[goType]; ok {
		return sqlType
	}
	return "text" // Default
}
// sqlToGoTypes maps lowercase PostgreSQL type names to Go types. Built once
// at package scope instead of being reallocated on every sqlTypeToGo call.
var sqlToGoTypes = map[string]string{
	"text":             "string",
	"varchar":          "string",
	"char":             "string",
	"integer":          "int",
	"int":              "int",
	"bigint":           "int64",
	"smallint":         "int16",
	"serial":           "int",
	"bigserial":        "int64",
	"real":             "float32",
	"double precision": "float64",
	"numeric":          "float64",
	"decimal":          "float64",
	"boolean":          "bool",
	"timestamp":        "time.Time",
	"timestamptz":      "time.Time",
	"date":             "time.Time",
	"time":             "time.Time",
	"bytea":            "[]byte",
	"json":             "json.RawMessage",
	"jsonb":            "json.RawMessage",
	"uuid":             "string",
}

// sqlTypeToGo converts a PostgreSQL type name (case-insensitive) to its Go
// type, falling back to "string" for unrecognized types.
func sqlTypeToGo(sqlType string) string {
	if goType, ok := sqlToGoTypes[strings.ToLower(sqlType)]; ok {
		return goType
	}
	return "string" // Default
}
// isNumeric reports whether a SQL type string denotes a numeric type.
// Substring matching keeps parameterized forms like numeric(10,2) covered.
func isNumeric(sqlType string) bool {
	lowered := strings.ToLower(sqlType)
	for _, marker := range [...]string{
		"integer", "int", "bigint", "smallint", "serial", "bigserial",
		"real", "double precision", "numeric", "decimal", "float",
	} {
		if strings.Contains(lowered, marker) {
			return true
		}
	}
	return false
}
// isText reports whether a SQL type string denotes a text-based type.
// Substring matching keeps parameterized forms like varchar(255) covered.
func isText(sqlType string) bool {
	lowered := strings.ToLower(sqlType)
	for _, marker := range [...]string{"text", "varchar", "char", "character", "string"} {
		if strings.Contains(lowered, marker) {
			return true
		}
	}
	return false
}
// Collection helper functions

// first returns the first element of a []string, []int, or []interface{};
// it returns nil for empty slices and unsupported types.
func first(slice interface{}) interface{} {
	if v, ok := slice.([]string); ok && len(v) > 0 {
		return v[0]
	}
	if v, ok := slice.([]int); ok && len(v) > 0 {
		return v[0]
	}
	if v, ok := slice.([]interface{}); ok && len(v) > 0 {
		return v[0]
	}
	return nil
}
// last returns the final element of a []string, []int, or []interface{};
// it returns nil for empty slices and unsupported types.
func last(slice interface{}) interface{} {
	if v, ok := slice.([]string); ok && len(v) > 0 {
		return v[len(v)-1]
	}
	if v, ok := slice.([]int); ok && len(v) > 0 {
		return v[len(v)-1]
	}
	if v, ok := slice.([]interface{}); ok && len(v) > 0 {
		return v[len(v)-1]
	}
	return nil
}
// filter is a documentation-only placeholder registered for templates: it
// returns the slice unchanged. Real filtering is written inline, e.g.
// {{range $col := .Columns}}{{if $col.NotNull}}...{{end}}{{end}}
func filter(slice interface{}, _ string) interface{} {
	return slice
}
// mapFunc is a documentation-only placeholder registered for templates: it
// returns the value unchanged. In templates, call the helper directly
// instead, e.g. {{upper .Name}}.
func mapFunc(value interface{}, _ string) interface{} {
	return value
}
// joinWith joins the elements of slice, writing separator between each pair.
func joinWith(slice []string, separator string) string {
	var b strings.Builder
	for i, s := range slice {
		if i > 0 {
			b.WriteString(separator)
		}
		b.WriteString(s)
	}
	return b.String()
}
// Additional helper functions

// formatType formats a SQL type with optional length and precision:
// both set -> type(length,precision); only length -> type(length);
// otherwise (including precision without length) the bare base type.
func formatType(baseType string, length, precision int) string {
	switch {
	case length > 0 && precision > 0:
		return fmt.Sprintf("%s(%d,%d)", baseType, length, precision)
	case length > 0:
		return fmt.Sprintf("%s(%d)", baseType, length)
	default:
		return baseType
	}
}

View File

@@ -0,0 +1,332 @@
package pgsql
import (
"strings"
"testing"
)
// TestToSnakeCase pins snake_case conversion, including the current behavior
// of splitting consecutive capitals ("UserID" -> "user_i_d").
func TestToSnakeCase(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"UserId", "user_id"},
{"UserID", "user_i_d"},
{"HTTPResponse", "h_t_t_p_response"},
{"already_snake", "already_snake"},
{"", ""},
}
for _, tt := range tests {
result := toSnakeCase(tt.input)
if result != tt.expected {
t.Errorf("toSnakeCase(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestToCamelCase pins underscore -> camelCase conversion.
func TestToCamelCase(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"user_id", "userId"},
{"user_name", "userName"},
{"http_response", "httpResponse"},
{"", ""},
{"alreadycamel", "alreadycamel"},
}
for _, tt := range tests {
result := toCamelCase(tt.input)
if result != tt.expected {
t.Errorf("toCamelCase(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestQuote pins SQL string quoting (wrap in single quotes, double embedded ones).
func TestQuote(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"hello", "'hello'"},
{"O'Brien", "'O''Brien'"},
{"", "''"},
}
for _, tt := range tests {
result := quote(tt.input)
if result != tt.expected {
t.Errorf("quote(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestEscape pins SQL escaping of single quotes and backslashes.
func TestEscape(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"hello", "hello"},
{"O'Brien", "O''Brien"},
{"path\\to\\file", "path\\\\to\\\\file"},
}
for _, tt := range tests {
result := escape(tt.input)
if result != tt.expected {
t.Errorf("escape(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestSafeIdentifier pins identifier sanitization: non-word chars become
// underscores, leading digits get a prefix, result is lowercased.
func TestSafeIdentifier(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"User-Id", "user_id"},
{"123column", "_123column"},
{"valid_name", "valid_name"},
{"Column@Name!", "column_name_"},
{"UPPERCASE", "uppercase"},
}
for _, tt := range tests {
result := safeIdentifier(tt.input)
if result != tt.expected {
t.Errorf("safeIdentifier(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestGoTypeToSQL pins the Go -> PostgreSQL type mapping and the "text" default.
func TestGoTypeToSQL(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"string", "text"},
{"int", "integer"},
{"int64", "bigint"},
{"bool", "boolean"},
{"time.Time", "timestamp"},
{"unknown", "text"},
}
for _, tt := range tests {
result := goTypeToSQL(tt.input)
if result != tt.expected {
t.Errorf("goTypeToSQL(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestSQLTypeToGo pins the PostgreSQL -> Go type mapping and the "string" default.
func TestSQLTypeToGo(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"text", "string"},
{"integer", "int"},
{"bigint", "int64"},
{"boolean", "bool"},
{"timestamp", "time.Time"},
{"unknown", "string"},
}
for _, tt := range tests {
result := sqlTypeToGo(tt.input)
if result != tt.expected {
t.Errorf("sqlTypeToGo(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
// TestIsNumeric pins substring-based numeric detection (covers numeric(10,2)).
func TestIsNumeric(t *testing.T) {
tests := []struct {
input string
expected bool
}{
{"integer", true},
{"bigint", true},
{"numeric(10,2)", true},
{"text", false},
{"varchar", false},
}
for _, tt := range tests {
result := isNumeric(tt.input)
if result != tt.expected {
t.Errorf("isNumeric(%q) = %v, want %v", tt.input, result, tt.expected)
}
}
}
// TestIsText pins substring-based text-type detection (covers varchar(255)).
func TestIsText(t *testing.T) {
tests := []struct {
input string
expected bool
}{
{"text", true},
{"varchar(255)", true},
{"character varying", true},
{"integer", false},
{"bigint", false},
}
for _, tt := range tests {
result := isText(tt.input)
if result != tt.expected {
t.Errorf("isText(%q) = %v, want %v", tt.input, result, tt.expected)
}
}
}
// TestIndent pins that every line is prefixed with the given number of spaces.
func TestIndent(t *testing.T) {
input := "line1\nline2\nline3"
expected := "  line1\n  line2\n  line3"
result := indent(2, input)
if result != expected {
t.Errorf("indent(2, %q) = %q, want %q", input, result, expected)
}
}
// TestFirst pins first-element extraction and the nil result for empty slices.
func TestFirst(t *testing.T) {
tests := []struct {
input interface{}
expected interface{}
}{
{[]string{"a", "b", "c"}, "a"},
{[]string{}, nil},
{[]int{1, 2, 3}, 1},
}
for _, tt := range tests {
result := first(tt.input)
if result != tt.expected {
t.Errorf("first(%v) = %v, want %v", tt.input, result, tt.expected)
}
}
}
// TestLast pins last-element extraction and the nil result for empty slices.
func TestLast(t *testing.T) {
tests := []struct {
input interface{}
expected interface{}
}{
{[]string{"a", "b", "c"}, "c"},
{[]string{}, nil},
{[]int{1, 2, 3}, 3},
}
for _, tt := range tests {
result := last(tt.input)
if result != tt.expected {
t.Errorf("last(%v) = %v, want %v", tt.input, result, tt.expected)
}
}
}
// TestJoinWith pins separator-joining of string slices.
func TestJoinWith(t *testing.T) {
input := []string{"a", "b", "c"}
expected := "a, b, c"
result := joinWith(input, ", ")
if result != expected {
t.Errorf("joinWith(%v, \", \") = %q, want %q", input, result, expected)
}
}
// TestTemplateFunctions pins the registered function names and spot-checks
// that a registered function is callable with the expected signature.
func TestTemplateFunctions(t *testing.T) {
funcs := TemplateFunctions()
// Check that all expected functions are registered
expectedFuncs := []string{
"upper", "lower", "snake_case", "camelCase",
"indent", "quote", "escape", "safe_identifier",
"goTypeToSQL", "sqlTypeToGo", "isNumeric", "isText",
"first", "last", "filter", "mapFunc", "join_with",
"join",
}
for _, name := range expectedFuncs {
if _, ok := funcs[name]; !ok {
t.Errorf("Expected function %q not found in TemplateFunctions()", name)
}
}
// Test that they're callable
if upperFunc, ok := funcs["upper"].(func(string) string); ok {
result := upperFunc("hello")
if result != "HELLO" {
t.Errorf("upper function not working correctly")
}
} else {
t.Error("upper function has wrong type")
}
}
// TestFormatType pins type formatting with length and length+precision.
func TestFormatType(t *testing.T) {
tests := []struct {
baseType string
length int
precision int
expected string
}{
{"varchar", 255, 0, "varchar(255)"},
{"numeric", 10, 2, "numeric(10,2)"},
{"integer", 0, 0, "integer"},
}
for _, tt := range tests {
result := formatType(tt.baseType, tt.length, tt.precision)
if result != tt.expected {
t.Errorf("formatType(%q, %d, %d) = %q, want %q",
tt.baseType, tt.length, tt.precision, result, tt.expected)
}
}
}
// Test that template functions work in actual templates
// (end-to-end: registers the func map via NewTemplateExecutor, parses an
// ad-hoc template that calls several helpers, and checks the rendered output).
func TestTemplateFunctionsInTemplate(t *testing.T) {
executor, err := NewTemplateExecutor()
if err != nil {
t.Fatalf("Failed to create executor: %v", err)
}
// Create a simple test template
tmpl, err := executor.templates.New("test").Parse(`
{{- upper .Name -}}
{{- lower .Type -}}
{{- snake_case .CamelName -}}
{{- safe_identifier .UnsafeName -}}
`)
if err != nil {
t.Fatalf("Failed to parse test template: %v", err)
}
data := struct {
Name string
Type string
CamelName string
UnsafeName string
}{
Name: "hello",
Type: "TEXT",
CamelName: "UserId",
UnsafeName: "user-id!",
}
var buf strings.Builder
err = tmpl.Execute(&buf, data)
if err != nil {
t.Fatalf("Failed to execute template: %v", err)
}
result := buf.String()
// Trim markers in the template suppress all surrounding whitespace, so
// the four helper outputs concatenate directly.
expected := "HELLOtextuser_iduser_id_"
if result != expected {
t.Errorf("Template output = %q, want %q", result, expected)
}
}

View File

@@ -0,0 +1,457 @@
package pgsql
import (
"bytes"
"embed"
"fmt"
"strings"
"text/template"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// templateFS embeds every *.tmpl file under templates/ into the binary so
// the executor needs no filesystem access at runtime.
//go:embed templates/*.tmpl
var templateFS embed.FS
// TemplateExecutor manages and executes SQL templates
type TemplateExecutor struct {
// templates holds all parsed embedded templates, addressable by file
// name (e.g. "create_table.tmpl") via ExecuteTemplate.
templates *template.Template
}
// NewTemplateExecutor creates a new template executor with all custom
// template functions registered and every embedded *.tmpl file parsed.
// It returns an error if any embedded template fails to parse.
func NewTemplateExecutor() (*TemplateExecutor, error) {
	// TemplateFunctions returns a map[string]interface{}, which is
	// template.FuncMap's underlying type, so convert directly instead of
	// copying it key by key.
	tmpl, err := template.New("").
		Funcs(template.FuncMap(TemplateFunctions())).
		ParseFS(templateFS, "templates/*.tmpl")
	if err != nil {
		return nil, fmt.Errorf("failed to parse templates: %w", err)
	}
	return &TemplateExecutor{templates: tmpl}, nil
}
// Template data structures
//
// NOTE: fields named Columns/SourceColumns/TargetColumns below are
// pre-rendered, comma-separated SQL fragments (see the *.tmpl files, which
// interpolate them verbatim), not Go slices.
// CreateTableData contains data for create table template
type CreateTableData struct {
SchemaName string
TableName string
Columns []ColumnData
}
// ColumnData represents column information
type ColumnData struct {
Name string
Type string
Default string
NotNull bool
}
// AddColumnData contains data for add column template
type AddColumnData struct {
SchemaName string
TableName string
ColumnName string
ColumnType string
Default string
NotNull bool
}
// AlterColumnTypeData contains data for alter column type template
type AlterColumnTypeData struct {
SchemaName string
TableName string
ColumnName string
NewType string
}
// AlterColumnDefaultData contains data for alter column default template
type AlterColumnDefaultData struct {
SchemaName string
TableName string
ColumnName string
// SetDefault selects SET DEFAULT (true) vs DROP DEFAULT (false).
SetDefault bool
DefaultValue string
}
// CreatePrimaryKeyData contains data for create primary key template
type CreatePrimaryKeyData struct {
SchemaName string
TableName string
ConstraintName string
Columns string
}
// CreateIndexData contains data for create index template
type CreateIndexData struct {
SchemaName string
TableName string
IndexName string
// IndexType is the access method passed to USING (e.g. btree).
IndexType string
Columns string
Unique bool
}
// CreateForeignKeyData contains data for create foreign key template
type CreateForeignKeyData struct {
SchemaName string
TableName string
ConstraintName string
SourceColumns string
TargetSchema string
TargetTable string
TargetColumns string
OnDelete string
OnUpdate string
}
// DropConstraintData contains data for drop constraint template
type DropConstraintData struct {
SchemaName string
TableName string
ConstraintName string
}
// DropIndexData contains data for drop index template
type DropIndexData struct {
SchemaName string
IndexName string
}
// CommentTableData contains data for table comment template
type CommentTableData struct {
SchemaName string
TableName string
Comment string
}
// CommentColumnData contains data for column comment template
type CommentColumnData struct {
SchemaName string
TableName string
ColumnName string
Comment string
}
// AuditTablesData contains data for audit tables template
type AuditTablesData struct {
AuditSchema string
}
// AuditColumnData represents a column in audit template
type AuditColumnData struct {
Name string
// OldValue/NewValue are SQL expressions (e.g. "old.name::text") or the
// masked literal for encrypted columns.
OldValue string
NewValue string
}
// AuditFunctionData contains data for audit function template
type AuditFunctionData struct {
SchemaName string
FunctionName string
TableName string
// TablePrefix is a SQL literal: either "NULL" or a quoted prefix.
TablePrefix string
PrimaryKey string
AuditSchema string
// UserFunction is a SQL expression yielding the acting user (e.g. current_user).
UserFunction string
AuditInsert bool
AuditUpdate bool
AuditDelete bool
// UpdateCondition is a boolean SQL expression ORing IS DISTINCT FROM checks.
UpdateCondition string
UpdateColumns []AuditColumnData
DeleteColumns []AuditColumnData
}
// AuditTriggerData contains data for audit trigger template
type AuditTriggerData struct {
SchemaName string
TableName string
TriggerName string
FunctionName string
// Events is interpolated after AFTER, presumably "INSERT OR UPDATE OR
// DELETE" style — TODO confirm against callers.
Events string
}
// Execute methods for each template

// execTemplate renders the named embedded template (e.g. "create_table.tmpl")
// with the given data and returns the generated SQL. The error message uses
// the template's base name to preserve the historical per-method wording.
func (te *TemplateExecutor) execTemplate(name string, data interface{}) (string, error) {
	var buf bytes.Buffer
	if err := te.templates.ExecuteTemplate(&buf, name, data); err != nil {
		return "", fmt.Errorf("failed to execute %s template: %w", strings.TrimSuffix(name, ".tmpl"), err)
	}
	return buf.String(), nil
}

// ExecuteCreateTable executes the create table template
func (te *TemplateExecutor) ExecuteCreateTable(data CreateTableData) (string, error) {
	return te.execTemplate("create_table.tmpl", data)
}

// ExecuteAddColumn executes the add column template
func (te *TemplateExecutor) ExecuteAddColumn(data AddColumnData) (string, error) {
	return te.execTemplate("add_column.tmpl", data)
}

// ExecuteAlterColumnType executes the alter column type template
func (te *TemplateExecutor) ExecuteAlterColumnType(data AlterColumnTypeData) (string, error) {
	return te.execTemplate("alter_column_type.tmpl", data)
}

// ExecuteAlterColumnDefault executes the alter column default template
func (te *TemplateExecutor) ExecuteAlterColumnDefault(data AlterColumnDefaultData) (string, error) {
	return te.execTemplate("alter_column_default.tmpl", data)
}

// ExecuteCreatePrimaryKey executes the create primary key template
func (te *TemplateExecutor) ExecuteCreatePrimaryKey(data CreatePrimaryKeyData) (string, error) {
	return te.execTemplate("create_primary_key.tmpl", data)
}

// ExecuteCreateIndex executes the create index template
func (te *TemplateExecutor) ExecuteCreateIndex(data CreateIndexData) (string, error) {
	return te.execTemplate("create_index.tmpl", data)
}

// ExecuteCreateForeignKey executes the create foreign key template
func (te *TemplateExecutor) ExecuteCreateForeignKey(data CreateForeignKeyData) (string, error) {
	return te.execTemplate("create_foreign_key.tmpl", data)
}

// ExecuteDropConstraint executes the drop constraint template
func (te *TemplateExecutor) ExecuteDropConstraint(data DropConstraintData) (string, error) {
	return te.execTemplate("drop_constraint.tmpl", data)
}

// ExecuteDropIndex executes the drop index template
func (te *TemplateExecutor) ExecuteDropIndex(data DropIndexData) (string, error) {
	return te.execTemplate("drop_index.tmpl", data)
}

// ExecuteCommentTable executes the table comment template
func (te *TemplateExecutor) ExecuteCommentTable(data CommentTableData) (string, error) {
	return te.execTemplate("comment_table.tmpl", data)
}

// ExecuteCommentColumn executes the column comment template
func (te *TemplateExecutor) ExecuteCommentColumn(data CommentColumnData) (string, error) {
	return te.execTemplate("comment_column.tmpl", data)
}

// ExecuteAuditTables executes the audit tables template
func (te *TemplateExecutor) ExecuteAuditTables(data AuditTablesData) (string, error) {
	return te.execTemplate("audit_tables.tmpl", data)
}

// ExecuteAuditFunction executes the audit function template
func (te *TemplateExecutor) ExecuteAuditFunction(data AuditFunctionData) (string, error) {
	return te.execTemplate("audit_function.tmpl", data)
}

// ExecuteAuditTrigger executes the audit trigger template
func (te *TemplateExecutor) ExecuteAuditTrigger(data AuditTriggerData) (string, error) {
	return te.execTemplate("audit_trigger.tmpl", data)
}
// Helper functions to build template data from models

// BuildCreateTableData builds CreateTableData from a models.Table.
// Columns are emitted in the deterministic order given by getSortedColumns;
// a non-nil column default is stringified with %v.
func BuildCreateTableData(schemaName string, table *models.Table) CreateTableData {
	sortedCols := getSortedColumns(table.Columns)
	// Pre-size to the number of columns actually emitted.
	columns := make([]ColumnData, 0, len(sortedCols))
	for _, col := range sortedCols {
		colData := ColumnData{
			Name:    col.Name,
			Type:    col.Type,
			NotNull: col.NotNull,
		}
		if col.Default != nil {
			colData.Default = fmt.Sprintf("%v", col.Default)
		}
		columns = append(columns, colData)
	}
	return CreateTableData{
		SchemaName: schemaName,
		TableName:  table.Name,
		Columns:    columns,
	}
}
// auditMaskedValue is recorded instead of real data for encrypted columns so
// plaintext never reaches the audit trail.
const auditMaskedValue = "'****************'"

// containsFold reports whether name matches any entry of names,
// case-insensitively.
func containsFold(names []string, name string) bool {
	for _, n := range names {
		if strings.EqualFold(name, n) {
			return true
		}
	}
	return false
}

// BuildAuditFunctionData builds AuditFunctionData from table and config.
// Audited columns are all table columns minus the primary key and the
// config's excluded columns; encrypted columns are masked in both the
// UPDATE and DELETE detail rows.
//
// NOTE(review): unlike BuildCreateTableData, this iterates table.Columns
// directly rather than via getSortedColumns — if Columns is a map, the
// emitted column order is nondeterministic; confirm and consider sorting.
func BuildAuditFunctionData(
	schemaName string,
	table *models.Table,
	pk *models.Column,
	config *TableAuditConfig,
	auditSchema string,
	userFunction string,
) AuditFunctionData {
	funcName := fmt.Sprintf("ft_audit_%s", table.Name)

	// Build list of audited columns: skip the primary key and exclusions.
	auditedColumns := make([]*models.Column, 0)
	for _, col := range table.Columns {
		if col.Name == pk.Name || containsFold(config.ExcludedColumns, col.Name) {
			continue
		}
		auditedColumns = append(auditedColumns, col)
	}

	// UPDATE fires only when at least one audited column actually changed.
	updateComparisons := make([]string, 0, len(auditedColumns))
	for _, col := range auditedColumns {
		updateComparisons = append(updateComparisons,
			fmt.Sprintf("old.%s IS DISTINCT FROM new.%s", col.Name, col.Name))
	}
	updateCondition := strings.Join(updateComparisons, " OR ")

	// UPDATE and DELETE detail rows share the same old-value expression, so
	// build both lists in one pass.
	updateColumns := make([]AuditColumnData, 0, len(auditedColumns))
	deleteColumns := make([]AuditColumnData, 0, len(auditedColumns))
	for _, col := range auditedColumns {
		oldValue := fmt.Sprintf("old.%s::text", col.Name)
		newValue := fmt.Sprintf("new.%s::text", col.Name)
		if containsFold(config.EncryptedColumns, col.Name) {
			oldValue = auditMaskedValue
			newValue = auditMaskedValue
		}
		updateColumns = append(updateColumns, AuditColumnData{
			Name:     col.Name,
			OldValue: oldValue,
			NewValue: newValue,
		})
		deleteColumns = append(deleteColumns, AuditColumnData{
			Name:     col.Name,
			OldValue: oldValue,
		})
	}

	// TablePrefix is emitted verbatim into SQL: NULL or a quoted literal.
	tablePrefix := "NULL"
	if config.TablePrefix != "" {
		tablePrefix = fmt.Sprintf("'%s'", config.TablePrefix)
	}

	return AuditFunctionData{
		SchemaName:      schemaName,
		FunctionName:    funcName,
		TableName:       table.Name,
		TablePrefix:     tablePrefix,
		PrimaryKey:      pk.Name,
		AuditSchema:     auditSchema,
		UserFunction:    userFunction,
		AuditInsert:     config.AuditInsert,
		AuditUpdate:     config.AuditUpdate,
		AuditDelete:     config.AuditDelete,
		UpdateCondition: updateCondition,
		UpdateColumns:   updateColumns,
		DeleteColumns:   deleteColumns,
	}
}

View File

@@ -0,0 +1,4 @@
{{- /* add_column.tmpl: idempotent ADD COLUMN with optional DEFAULT and NOT
NULL clauses. The trim markers keep the optional clauses on the same line. */ -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ADD COLUMN IF NOT EXISTS {{.ColumnName}} {{.ColumnType}}
{{- if .Default}} DEFAULT {{.Default}}{{end}}
{{- if .NotNull}} NOT NULL{{end}};

View File

@@ -0,0 +1,7 @@
{{- /* alter_column_default.tmpl: SET DEFAULT when .SetDefault is true,
otherwise DROP DEFAULT. */ -}}
{{- if .SetDefault -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ALTER COLUMN {{.ColumnName}} SET DEFAULT {{.DefaultValue}};
{{- else -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ALTER COLUMN {{.ColumnName}} DROP DEFAULT;
{{- end -}}

View File

@@ -0,0 +1,2 @@
{{- /* alter_column_type.tmpl: plain type change; relies on PostgreSQL's
implicit cast — a USING clause would be needed for incompatible types. */ -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ALTER COLUMN {{.ColumnName}} TYPE {{.NewType}};

View File

@@ -0,0 +1,84 @@
{{- /* audit_function.tmpl renders a PL/pgSQL trigger function that records
changes into the audit schema's atevent (header) and atdetail (per-column)
tables. Values are truncated to 1000 chars via substr.

NOTE(review): the INSERT branch emits "IF TG_OP = ..." while UPDATE/DELETE
emit "ELSIF ..." — if AuditInsert is false but AuditUpdate or AuditDelete is
true, the generated function begins with ELSIF and is invalid PL/pgSQL.
Confirm callers always enable insert auditing, or restructure the branches.

NOTE(review): the EXCEPTION handler swallows all errors with a WARNING, so
audited DML never fails because of auditing — presumably intentional. */ -}}
CREATE OR REPLACE FUNCTION {{.SchemaName}}.{{.FunctionName}}()
RETURNS trigger AS
$body$
DECLARE
m_funcname text = '{{.FunctionName}}';
m_user text;
m_atevent integer;
BEGIN
-- Get current user
m_user := {{.UserFunction}}::text;
-- Skip audit for specific users if needed
IF m_user IN ('noaudit', 'importuser') THEN
IF (TG_OP = 'DELETE') THEN
RETURN OLD;
ELSIF (TG_OP = 'UPDATE') THEN
RETURN NEW;
ELSIF (TG_OP = 'INSERT') THEN
RETURN NEW;
END IF;
END IF;
{{- if .AuditInsert}}
IF TG_OP = 'INSERT' THEN
-- Record INSERT
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, changeuser, changedate, changetime, actionx)
VALUES ('{{.TableName}}', {{.TablePrefix}}, new.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 1)
RETURNING rid_atevent INTO m_atevent;
{{- end}}
{{- if .AuditUpdate}}
ELSIF TG_OP = 'UPDATE' THEN
-- Check if any audited columns changed
IF ({{.UpdateCondition}}) THEN
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, changeuser, changedate, changetime, actionx)
VALUES ('{{.TableName}}', {{.TablePrefix}}, new.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 2)
RETURNING rid_atevent INTO m_atevent;
-- Record column changes
{{- range .UpdateColumns}}
IF (old.{{.Name}} IS DISTINCT FROM new.{{.Name}}) THEN
INSERT INTO {{$.AuditSchema}}.atdetail(rid_atevent, datacolumn, changedfrom, changedto)
VALUES (m_atevent, '{{.Name}}', substr({{.OldValue}}, 1, 1000), substr({{.NewValue}}, 1, 1000));
END IF;
{{- end}}
END IF;
{{- end}}
{{- if .AuditDelete}}
ELSIF TG_OP = 'DELETE' THEN
-- Record DELETE
INSERT INTO {{.AuditSchema}}.atevent (tablename, tableprefix, rid_parent, rid_deletedparent, changeuser, changedate, changetime, actionx)
VALUES ('{{.TableName}}', {{.TablePrefix}}, old.{{.PrimaryKey}}, old.{{.PrimaryKey}}, m_user, CURRENT_DATE, CURRENT_TIME, 3)
RETURNING rid_atevent INTO m_atevent;
-- Record deleted column values
{{- range .DeleteColumns}}
INSERT INTO {{$.AuditSchema}}.atdetail(rid_atevent, datacolumn, changedfrom, changedto)
VALUES (m_atevent, '{{.Name}}', substr({{.OldValue}}, 1, 1000), NULL);
{{- end}}
{{- end}}
END IF;
IF (TG_OP = 'DELETE') THEN
RETURN OLD;
ELSIF (TG_OP = 'UPDATE') THEN
RETURN NEW;
ELSIF (TG_OP = 'INSERT') THEN
RETURN NEW;
END IF;
RETURN NULL;
EXCEPTION
WHEN OTHERS THEN
RAISE WARNING 'Audit function % failed: %', m_funcname, SQLERRM;
RETURN NULL;
END;
$body$
LANGUAGE plpgsql
VOLATILE
SECURITY DEFINER;
COMMENT ON FUNCTION {{.SchemaName}}.{{.FunctionName}}() IS 'Audit trigger function for table {{.SchemaName}}.{{.TableName}}';

View File

@@ -0,0 +1,49 @@
{{- /* audit_tables.tmpl: idempotent DDL for the audit trail schema —
atevent (one row per change, actionx 1=INSERT 2=UPDATE 3=DELETE) and
atdetail (one row per changed column, cascade-deleted with its event). */ -}}
-- Audit Event Header Table
CREATE TABLE IF NOT EXISTS {{.AuditSchema}}.atevent (
rid_atevent serial PRIMARY KEY,
tablename text NOT NULL,
tableprefix text,
rid_parent integer NOT NULL,
rid_deletedparent integer,
changeuser text NOT NULL,
changedate date NOT NULL,
changetime time NOT NULL,
actionx smallint NOT NULL,
CONSTRAINT ck_atevent_action CHECK (actionx IN (1, 2, 3))
);
CREATE INDEX IF NOT EXISTS idx_atevent_tablename ON {{.AuditSchema}}.atevent(tablename);
CREATE INDEX IF NOT EXISTS idx_atevent_rid_parent ON {{.AuditSchema}}.atevent(rid_parent);
CREATE INDEX IF NOT EXISTS idx_atevent_changedate ON {{.AuditSchema}}.atevent(changedate);
CREATE INDEX IF NOT EXISTS idx_atevent_changeuser ON {{.AuditSchema}}.atevent(changeuser);
COMMENT ON TABLE {{.AuditSchema}}.atevent IS 'Audit trail header table - tracks all data changes';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_atevent IS 'Audit event ID';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.tablename IS 'Name of the table that was modified';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_parent IS 'Primary key value of the modified record';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.rid_deletedparent IS 'Parent reference for deleted records';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changeuser IS 'User who made the change';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changedate IS 'Date of change';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.changetime IS 'Time of change';
COMMENT ON COLUMN {{.AuditSchema}}.atevent.actionx IS 'Action type: 1=INSERT, 2=UPDATE, 3=DELETE';
-- Audit Event Detail Table
CREATE TABLE IF NOT EXISTS {{.AuditSchema}}.atdetail (
rid_atdetail serial PRIMARY KEY,
rid_atevent integer NOT NULL,
datacolumn text NOT NULL,
changedfrom text,
changedto text,
CONSTRAINT fk_atdetail_atevent FOREIGN KEY (rid_atevent)
REFERENCES {{.AuditSchema}}.atevent(rid_atevent) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_atdetail_rid_atevent ON {{.AuditSchema}}.atdetail(rid_atevent);
CREATE INDEX IF NOT EXISTS idx_atdetail_datacolumn ON {{.AuditSchema}}.atdetail(datacolumn);
COMMENT ON TABLE {{.AuditSchema}}.atdetail IS 'Audit trail detail table - stores individual column changes';
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.rid_atdetail IS 'Audit detail ID';
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.rid_atevent IS 'Reference to audit event';
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.datacolumn IS 'Name of the column that changed';
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.changedfrom IS 'Old value before change';
COMMENT ON COLUMN {{.AuditSchema}}.atdetail.changedto IS 'New value after change';

View File

@@ -0,0 +1,16 @@
{{- /* audit_trigger.tmpl: creates the AFTER row-level audit trigger only if
no trigger of the same name already exists on the table (pg_trigger check).
Events is interpolated verbatim after AFTER — presumably a string like
"INSERT OR UPDATE OR DELETE"; confirm against callers. */ -}}
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_trigger
WHERE tgname = '{{.TriggerName}}'
AND tgrelid = '{{.SchemaName}}.{{.TableName}}'::regclass
) THEN
CREATE TRIGGER {{.TriggerName}}
AFTER {{.Events}}
ON {{.SchemaName}}.{{.TableName}}
FOR EACH ROW
EXECUTE FUNCTION {{.SchemaName}}.{{.FunctionName}}();
END IF;
END;
$$;

View File

@@ -0,0 +1,39 @@
{{- /* Reusable constraint patterns: "constraint_base" expects an overriding
"constraint_definition" block (template inheritance); the *_safe variants
wrap drop/add in information_schema existence checks for idempotency. */ -}}
{{/* Base constraint template */}}
{{- define "constraint_base" -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ADD CONSTRAINT {{.ConstraintName}}
{{block "constraint_definition" .}}{{end}};
{{- end -}}
{{/* Drop constraint with check */}}
{{- define "drop_constraint_safe" -}}
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{.SchemaName}}.{{.TableName}}
DROP CONSTRAINT {{.ConstraintName}};
END IF;
END;
$$;
{{- end -}}
{{/* Add constraint with existence check */}}
{{- define "add_constraint_safe" -}}
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
{{template "constraint_base" .}}
END IF;
END;
$$;
{{- end -}}

View File

@@ -0,0 +1,34 @@
{{/* Base DDL template library with common structure shared by concrete
migration templates. "ddl_header" emits a SQL comment banner identifying
the operation, schema, and object for readability in generated scripts. */}}
{{- define "ddl_header" -}}
-- DDL Operation: {{.Operation}}
-- Schema: {{.Schema}}
-- Object: {{.ObjectName}}
{{- end -}}
{{/* Matching end-of-operation banner for "ddl_header". */}}
{{- define "ddl_footer" -}}
-- End of {{.Operation}}
{{- end -}}
{{/* Base ALTER TABLE structure: the operation clause (ADD COLUMN, ALTER
COLUMN, ...) is supplied by overriding the "alter_operation" block; the
default is empty, so this is only useful when specialized. */}}
{{- define "alter_table_base" -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
{{block "alter_operation" .}}{{end}};
{{- end -}}
{{/* Common existence-check pattern: wraps an arbitrary CREATE statement in
a DO block guarded by IF NOT EXISTS (<query>). Derived templates override
both the "exists_query" block (a SELECT probing the catalogs) and the
"create_statement" block (the DDL to run when absent). */}}
{{- define "exists_check" -}}
DO $$
BEGIN
IF NOT EXISTS (
{{block "exists_query" .}}{{end}}
) THEN
{{block "create_statement" .}}{{end}}
END IF;
END;
$$;
{{- end -}}
{{/* Common drop pattern: the "drop_type" block supplies the leading
keyword(s), e.g. "DROP TABLE" or "DROP INDEX"; IF EXISTS makes it a no-op
when the object is already gone. */}}
{{- define "drop_if_exists" -}}
{{block "drop_type" .}}{{end}} IF EXISTS {{.SchemaName}}.{{.ObjectName}};
{{- end -}}

View File

@@ -0,0 +1 @@
{{- /* Sets (or replaces) the comment on a single column. The comment text is
emitted as a dollar-quoted string literal so a single quote inside
.Comment can no longer truncate the literal and break (or inject into)
the generated statement. NOTE(review): a comment containing the literal
tag $rsc$ would still break the quoting — considered acceptable; a
FuncMap-based escaper would be needed to close that fully. */ -}}
COMMENT ON COLUMN {{.SchemaName}}.{{.TableName}}.{{.ColumnName}} IS $rsc${{.Comment}}$rsc$;

View File

@@ -0,0 +1 @@
{{- /* Sets (or replaces) the comment on a table. The comment text is emitted
as a dollar-quoted string literal so a single quote inside .Comment can
no longer truncate the literal and break (or inject into) the generated
statement. NOTE(review): a comment containing the literal tag $rsc$ would
still break the quoting — considered acceptable without a FuncMap-based
escaper. */ -}}
COMMENT ON TABLE {{.SchemaName}}.{{.TableName}} IS $rsc${{.Comment}}$rsc$;

View File

@@ -0,0 +1,10 @@
{{- /* Recreates a foreign key: unconditionally drops any existing constraint
of the same name (IF EXISTS makes the drop a no-op on first run), then
adds it back with the desired definition — this handles both creation
and redefinition with one template. Unset referential actions fall back
to NO ACTION, PostgreSQL's default, so an empty .OnDelete/.OnUpdate no
longer renders a bare, invalid "ON DELETE" / "ON UPDATE" clause while
output is unchanged whenever the writer supplies both values.
DEFERRABLE lets migrations defer the check inside a transaction when
loading rows in an order that temporarily violates the FK. */ -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}}
DROP CONSTRAINT IF EXISTS {{.ConstraintName}};
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ADD CONSTRAINT {{.ConstraintName}}
FOREIGN KEY ({{.SourceColumns}})
REFERENCES {{.TargetSchema}}.{{.TargetTable}} ({{.TargetColumns}})
ON DELETE {{if .OnDelete}}{{.OnDelete}}{{else}}NO ACTION{{end}}
ON UPDATE {{if .OnUpdate}}{{.OnUpdate}}{{else}}NO ACTION{{end}}
DEFERRABLE;

View File

@@ -0,0 +1,2 @@
{{- /* Idempotent index creation (optionally UNIQUE). An unset .IndexType now
falls back to btree — PostgreSQL's default access method — instead of
rendering an invalid empty USING clause; output is unchanged whenever
the writer supplies a method. .Columns is the pre-joined column list. */ -}}
CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{.IndexName}}
ON {{.SchemaName}}.{{.TableName}} USING {{if .IndexType}}{{.IndexType}}{{else}}btree{{end}} ({{.Columns}});

View File

@@ -0,0 +1,13 @@
{{- /* Idempotent primary key creation: checks information_schema for a
constraint with this name on the table and only issues ADD CONSTRAINT
... PRIMARY KEY when absent, so re-running the migration is safe.
.Columns is the pre-joined key column list. Note the guard matches on
constraint NAME only — a PK under a different name is not detected. */ -}}
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{.SchemaName}}.{{.TableName}}
ADD CONSTRAINT {{.ConstraintName}} PRIMARY KEY ({{.Columns}});
END IF;
END;
$$;

View File

@@ -0,0 +1,4 @@
{{/* Example of using template inheritance for primary key creation */}}
{{/* This demonstrates how to use the base exists_check pattern */}}
{{/* Note: This is an example and not used by the actual migration writer */}}
{{/* The actual create_primary_key.tmpl is used instead */}}

View File

@@ -0,0 +1,8 @@
{{- /* Idempotent table creation. Iterates .Columns and renders one
"name type [DEFAULT ...] [NOT NULL]" definition per column, with a
comma emitted before every column except the first ({{if $i}} is false
only at index 0). The leading {{- trims keep commas attached to the
preceding definition while each column still starts on its own line. */ -}}
CREATE TABLE IF NOT EXISTS {{.SchemaName}}.{{.TableName}} (
{{- range $i, $col := .Columns}}
{{- if $i}},{{end}}
{{$col.Name}} {{$col.Type}}
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
{{- if $col.NotNull}} NOT NULL{{end}}
{{- end}}
);

View File

@@ -0,0 +1,9 @@
{{/* Example of table creation using composition: same output shape as the
standalone create-table template, but built from the shared fragments —
"qualified_table" for the schema-qualified name and "column_definition"
per column (both defined elsewhere in the template set; this file must
be parsed together with the fragments file for the references to
resolve). Not invoked by the migration writer itself. */}}
{{- define "create_table_composed" -}}
CREATE TABLE IF NOT EXISTS {{template "qualified_table" .}} (
{{- range $i, $col := .Columns}}
{{- if $i}},{{end}}
{{template "column_definition" $col}}
{{- end}}
);
{{- end -}}

View File

@@ -0,0 +1 @@
{{- /* Drops a constraint by name; IF EXISTS makes it a no-op when the
constraint is already gone, so the migration can be re-run safely. */ -}}
ALTER TABLE {{.SchemaName}}.{{.TableName}} DROP CONSTRAINT IF EXISTS {{.ConstraintName}};

View File

@@ -0,0 +1 @@
{{- /* Drops an index by schema-qualified name; IF EXISTS tolerates re-runs.
CASCADE also removes objects that depend on the index — intentional for
the drop-changed-index migration phase, but note it is not RESTRICT. */ -}}
DROP INDEX IF EXISTS {{.SchemaName}}.{{.IndexName}} CASCADE;

View File

@@ -0,0 +1,45 @@
{{/* Reusable template fragments shared across the migration templates.
All fragments use {{- ... -}} trims so they can be spliced inline
without introducing stray whitespace. */}}
{{/* Column definition fragment: "name type [DEFAULT ...] [NOT NULL]" for a
single column value. */}}
{{- define "column_definition" -}}
{{.Name}} {{.Type}}
{{- if .Default}} DEFAULT {{.Default}}{{end}}
{{- if .NotNull}} NOT NULL{{end}}
{{- end -}}
{{/* Comma-separated column list: dot is a slice; a ", " separator is
emitted before every element except the first. */}}
{{- define "column_list" -}}
{{- range $i, $col := . -}}
{{- if $i}}, {{end}}{{$col}}
{{- end -}}
{{- end -}}
{{/* Qualified table name: "schema.table". */}}
{{- define "qualified_table" -}}
{{.SchemaName}}.{{.TableName}}
{{- end -}}
{{/* Index method clause: renders "USING <method>" only when .IndexType is
set; empty output otherwise (PostgreSQL then defaults to btree). */}}
{{- define "index_method" -}}
{{- if .IndexType}}USING {{.IndexType}}{{end -}}
{{- end -}}
{{/* Uniqueness keyword: "UNIQUE " (with trailing space) when .Unique. */}}
{{- define "unique_keyword" -}}
{{- if .Unique}}UNIQUE {{end -}}
{{- end -}}
{{/* Referential action clauses: emits ON DELETE / ON UPDATE lines only for
the actions that are set, avoiding bare invalid clauses. */}}
{{- define "referential_actions" -}}
{{- if .OnDelete}}
ON DELETE {{.OnDelete}}
{{- end}}
{{- if .OnUpdate}}
ON UPDATE {{.OnUpdate}}
{{- end}}
{{- end -}}
{{/* Comment statement for any object type. NOTE(review): relies on a custom
"quote" function being registered in the template FuncMap (it is not a
text/template builtin) — templates parsed without it will fail; confirm
the writer installs it. */}}
{{- define "comment_on" -}}
COMMENT ON {{.ObjectType}} {{.ObjectName}} IS {{quote .Comment}};
{{- end -}}