Added script execution ability
@@ -18,4 +18,5 @@ JSON, YAML, SQL, etc.).`,
func init() {
	rootCmd.AddCommand(convertCmd)
	rootCmd.AddCommand(diffCmd)
	rootCmd.AddCommand(scriptsCmd)
}
cmd/relspec/scripts.go (new file, 263 lines)
@@ -0,0 +1,263 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"sort"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||
)
|
||||
|
||||
var (
|
||||
scriptsDir string
|
||||
scriptsConn string
|
||||
scriptsSchemaName string
|
||||
scriptsDBName string
|
||||
)
|
||||
|
||||
var scriptsCmd = &cobra.Command{
|
||||
Use: "scripts",
|
||||
Short: "Manage and execute SQL migration scripts",
|
||||
Long: `Manage and execute SQL migration scripts from a directory.
|
||||
|
||||
Scripts must follow the naming pattern (both separators supported):
|
||||
{priority}_{sequence}_{name}.sql or .pgsql
|
||||
{priority}-{sequence}-{name}.sql or .pgsql
|
||||
|
||||
Example filenames (underscore format):
|
||||
1_001_create_users.sql # Priority 1, Sequence 1
|
||||
1_002_create_posts.sql # Priority 1, Sequence 2
|
||||
2_001_add_indexes.pgsql # Priority 2, Sequence 1
|
||||
|
||||
Example filenames (hyphen format):
|
||||
1-001-create-users.sql # Priority 1, Sequence 1
|
||||
1-002-create-posts.sql # Priority 1, Sequence 2
|
||||
10-10-create-newid.pgsql # Priority 10, Sequence 10
|
||||
|
||||
Both formats can be mixed in the same directory.
|
||||
Scripts are executed in order: Priority (ascending), then Sequence (ascending).`,
|
||||
}
|
||||
|
||||
var scriptsListCmd = &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List SQL scripts from a directory",
|
||||
Long: `List SQL scripts from a directory and show their execution order.
|
||||
|
||||
The scripts are read from the specified directory and displayed in the order
|
||||
they would be executed (Priority ascending, then Sequence ascending).
|
||||
|
||||
Example:
|
||||
relspec scripts list --dir ./migrations`,
|
||||
RunE: runScriptsList,
|
||||
}
|
||||
|
||||
var scriptsExecuteCmd = &cobra.Command{
|
||||
Use: "execute",
|
||||
Short: "Execute SQL scripts against a database",
|
||||
Long: `Execute SQL scripts from a directory against a PostgreSQL database.
|
||||
|
||||
Scripts are executed in order: Priority (ascending), then Sequence (ascending).
|
||||
Execution stops immediately on the first error.
|
||||
|
||||
The directory is scanned recursively for files matching the patterns:
|
||||
{priority}_{sequence}_{name}.sql or .pgsql (underscore format)
|
||||
{priority}-{sequence}-{name}.sql or .pgsql (hyphen format)
|
||||
|
||||
PostgreSQL Connection String Examples:
|
||||
postgres://username:password@localhost:5432/database_name
|
||||
postgres://username:password@localhost/database_name
|
||||
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||
postgresql://user:pass@host/dbname?sslmode=require
|
||||
|
||||
Examples:
|
||||
# Execute migration scripts
|
||||
relspec scripts execute --dir ./migrations \
|
||||
--conn "postgres://user:pass@localhost:5432/mydb"
|
||||
|
||||
# Execute with custom schema name
|
||||
relspec scripts execute --dir ./migrations \
|
||||
--conn "postgres://localhost/mydb" \
|
||||
--schema public
|
||||
|
||||
# Execute with SSL disabled
|
||||
relspec scripts execute --dir ./sql \
|
||||
--conn "postgres://user:pass@localhost/db?sslmode=disable"`,
|
||||
RunE: runScriptsExecute,
|
||||
}
|
||||
|
||||
func init() {
|
||||
// List command flags
|
||||
scriptsListCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
|
||||
scriptsListCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
|
||||
scriptsListCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
|
||||
err := scriptsListCmd.MarkFlagRequired("dir")
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
|
||||
}
|
||||
|
||||
// Execute command flags
|
||||
scriptsExecuteCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
|
||||
scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
|
||||
scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
|
||||
scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
|
||||
|
||||
err = scriptsExecuteCmd.MarkFlagRequired("dir")
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
|
||||
}
|
||||
err = scriptsExecuteCmd.MarkFlagRequired("conn")
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error marking conn flag as required: %v\n", err)
|
||||
}
|
||||
|
||||
// Add subcommands to scripts command
|
||||
scriptsCmd.AddCommand(scriptsListCmd)
|
||||
scriptsCmd.AddCommand(scriptsExecuteCmd)
|
||||
}
|
||||
|
||||
func runScriptsList(cmd *cobra.Command, args []string) error {
|
||||
fmt.Fprintf(os.Stderr, "\n=== SQL Scripts List ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Directory: %s\n\n", scriptsDir)
|
||||
|
||||
// Read scripts from directory
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: scriptsDir,
|
||||
Metadata: map[string]any{
|
||||
"schema_name": scriptsSchemaName,
|
||||
"database_name": scriptsDBName,
|
||||
},
|
||||
})
|
||||
|
||||
db, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read scripts: %w", err)
|
||||
}
|
||||
|
||||
if len(db.Schemas) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "No schemas found\n")
|
||||
return nil
|
||||
}
|
||||
|
||||
schema := db.Schemas[0]
|
||||
if len(schema.Scripts) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "No SQL scripts found matching pattern {priority}_{sequence}_{name}.sql\n")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Sort scripts by Priority then Sequence
|
||||
sortedScripts := make([]*struct {
|
||||
name string
|
||||
priority int
|
||||
sequence uint
|
||||
sqlLines int
|
||||
}, len(schema.Scripts))
|
||||
|
||||
for i, script := range schema.Scripts {
|
||||
// Count lines in the SQL content
sqlLines := 0
for _, b := range []byte(script.SQL) {
if b == '\n' {
sqlLines++
}
}
if len(script.SQL) > 0 && script.SQL[len(script.SQL)-1] != '\n' {
sqlLines++ // count the final line when there is no trailing newline
}
|
||||
|
||||
sortedScripts[i] = &struct {
|
||||
name string
|
||||
priority int
|
||||
sequence uint
|
||||
sqlLines int
|
||||
}{
|
||||
name: script.Name,
|
||||
priority: script.Priority,
|
||||
sequence: script.Sequence,
|
||||
sqlLines: sqlLines,
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(sortedScripts, func(i, j int) bool {
|
||||
if sortedScripts[i].priority != sortedScripts[j].priority {
|
||||
return sortedScripts[i].priority < sortedScripts[j].priority
|
||||
}
|
||||
return sortedScripts[i].sequence < sortedScripts[j].sequence
|
||||
})
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
|
||||
fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "No.", "Priority", "Sequence", "Name", "Lines")
|
||||
fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "----", "--------", "--------", "------------------------------", "-----")
|
||||
|
||||
for i, script := range sortedScripts {
|
||||
fmt.Fprintf(os.Stderr, "%-4d %-10d %-8d %-30s %d\n",
|
||||
i+1,
|
||||
script.priority,
|
||||
script.sequence,
|
||||
script.name,
|
||||
script.sqlLines,
|
||||
)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "\n")
|
||||
return nil
|
||||
}
|
||||
|
||||
func runScriptsExecute(cmd *cobra.Command, args []string) error {
|
||||
fmt.Fprintf(os.Stderr, "\n=== SQL Scripts Execution ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Started at: %s\n", getCurrentTimestamp())
|
||||
fmt.Fprintf(os.Stderr, "Directory: %s\n", scriptsDir)
|
||||
fmt.Fprintf(os.Stderr, "Database: %s\n\n", maskPassword(scriptsConn))
|
||||
|
||||
// Step 1: Read scripts from directory
|
||||
fmt.Fprintf(os.Stderr, "[1/2] Reading SQL scripts...\n")
|
||||
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: scriptsDir,
|
||||
Metadata: map[string]any{
|
||||
"schema_name": scriptsSchemaName,
|
||||
"database_name": scriptsDBName,
|
||||
},
|
||||
})
|
||||
|
||||
db, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read scripts: %w", err)
|
||||
}
|
||||
|
||||
if len(db.Schemas) == 0 {
|
||||
return fmt.Errorf("no schemas found")
|
||||
}
|
||||
|
||||
schema := db.Schemas[0]
|
||||
if len(schema.Scripts) == 0 {
|
||||
fmt.Fprintf(os.Stderr, " No scripts found. Nothing to execute.\n\n")
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))
|
||||
|
||||
// Step 2: Execute scripts
|
||||
fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence)...\n\n")
|
||||
|
||||
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||
Metadata: map[string]any{
|
||||
"connection_string": scriptsConn,
|
||||
},
|
||||
})
|
||||
|
||||
if err := writer.WriteSchema(schema); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "\n")
|
||||
return fmt.Errorf("execution failed: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
|
||||
fmt.Fprintf(os.Stderr, "Successfully executed %d script(s)\n\n", len(schema.Scripts))
|
||||
|
||||
return nil
|
||||
}
|
||||
docs/SCRIPTS_COMMAND.md (new file, 360 lines)
@@ -0,0 +1,360 @@
|
||||
# RelSpec Scripts Command
|
||||
|
||||
The `relspec scripts` command provides tools for managing and executing SQL migration scripts from a directory structure.
|
||||
|
||||
## Overview
|
||||
|
||||
The scripts command supports two main operations:
|
||||
- **list**: List SQL scripts from a directory in execution order
|
||||
- **execute**: Execute SQL scripts against a PostgreSQL database
|
||||
|
||||
Scripts are read from a directory (recursively) and executed in a deterministic order based on **Priority** (ascending) and **Sequence** (ascending).
|
||||
|
||||
## File Naming Convention
|
||||
|
||||
SQL scripts must follow this naming pattern (both separators are supported):
|
||||
|
||||
```
|
||||
{priority}_{sequence}_{name}.{sql|pgsql} (underscore format)
|
||||
{priority}-{sequence}-{name}.{sql|pgsql} (hyphen format)
|
||||
```
|
||||
|
||||
### Components
|
||||
|
||||
- **priority**: Integer (0-9999) - Execution priority level (lower executes first)
|
||||
- **sequence**: Integer (0-9999) - Order within priority level (lower executes first)
|
||||
- **separator**: Underscore `_` or hyphen `-` (both formats can be mixed)
|
||||
- **name**: Descriptive name (alphanumeric, underscores, hyphens)
|
||||
- **extension**: `.sql` or `.pgsql`
|
||||
|
||||
### Valid Examples
|
||||
|
||||
**Underscore format:**
|
||||
```
|
||||
1_001_create_users.sql # Priority 1, Sequence 1
|
||||
1_002_create_posts.sql # Priority 1, Sequence 2
|
||||
1_003_create_comments.pgsql # Priority 1, Sequence 3
|
||||
2_001_add_indexes.sql # Priority 2, Sequence 1
|
||||
2_002_add_constraints.sql # Priority 2, Sequence 2
|
||||
3_001_seed_users.sql # Priority 3, Sequence 1
|
||||
```
|
||||
|
||||
**Hyphen format:**
|
||||
```
|
||||
1-001-create-users.sql # Priority 1, Sequence 1
|
||||
1-002-create-posts.sql # Priority 1, Sequence 2
|
||||
1-003-create-comments.pgsql # Priority 1, Sequence 3
|
||||
10-10-create-newid.pgsql # Priority 10, Sequence 10
|
||||
```
|
||||
|
||||
**Mixed format (both in same directory):**
|
||||
```
|
||||
1_001_create_users.sql # Priority 1, Sequence 1 (underscore)
|
||||
1-002-create-posts.sql # Priority 1, Sequence 2 (hyphen)
|
||||
2_001_add_indexes.sql # Priority 2, Sequence 1 (underscore)
|
||||
```
|
||||
|
||||
**Execution Order**: files are sorted by Priority (ascending), then Sequence (ascending), regardless of which separator each file uses.
|
||||
|
||||
### Invalid Examples (Will be ignored)
|
||||
|
||||
```
|
||||
migration.sql # Missing priority/sequence
|
||||
create_users.sql # Missing priority/sequence
|
||||
1_create_users.sql # Missing sequence
|
||||
1_001_test.txt # Wrong extension
|
||||
README.md # Not a SQL file
|
||||
```
|
||||
|
||||
## Directory Structure
|
||||
|
||||
Scripts can be organized in subdirectories. The scanner recursively finds all matching SQL files:
|
||||
|
||||
```
|
||||
migrations/
|
||||
├── 1_001_create_schema.sql
|
||||
├── 1_002_create_users.sql
|
||||
├── tables/
|
||||
│ ├── 1_003_create_posts.sql
|
||||
│ └── 1_004_create_comments.pgsql
|
||||
├── indexes/
|
||||
│ └── 2_001_add_indexes.sql
|
||||
└── data/
|
||||
└── 3_001_seed_data.sql
|
||||
```
|
||||
|
||||
All files will be found and executed in Priority→Sequence order regardless of directory structure.
|
||||
|
||||
## Commands
|
||||
|
||||
### relspec scripts list
|
||||
|
||||
List all SQL scripts in a directory and show their execution order.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
relspec scripts list --dir <directory> [flags]
|
||||
```
|
||||
|
||||
**Flags:**
|
||||
- `--dir <path>` (required): Directory containing SQL scripts
|
||||
- `--schema <name>`: Schema name (default: "public")
|
||||
- `--database <name>`: Database name (default: "database")
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
relspec scripts list --dir ./migrations
|
||||
```
|
||||
|
||||
**Output:**
|
||||
```
|
||||
=== SQL Scripts List ===
|
||||
Directory: ./migrations
|
||||
|
||||
Found 5 script(s) in execution order:
|
||||
|
||||
No. Priority Sequence Name Lines
|
||||
---- -------- -------- ------------------------------ -----
|
||||
1 1 1 create_users 7
|
||||
2 1 2 create_posts 8
|
||||
3 2 1 add_indexes 4
|
||||
4 2 2 add_constraints 6
|
||||
5 3 1 seed_data 4
|
||||
```
|
||||
|
||||
### relspec scripts execute
|
||||
|
||||
Execute SQL scripts from a directory against a PostgreSQL database.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
relspec scripts execute --dir <directory> --conn <connection-string> [flags]
|
||||
```
|
||||
|
||||
**Flags:**
|
||||
- `--dir <path>` (required): Directory containing SQL scripts
|
||||
- `--conn <string>` (required): PostgreSQL connection string
|
||||
- `--schema <name>`: Schema name (default: "public")
|
||||
- `--database <name>`: Database name (default: "database")
|
||||
|
||||
**Connection String Formats:**
|
||||
|
||||
```bash
|
||||
# Standard PostgreSQL URLs
|
||||
postgres://username:password@localhost:5432/database_name
|
||||
postgres://username:password@localhost/database_name
|
||||
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||
postgresql://user:pass@host/dbname?sslmode=require
|
||||
|
||||
# Key-value format
|
||||
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
||||
```
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Execute migration scripts
|
||||
relspec scripts execute \
|
||||
--dir ./migrations \
|
||||
--conn "postgres://user:pass@localhost:5432/mydb"
|
||||
|
||||
# Execute with custom schema
|
||||
relspec scripts execute \
|
||||
--dir ./migrations \
|
||||
--conn "postgres://localhost/mydb" \
|
||||
--schema public
|
||||
|
||||
# Execute with SSL disabled
|
||||
relspec scripts execute \
|
||||
--dir ./sql \
|
||||
--conn "postgres://user:pass@localhost/db?sslmode=disable"
|
||||
|
||||
# Execute using key-value connection string
|
||||
relspec scripts execute \
|
||||
--dir ./migrations \
|
||||
--conn "host=localhost port=5432 user=admin password=secret dbname=prod"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
```
|
||||
=== SQL Scripts Execution ===
|
||||
Started at: 2025-12-30 22:30:15
|
||||
Directory: ./migrations
|
||||
Database: postgres://user:***@localhost:5432/mydb
|
||||
|
||||
[1/2] Reading SQL scripts...
|
||||
✓ Found 4 script(s)
|
||||
|
||||
[2/2] Executing scripts in order (Priority → Sequence)...
|
||||
|
||||
Executing script: create_users (Priority=1, Sequence=1)
|
||||
✓ Successfully executed: create_users
|
||||
Executing script: create_posts (Priority=1, Sequence=2)
|
||||
✓ Successfully executed: create_posts
|
||||
Executing script: add_indexes (Priority=2, Sequence=1)
|
||||
✓ Successfully executed: add_indexes
|
||||
Executing script: seed_data (Priority=2, Sequence=2)
|
||||
✓ Successfully executed: seed_data
|
||||
|
||||
=== Execution Complete ===
|
||||
Completed at: 2025-12-30 22:30:16
|
||||
Successfully executed 4 script(s)
|
||||
```
|
||||
|
||||
## Execution Behavior
|
||||
|
||||
### Execution Order
|
||||
|
||||
Scripts are **always** executed in this order:
|
||||
1. Sort by **Priority** (ascending)
|
||||
2. Within same priority, sort by **Sequence** (ascending)
|
||||
|
||||
Example:
|
||||
```
|
||||
Priority 1, Sequence 1 → Executes 1st
|
||||
Priority 1, Sequence 2 → Executes 2nd
|
||||
Priority 1, Sequence 10 → Executes 3rd
|
||||
Priority 2, Sequence 1 → Executes 4th
|
||||
Priority 2, Sequence 5 → Executes 5th
|
||||
Priority 10, Sequence 1 → Executes 6th
|
||||
```
|
||||
|
||||
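In code, this ordering is an ordinary two-key sort over the script list; a minimal sketch using the standard `sort` package and the `models.Script` fields (the same comparison the CLI applies):

```go
sort.Slice(scripts, func(i, j int) bool {
	if scripts[i].Priority != scripts[j].Priority {
		return scripts[i].Priority < scripts[j].Priority // lower priority executes first
	}
	return scripts[i].Sequence < scripts[j].Sequence // then lower sequence
})
```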
### Error Handling
|
||||
|
||||
- **Stop on First Error**: Execution stops immediately when any script fails
|
||||
- **No Automatic Rollback**: Scripts executed before the failure remain committed
|
||||
- **Error Details**: Full error message with script name, priority, and sequence
|
||||
|
||||
Example error output:
|
||||
```
|
||||
Executing script: add_indexes (Priority=2, Sequence=1)
|
||||
Error: execution failed: failed to execute script add_indexes (Priority=2, Sequence=1):
|
||||
ERROR: syntax error at or near "IDNEX" (SQLSTATE 42601)
|
||||
```
|
||||
|
||||
### Transaction Behavior
|
||||
|
||||
- Each script executes in its own implicit transaction (PostgreSQL default)
|
||||
- No automatic transaction wrapping across multiple scripts
|
||||
- For atomic migrations, manually wrap SQL in `BEGIN/COMMIT` blocks, or wrap execution yourself as sketched below
|
||||
|
||||
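If editing every file is impractical, a caller using the packages programmatically can get all-or-nothing behaviour with `database/sql`. This is a minimal sketch, not built into relspec, and the pgx stdlib driver is only an assumed choice:

```go
import (
	"database/sql"

	_ "github.com/jackc/pgx/v5/stdlib" // assumed PostgreSQL driver
)

// runAllOrNothing executes every script inside one transaction and rolls
// back all of them if any single statement fails.
func runAllOrNothing(connStr string, sqlScripts []string) error {
	db, err := sql.Open("pgx", connStr)
	if err != nil {
		return err
	}
	defer db.Close()

	tx, err := db.Begin()
	if err != nil {
		return err
	}
	for _, s := range sqlScripts {
		if _, err := tx.Exec(s); err != nil {
			_ = tx.Rollback() // discard everything executed so far
			return err
		}
	}
	return tx.Commit()
}
```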
### Empty Scripts
|
||||
|
||||
Scripts with empty SQL content are silently skipped.
|
||||
|
||||
## Use Cases
|
||||
|
||||
### Development Migrations
|
||||
|
||||
Organize database changes by priority levels:
|
||||
|
||||
```
|
||||
migrations/
|
||||
├── 1_xxx_schema.sql # Priority 1: Core schema
|
||||
├── 1_xxx_tables.sql
|
||||
├── 2_xxx_indexes.sql # Priority 2: Performance
|
||||
├── 2_xxx_constraints.sql
|
||||
└── 3_xxx_seed.sql # Priority 3: Data
|
||||
```
|
||||
|
||||
### Multi-Environment Deployments
|
||||
|
||||
Use priority levels for environment-specific scripts:
|
||||
|
||||
```
|
||||
deploy/
|
||||
├── 1_xxx_core_schema.sql # Priority 1: All environments
|
||||
├── 2_xxx_dev_data.sql # Priority 2: Dev only
|
||||
├── 2_xxx_staging_data.sql # Priority 2: Staging only
|
||||
└── 3_xxx_prod_data.sql # Priority 3: Production only
|
||||
```
|
||||
|
||||
### Incremental Rollouts
|
||||
|
||||
Use sequence for ordered feature rollouts:
|
||||
|
||||
```
|
||||
features/
|
||||
├── 1_001_feature_a_schema.sql
|
||||
├── 1_002_feature_a_data.sql
|
||||
├── 1_003_feature_b_schema.sql
|
||||
├── 1_004_feature_b_data.sql
|
||||
```
|
||||
|
||||
## Integration with RelSpec
|
||||
|
||||
The scripts command uses:
|
||||
- **Reader**: `pkg/readers/sqldir/` - Reads SQL files into `models.Schema.Scripts`
|
||||
- **Writer**: `pkg/writers/sqlexec/` - Executes scripts from `models.Schema.Scripts`
|
||||
|
||||
You can use these packages programmatically:
|
||||
|
||||
```go
|
||||
import (
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||
)
|
||||
|
||||
// Read scripts
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: "./migrations",
|
||||
})
|
||||
db, _ := reader.ReadDatabase()
|
||||
|
||||
// Execute scripts
|
||||
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||
Metadata: map[string]any{
|
||||
"connection_string": "postgres://localhost/mydb",
|
||||
},
|
||||
})
|
||||
writer.WriteDatabase(db)
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Naming
|
||||
|
||||
- Use zero-padded sequences: `001`, `002`, `010` (not `1`, `2`, `10`)
|
||||
- Use descriptive names: `create_users_table`, not `table1`
|
||||
- Group related changes: same priority for related DDL
|
||||
|
||||
### Organization
|
||||
|
||||
- Keep scripts small and focused (one logical change per file)
|
||||
- Use priority levels to organize phases (schema → indexes → data)
|
||||
- Document complex migrations with SQL comments
|
||||
|
||||
### Safety
|
||||
|
||||
- Always test migrations in development first
|
||||
- Use `scripts list` to verify execution order before running
|
||||
- Back up production databases before executing
|
||||
- Consider using transactions for critical changes
|
||||
- Review generated SQL before execution
|
||||
|
||||
### Version Control
|
||||
|
||||
- Commit scripts to version control
|
||||
- Never modify executed scripts (create new ones instead)
|
||||
- Use meaningful commit messages
|
||||
- Tag releases with migration checkpoints
|
||||
|
||||
## Limitations
|
||||
|
||||
- PostgreSQL only (currently)
|
||||
- No built-in rollback support
|
||||
- No migration state tracking (no "already executed" detection); a user-side workaround is sketched below
|
||||
- No dry-run mode
|
||||
- Stops on first error (no partial execution tracking)
|
||||
|
||||
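Until the planned state tracking lands (see Future Enhancements below), callers can keep their own record of applied scripts. A minimal, hypothetical sketch; the table and column names are illustrative and not part of relspec, and an open `*sql.DB` connection (`database/sql`) is assumed:

```go
// markApplied records a script name after it has executed successfully,
// so a wrapper can skip it on the next run.
func markApplied(db *sql.DB, name string) error {
	_, err := db.Exec(`CREATE TABLE IF NOT EXISTS applied_scripts (
		name        text PRIMARY KEY,
		applied_at  timestamptz NOT NULL DEFAULT now()
	)`)
	if err != nil {
		return err
	}
	_, err = db.Exec(`INSERT INTO applied_scripts (name) VALUES ($1)
		ON CONFLICT (name) DO NOTHING`, name)
	return err
}
```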
## Future Enhancements
|
||||
|
||||
Potential future features:
|
||||
- Migration state tracking (executed scripts table)
|
||||
- Rollback script support (using `models.Script.Rollback` field)
|
||||
- Dry-run mode (validate without executing)
|
||||
- Transaction wrapping (all-or-nothing execution)
|
||||
- Multi-database support (MySQL, SQLite, etc.)
|
||||
- Parallel execution for independent scripts
|
||||
docs/SCRIPTS_EXAMPLES.md (new file, 393 lines)
@@ -0,0 +1,393 @@
|
||||
# RelSpec Scripts Command - Quick Examples
|
||||
|
||||
## Basic Workflow
|
||||
|
||||
### 1. Create migration directory structure
|
||||
|
||||
```bash
|
||||
mkdir -p migrations
|
||||
```
|
||||
|
||||
### 2. Create migration scripts
|
||||
|
||||
Both underscore and hyphen formats are supported. Examples below use underscore format,
|
||||
but you can also use: `1-001-create-users-table.sql`
|
||||
|
||||
```bash
|
||||
# Priority 1: Core schema
|
||||
cat > migrations/1_001_create_users_table.sql << 'EOF'
|
||||
CREATE TABLE users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
username VARCHAR(100) NOT NULL UNIQUE,
|
||||
email VARCHAR(255) NOT NULL UNIQUE,
|
||||
password_hash VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX idx_users_username ON users(username);
|
||||
CREATE INDEX idx_users_email ON users(email);
|
||||
EOF
|
||||
|
||||
cat > migrations/1_002_create_posts_table.sql << 'EOF'
|
||||
CREATE TABLE posts (
|
||||
id SERIAL PRIMARY KEY,
|
||||
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
title VARCHAR(200) NOT NULL,
|
||||
content TEXT,
|
||||
published BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
EOF
|
||||
|
||||
# Priority 2: Additional indexes
|
||||
cat > migrations/2_001_add_post_indexes.sql << 'EOF'
|
||||
CREATE INDEX idx_posts_user_id ON posts(user_id);
|
||||
CREATE INDEX idx_posts_published ON posts(published);
|
||||
CREATE INDEX idx_posts_created_at ON posts(created_at);
|
||||
EOF
|
||||
|
||||
# Priority 3: Seed data
|
||||
cat > migrations/3_001_seed_admin_user.sql << 'EOF'
|
||||
INSERT INTO users (username, email, password_hash)
|
||||
VALUES ('admin', 'admin@example.com', 'hashed_password_here')
|
||||
ON CONFLICT (username) DO NOTHING;
|
||||
EOF
|
||||
```
|
||||
|
||||
### 3. List scripts to verify order
|
||||
|
||||
```bash
|
||||
relspec scripts list --dir migrations
|
||||
```
|
||||
|
||||
Output:
|
||||
```
|
||||
=== SQL Scripts List ===
|
||||
Directory: migrations
|
||||
|
||||
Found 4 script(s) in execution order:
|
||||
|
||||
No. Priority Sequence Name Lines
|
||||
---- -------- -------- ------------------------------ -----
|
||||
1 1 1 create_users_table 13
|
||||
2 1 2 create_posts_table 11
|
||||
3 2 1 add_post_indexes 4
|
||||
4 3 1 seed_admin_user 4
|
||||
```
|
||||
|
||||
### 4. Execute against database
|
||||
|
||||
```bash
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://myuser:mypass@localhost:5432/myapp"
|
||||
```
|
||||
|
||||
## Real-World Examples
|
||||
|
||||
### Example 1: E-commerce Database Setup
|
||||
|
||||
```bash
|
||||
# Directory structure
|
||||
migrations/
|
||||
├── 1_001_create_users.sql
|
||||
├── 1_002_create_products.sql
|
||||
├── 1_003_create_orders.sql
|
||||
├── 1_004_create_order_items.sql
|
||||
├── 2_001_add_indexes.sql
|
||||
├── 2_002_add_constraints.sql
|
||||
├── 3_001_seed_categories.sql
|
||||
└── 3_002_seed_sample_products.sql
|
||||
|
||||
# Execute
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://ecommerce_user:pass@db.example.com:5432/ecommerce_prod?sslmode=require"
|
||||
```
|
||||
|
||||
### Example 2: Multi-Schema Database
|
||||
|
||||
```bash
|
||||
# Organize by schema using subdirectories
|
||||
migrations/
|
||||
├── public/
|
||||
│ ├── 1_001_create_users.sql
|
||||
│ └── 1_002_create_sessions.sql
|
||||
├── analytics/
|
||||
│ ├── 1_001_create_events.sql
|
||||
│ └── 2_001_create_views.sql
|
||||
└── reporting/
|
||||
└── 1_001_create_reports.sql
|
||||
|
||||
# Execute (all schemas processed together)
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://localhost/multi_schema_db" \
|
||||
--schema public
|
||||
```
|
||||
|
||||
### Example 3: Development Environment Setup
|
||||
|
||||
```bash
|
||||
# Create local development database
|
||||
createdb myapp_dev
|
||||
|
||||
# Run migrations
|
||||
relspec scripts execute \
|
||||
--dir ./db/migrations \
|
||||
--conn "postgres://localhost/myapp_dev?sslmode=disable"
|
||||
|
||||
# Verify
|
||||
psql myapp_dev -c "\dt"
|
||||
```
|
||||
|
||||
### Example 4: CI/CD Pipeline
|
||||
|
||||
```yaml
|
||||
# .github/workflows/deploy.yml
|
||||
- name: Run database migrations
|
||||
run: |
|
||||
relspec scripts list --dir migrations
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "${{ secrets.DATABASE_URL }}"
|
||||
```
|
||||
|
||||
### Example 5: Docker Compose Integration
|
||||
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
environment:
|
||||
POSTGRES_DB: myapp
|
||||
POSTGRES_USER: myuser
|
||||
POSTGRES_PASSWORD: mypass
|
||||
ports:
|
||||
- "5432:5432"
|
||||
|
||||
migrate:
|
||||
image: relspec:latest
|
||||
depends_on:
|
||||
- postgres
|
||||
volumes:
|
||||
- ./migrations:/migrations
|
||||
command: >
|
||||
scripts execute
|
||||
--dir /migrations
|
||||
--conn "postgres://myuser:mypass@postgres:5432/myapp"
|
||||
```
|
||||
|
||||
```bash
|
||||
# Run migrations with docker-compose
|
||||
docker-compose up -d postgres
|
||||
sleep 5 # Wait for postgres to be ready
|
||||
docker-compose run --rm migrate
|
||||
```
|
||||
|
||||
### Example 6: Incremental Feature Rollout
|
||||
|
||||
```bash
|
||||
# Feature branch structure
|
||||
migrations/
|
||||
├── 1_100_user_profiles_schema.sql # Feature: User profiles
|
||||
├── 1_101_user_profiles_constraints.sql
|
||||
├── 1_102_user_profiles_indexes.sql
|
||||
├── 2_100_notifications_schema.sql # Feature: Notifications
|
||||
├── 2_101_notifications_constraints.sql
|
||||
└── 2_102_notifications_indexes.sql
|
||||
|
||||
# Deploy just user profiles (Priority 1)
|
||||
# Then later deploy notifications (Priority 2)
|
||||
```
|
||||
|
||||
### Example 7: Rollback Strategy (Manual)
|
||||
|
||||
```bash
|
||||
# Forward migration
|
||||
cat > migrations/1_001_add_column.sql << 'EOF'
|
||||
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
|
||||
EOF
|
||||
|
||||
# Create manual rollback script (not auto-executed)
|
||||
cat > rollbacks/1_001_remove_column.sql << 'EOF'
|
||||
ALTER TABLE users DROP COLUMN phone;
|
||||
EOF
|
||||
|
||||
# If needed, manually execute rollback
|
||||
psql myapp -f rollbacks/1_001_remove_column.sql
|
||||
```
|
||||
|
||||
### Example 8: Complex Schema Changes
|
||||
|
||||
```bash
|
||||
# migrations/1_001_alter_users_table.sql
|
||||
BEGIN;
|
||||
|
||||
-- Add new column
|
||||
ALTER TABLE users ADD COLUMN full_name VARCHAR(200);
|
||||
|
||||
-- Populate from existing data
|
||||
UPDATE users SET full_name = username WHERE full_name IS NULL;
|
||||
|
||||
-- Make it required
|
||||
ALTER TABLE users ALTER COLUMN full_name SET NOT NULL;
|
||||
|
||||
-- Add index
|
||||
CREATE INDEX idx_users_full_name ON users(full_name);
|
||||
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
Execute:
|
||||
```bash
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://localhost/myapp"
|
||||
```
|
||||
|
||||
## File Naming Format Examples
|
||||
|
||||
### Underscore Format (Traditional)
|
||||
```
|
||||
migrations/
|
||||
├── 1_001_create_users.sql
|
||||
├── 1_002_create_posts.sql
|
||||
├── 2_001_add_indexes.sql
|
||||
└── 3_001_seed_data.sql
|
||||
```
|
||||
|
||||
### Hyphen Format (Alternative)
|
||||
```
|
||||
migrations/
|
||||
├── 1-001-create-users.sql
|
||||
├── 1-002-create-posts.sql
|
||||
├── 10-10-create-newid.pgsql
|
||||
└── 2-001-add-indexes.sql
|
||||
```
|
||||
|
||||
### Mixed Format (Both in Same Directory)
|
||||
```
|
||||
migrations/
|
||||
├── 1_001_create_users.sql # Underscore format
|
||||
├── 1-002-create-posts.sql # Hyphen format
|
||||
├── 2_001_add_indexes.sql # Underscore format
|
||||
└── 10-10-special-migration.pgsql # Hyphen format
|
||||
```
|
||||
|
||||
**Note:** All three approaches work identically - use whichever naming style you prefer!
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Pattern 1: Schema → Indexes → Constraints → Data
|
||||
|
||||
```
|
||||
1_xxx_*.sql # Tables and basic structure
|
||||
2_xxx_*.sql # Indexes for performance
|
||||
3_xxx_*.sql # Foreign keys and constraints
|
||||
4_xxx_*.sql # Seed/reference data
|
||||
```
|
||||
|
||||
### Pattern 2: Feature-Based Organization
|
||||
|
||||
```
|
||||
1_001_feature_auth_users.sql
|
||||
1_002_feature_auth_sessions.sql
|
||||
1_003_feature_auth_permissions.sql
|
||||
2_001_feature_blog_posts.sql
|
||||
2_002_feature_blog_comments.sql
|
||||
3_001_feature_payments_transactions.sql
|
||||
```
|
||||
|
||||
### Pattern 3: Date-Based Versioning
|
||||
|
||||
```
|
||||
1_20250130_create_users.sql
|
||||
2_20250131_add_user_indexes.sql
|
||||
3_20250201_create_posts.sql   # date stamps exceed the suggested 0-9999 sequence range but still parse
|
||||
```
|
||||
|
||||
### Pattern 4: Environment-Specific Scripts
|
||||
|
||||
```bash
|
||||
# Base migrations (all environments)
|
||||
migrations/base/
|
||||
├── 1_001_create_users.sql
|
||||
├── 1_002_create_products.sql
|
||||
|
||||
# Development-specific
|
||||
migrations/dev/
|
||||
└── 9_001_seed_test_data.sql
|
||||
|
||||
# Production-specific
|
||||
migrations/prod/
|
||||
└── 9_001_seed_production_config.sql
|
||||
|
||||
# Execute different paths based on environment
|
||||
ENV=dev
|
||||
relspec scripts execute \
|
||||
--dir migrations/base \
|
||||
--conn "postgres://localhost/myapp_${ENV}"
|
||||
|
||||
relspec scripts execute \
|
||||
--dir migrations/${ENV} \
|
||||
--conn "postgres://localhost/myapp_${ENV}"
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Check script order before execution
|
||||
```bash
|
||||
relspec scripts list --dir migrations
|
||||
```
|
||||
|
||||
### Test against local database first
|
||||
```bash
|
||||
# Create test database
|
||||
createdb myapp_test
|
||||
|
||||
# Test migrations
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://localhost/myapp_test"
|
||||
|
||||
# Inspect results
|
||||
psql myapp_test
|
||||
|
||||
# Cleanup
|
||||
dropdb myapp_test
|
||||
```
|
||||
|
||||
### Validate SQL syntax
|
||||
```bash
|
||||
# Run each script inside a transaction that is rolled back, so no changes are kept
for f in migrations/*.sql; do
  echo "Checking $f..."
  { echo "BEGIN;"; cat "$f"; echo "ROLLBACK;"; } | psql myapp -v ON_ERROR_STOP=1
done
|
||||
```
|
||||
|
||||
### Debug connection issues
|
||||
```bash
|
||||
# Test connection string
|
||||
psql "postgres://user:pass@localhost:5432/myapp"
|
||||
|
||||
# If that works, use the same string for relspec
|
||||
relspec scripts execute \
|
||||
--dir migrations \
|
||||
--conn "postgres://user:pass@localhost:5432/myapp"
|
||||
```
|
||||
|
||||
## Tips
|
||||
|
||||
1. **Always review execution order** with `list` before running `execute`
|
||||
2. **Test in development** before running against production
|
||||
3. **Use zero-padded sequences** (001, 002, not 1, 2) for consistent sorting
|
||||
4. **Keep scripts idempotent** when possible (use IF NOT EXISTS, ON CONFLICT, etc.)
|
||||
5. **Back up production** before running migrations
|
||||
6. **Use transactions** for complex multi-statement migrations
|
||||
7. **Document breaking changes** with SQL comments in the migration files
|
||||
8. **Version control everything** - commit migrations with code changes
|
||||
pkg/readers/sqldir/README.md (new file, 160 lines)
@@ -0,0 +1,160 @@
|
||||
# SQL Directory Reader
|
||||
|
||||
The SQL Directory Reader (`sqldir`) reads SQL scripts from a directory structure and populates the `Scripts` field of a `Schema`. It supports recursive directory scanning and extracts priority, sequence, and name information from filenames.
|
||||
|
||||
## File Naming Convention
|
||||
|
||||
Scripts must follow this naming pattern (supports both underscores and hyphens as separators):
|
||||
|
||||
```
|
||||
{priority}_{sequence}_{name}.{sql|pgsql}
|
||||
{priority}-{sequence}-{name}.{sql|pgsql}
|
||||
```
|
||||
|
||||
### Components
|
||||
|
||||
- **priority**: Integer (0-9999) - Defines execution order (lower executes first)
|
||||
- **sequence**: Integer (0-9999) - Defines order within the same priority level
|
||||
- **separator**: Underscore `_` or hyphen `-` (can be mixed)
|
||||
- **name**: Descriptive name (alphanumeric, underscores, hyphens allowed)
|
||||
- **extension**: `.sql` or `.pgsql`
|
||||
|
||||
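Internally, the reader matches filenames with a single regular expression; the pattern below is taken from `reader.go` in this package:

```go
// {priority}{sep}{sequence}{sep}{name}.sql or .pgsql, where sep is _ or -
pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)
// e.g. "1_001_create_users.sql" -> priority "1", sequence "001", name "create_users"
```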
### Examples
|
||||
|
||||
```
|
||||
migrations/
|
||||
├── 1_001_create_schema.sql # Priority 1, Sequence 1 (underscore format)
|
||||
├── 1-002-create-users-table.sql # Priority 1, Sequence 2 (hyphen format)
|
||||
├── 1_003_create_posts_table.pgsql # Priority 1, Sequence 3 (underscore format)
|
||||
├── 2-001-add-indexes.sql # Priority 2, Sequence 1 (hyphen format)
|
||||
├── 2_002_add_constraints.sql # Priority 2, Sequence 2 (underscore format)
|
||||
├── 10-10-create-newid.pgsql # Priority 10, Sequence 10 (hyphen format)
|
||||
└── subdirectory/
|
||||
└── 3_001_seed_data.sql # Priority 3, Sequence 1 (subdirs supported)
|
||||
```
|
||||
|
||||
**Execution Order**: 1→2→3→4→5→6→7 (sorted by Priority ascending, then Sequence ascending)
|
||||
|
||||
**Both formats can be mixed** in the same directory - the reader handles both seamlessly.
|
||||
|
||||
### Invalid Filenames (Ignored)
|
||||
|
||||
- `migration.sql` - Missing priority/sequence
|
||||
- `1_create_users.sql` - Missing sequence
|
||||
- `create_users.sql` - Missing priority/sequence
|
||||
- `1_001_test.txt` - Wrong extension
|
||||
- `readme.md` - Not a SQL file
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```go
|
||||
import (
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||
)
|
||||
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: "/path/to/migrations",
|
||||
Metadata: map[string]any{
|
||||
"schema_name": "public", // Optional, defaults to "public"
|
||||
"database_name": "myapp", // Optional, defaults to "database"
|
||||
},
|
||||
})
|
||||
|
||||
// Read all scripts
|
||||
database, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Access scripts
|
||||
for _, schema := range database.Schemas {
|
||||
for _, script := range schema.Scripts {
|
||||
fmt.Printf("Script: %s (P:%d S:%d)\n",
|
||||
script.Name, script.Priority, script.Sequence)
|
||||
fmt.Printf("SQL: %s\n", script.SQL)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Read Schema Only
|
||||
|
||||
```go
|
||||
schema, err := reader.ReadSchema()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
fmt.Printf("Found %d scripts\n", len(schema.Scripts))
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- **Recursive Directory Scanning**: Automatically scans all subdirectories
|
||||
- **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
|
||||
- **Flexible Naming**: Extract metadata from filename patterns
|
||||
- **Error Handling**: Validates directory existence and file accessibility
|
||||
- **Schema Integration**: Scripts are added to the standard RelSpec `Schema` model
|
||||
|
||||
## Script Model
|
||||
|
||||
Each script is stored as a `models.Script`:
|
||||
|
||||
```go
|
||||
type Script struct {
|
||||
Name string // Extracted from filename (e.g., "create_users")
|
||||
Description string // Auto-generated description with file path
|
||||
SQL string // Complete SQL content from file
|
||||
Priority int // Execution priority from filename
|
||||
Sequence uint // Execution sequence from filename
|
||||
// ... other fields available but not populated by this reader
|
||||
}
|
||||
```
|
||||
|
||||
## Integration with SQL Executor
|
||||
|
||||
The SQL Directory Reader is designed to work seamlessly with the SQL Executor Writer:
|
||||
|
||||
```go
|
||||
// Read scripts
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: "./migrations",
|
||||
})
|
||||
db, _ := reader.ReadDatabase()
|
||||
|
||||
// Execute scripts
|
||||
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||
Metadata: map[string]any{
|
||||
"connection_string": "postgres://localhost/mydb",
|
||||
},
|
||||
})
|
||||
writer.WriteDatabase(db) // Executes in Priority→Sequence order
|
||||
```
|
||||
|
||||
See `pkg/writers/sqlexec/README.md` for more details on script execution.
|
||||
|
||||
## Error Handling
|
||||
|
||||
The reader will return errors for:
|
||||
- Non-existent directory paths
|
||||
- Inaccessible directories or files
|
||||
- Invalid file permissions
|
||||
- File read failures
|
||||
|
||||
Files that don't match the naming pattern are silently ignored (not treated as errors).
|
||||
|
||||
## Testing
|
||||
|
||||
Run tests:
|
||||
```bash
|
||||
go test ./pkg/readers/sqldir/
|
||||
```
|
||||
|
||||
Tests include:
|
||||
- Valid file parsing
|
||||
- Recursive directory scanning
|
||||
- Invalid filename handling
|
||||
- Empty directory handling
|
||||
- Error conditions
|
||||
pkg/readers/sqldir/example_test.go (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
package sqldir_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||
)
|
||||
|
||||
// Example demonstrates how to read SQL scripts from a directory and execute them
|
||||
func Example() {
|
||||
// Step 1: Read SQL scripts from a directory
|
||||
// Directory structure example:
|
||||
// migrations/
|
||||
// 1_001_create_schema.sql
|
||||
// 1_002_create_users_table.sql
|
||||
// 1_003_create_posts_table.pgsql
|
||||
// 2_001_add_indexes.sql
|
||||
// 2_002_seed_data.sql
|
||||
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: "/path/to/migrations",
|
||||
Metadata: map[string]any{
|
||||
"schema_name": "public",
|
||||
"database_name": "myapp",
|
||||
},
|
||||
})
|
||||
|
||||
// Read the database schema with scripts
|
||||
database, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to read scripts: %v", err)
|
||||
}
|
||||
|
||||
fmt.Printf("Read %d schemas\n", len(database.Schemas))
|
||||
fmt.Printf("Found %d scripts in schema '%s'\n",
|
||||
len(database.Schemas[0].Scripts),
|
||||
database.Schemas[0].Name)
|
||||
|
||||
// Step 2: Execute the scripts against a PostgreSQL database
|
||||
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||
Metadata: map[string]any{
|
||||
"connection_string": "postgres://user:password@localhost:5432/myapp?sslmode=disable",
|
||||
},
|
||||
})
|
||||
|
||||
// Execute all scripts in Priority then Sequence order
|
||||
if err := writer.WriteDatabase(database); err != nil {
|
||||
log.Fatalf("Failed to execute scripts: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("All scripts executed successfully!")
|
||||
}
|
||||
|
||||
// Example_withSingleSchema shows how to read and execute scripts for a single schema
|
||||
func Example_withSingleSchema() {
|
||||
// Read scripts
|
||||
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||
FilePath: "/path/to/migrations",
|
||||
})
|
||||
|
||||
schema, err := reader.ReadSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to read schema: %v", err)
|
||||
}
|
||||
|
||||
// Execute scripts
|
||||
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||
Metadata: map[string]any{
|
||||
"connection_string": "postgres://localhost/testdb",
|
||||
},
|
||||
})
|
||||
|
||||
if err := writer.WriteSchema(schema); err != nil {
|
||||
log.Fatalf("Failed to execute scripts: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("Schema scripts executed successfully!")
|
||||
}
|
||||
|
||||
// Example_fileNamingConvention shows the expected file naming pattern
|
||||
func Example_fileNamingConvention() {
|
||||
// File naming pattern: {priority}_{sequence}_{name}.sql or .pgsql
|
||||
// OR: {priority}-{sequence}-{name}.sql or .pgsql
|
||||
//
|
||||
// Both underscore (_) and hyphen (-) separators are supported and can be mixed.
|
||||
//
|
||||
// Components:
|
||||
// - priority: Integer (0-9999) - Scripts with lower priority execute first
|
||||
// - sequence: Integer (0-9999) - Within same priority, lower sequence executes first
|
||||
// - separator: Underscore (_) or hyphen (-)
|
||||
// - name: Descriptive name (alphanumeric, underscores, hyphens)
|
||||
// - extension: .sql or .pgsql
|
||||
//
|
||||
// Examples (underscore format):
|
||||
// ✓ 1_001_create_users.sql (Priority=1, Sequence=1)
|
||||
// ✓ 1_002_create_posts.sql (Priority=1, Sequence=2)
|
||||
// ✓ 2_001_add_indexes.pgsql (Priority=2, Sequence=1)
|
||||
// ✓ 10_100_migration.sql (Priority=10, Sequence=100)
|
||||
//
|
||||
// Examples (hyphen format):
|
||||
// ✓ 1-001-create-users.sql (Priority=1, Sequence=1)
|
||||
// ✓ 1-002-create-posts.sql (Priority=1, Sequence=2)
|
||||
// ✓ 2-001-add-indexes.pgsql (Priority=2, Sequence=1)
|
||||
// ✓ 10-10-create-newid.pgsql (Priority=10, Sequence=10)
|
||||
//
|
||||
// Mixed format (both in same directory):
|
||||
// ✓ 1_001_create_users.sql (underscore format)
|
||||
// ✓ 1-002-create-posts.sql (hyphen format)
|
||||
// ✓ 2_001_add_indexes.sql (underscore format)
|
||||
//
|
||||
// Execution order for mixed examples:
|
||||
// 1. 1_001_create_users.sql (Priority 1, Sequence 1)
|
||||
// 2. 1-002-create-posts.sql (Priority 1, Sequence 2)
|
||||
// 3. 2_001_add_indexes.sql (Priority 2, Sequence 1)
|
||||
//
|
||||
// Invalid filenames (will be ignored):
|
||||
// ✗ migration.sql (missing priority/sequence)
|
||||
// ✗ 1_create_users.sql (missing sequence)
|
||||
// ✗ create_users.sql (missing priority/sequence)
|
||||
// ✗ 1_001_create_users.txt (wrong extension)
|
||||
|
||||
fmt.Println("See comments for file naming conventions")
|
||||
}
|
||||
pkg/readers/sqldir/reader.go (new file, 171 lines)
@@ -0,0 +1,171 @@
|
||||
package sqldir
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
)
|
||||
|
||||
// Reader implements the readers.Reader interface for SQL script directories
|
||||
type Reader struct {
|
||||
options *readers.ReaderOptions
|
||||
}
|
||||
|
||||
// NewReader creates a new SQL directory reader
|
||||
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||
return &Reader{
|
||||
options: options,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadDatabase reads all SQL scripts from a directory into a Database
|
||||
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||
if r.options.FilePath == "" {
|
||||
return nil, fmt.Errorf("directory path is required")
|
||||
}
|
||||
|
||||
// Check if directory exists
|
||||
info, err := os.Stat(r.options.FilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to access directory: %w", err)
|
||||
}
|
||||
if !info.IsDir() {
|
||||
return nil, fmt.Errorf("path is not a directory: %s", r.options.FilePath)
|
||||
}
|
||||
|
||||
// Read scripts from directory
|
||||
scripts, err := r.readScripts()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read scripts: %w", err)
|
||||
}
|
||||
|
||||
// Get schema name from metadata or use default
|
||||
schemaName := "public"
|
||||
if name, ok := r.options.Metadata["schema_name"].(string); ok && name != "" {
|
||||
schemaName = name
|
||||
}
|
||||
|
||||
// Create schema with scripts
|
||||
schema := &models.Schema{
|
||||
Name: schemaName,
|
||||
Scripts: scripts,
|
||||
}
|
||||
|
||||
// Get database name from metadata or use default
|
||||
dbName := "database"
|
||||
if name, ok := r.options.Metadata["database_name"].(string); ok && name != "" {
|
||||
dbName = name
|
||||
}
|
||||
|
||||
// Create database with schema
|
||||
database := &models.Database{
|
||||
Name: dbName,
|
||||
Schemas: []*models.Schema{schema},
|
||||
}
|
||||
|
||||
// Set back-reference
|
||||
schema.RefDatabase = database
|
||||
|
||||
return database, nil
|
||||
}
|
||||
|
||||
// ReadSchema reads all SQL scripts from a directory into a Schema
|
||||
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||
db, err := r.ReadDatabase()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(db.Schemas) == 0 {
|
||||
return nil, fmt.Errorf("no schema found")
|
||||
}
|
||||
return db.Schemas[0], nil
|
||||
}
|
||||
|
||||
// ReadTable is not applicable for SQL script directories
|
||||
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||
return nil, fmt.Errorf("ReadTable is not supported for SQL script directories")
|
||||
}
|
||||
|
||||
// readScripts recursively scans the directory for SQL files and parses them into Script models
|
||||
func (r *Reader) readScripts() ([]*models.Script, error) {
|
||||
var scripts []*models.Script
|
||||
|
||||
// Regular expression to parse filename: {priority}{sep}{sequence}{sep}{name}.sql or .pgsql
|
||||
// Separator can be underscore (_) or hyphen (-)
|
||||
// Example: 1_001_create_users.sql -> priority=1, sequence=001, name=create_users
|
||||
// Example: 2_005_add_indexes.pgsql -> priority=2, sequence=005, name=add_indexes
|
||||
// Example: 10-10-create-newid.pgsql -> priority=10, sequence=10, name=create-newid
|
||||
pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)
|
||||
|
||||
err := filepath.WalkDir(r.options.FilePath, func(path string, d os.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip directories
|
||||
if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get filename
|
||||
filename := d.Name()
|
||||
|
||||
// Match against pattern
|
||||
matches := pattern.FindStringSubmatch(filename)
|
||||
if matches == nil {
|
||||
// Skip files that don't match the pattern
|
||||
return nil
|
||||
}
|
||||
|
||||
// Parse priority
|
||||
priority, err := strconv.Atoi(matches[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid priority in filename %s: %w", filename, err)
|
||||
}
|
||||
|
||||
// Parse sequence
|
||||
sequence, err := strconv.ParseUint(matches[2], 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid sequence in filename %s: %w", filename, err)
|
||||
}
|
||||
|
||||
// Extract name
|
||||
name := matches[3]
|
||||
|
||||
// Read SQL content
|
||||
content, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read file %s: %w", path, err)
|
||||
}
|
||||
|
||||
// Get relative path from base directory
|
||||
relPath, err := filepath.Rel(r.options.FilePath, path)
|
||||
if err != nil {
|
||||
relPath = path
|
||||
}
|
||||
|
||||
// Create Script model
|
||||
script := &models.Script{
|
||||
Name: name,
|
||||
Description: fmt.Sprintf("SQL script from %s", relPath),
|
||||
SQL: string(content),
|
||||
Priority: priority,
|
||||
Sequence: uint(sequence),
|
||||
}
|
||||
|
||||
scripts = append(scripts, script)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return scripts, nil
|
||||
}
|
||||
pkg/readers/sqldir/reader_test.go (new file, 375 lines)
@@ -0,0 +1,375 @@
|
||||
package sqldir
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
)
|
||||
|
||||
func TestReader_ReadDatabase(t *testing.T) {
|
||||
// Create temporary test directory
|
||||
tempDir, err := os.MkdirTemp("", "sqldir-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp directory: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tempDir)
|
||||
|
||||
// Create test SQL files with both underscore and hyphen separators
|
||||
testFiles := map[string]string{
|
||||
"1_001_create_users.sql": "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT);",
|
||||
"1_002_create_posts.sql": "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT);",
|
||||
"2_001_add_indexes.sql": "CREATE INDEX idx_posts_user_id ON posts(user_id);",
|
||||
"1_003_seed_data.pgsql": "INSERT INTO users (name) VALUES ('Alice'), ('Bob');",
|
||||
"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL PRIMARY KEY);",
|
||||
"2-005-add-column.sql": "ALTER TABLE users ADD COLUMN email TEXT;",
|
||||
}
|
||||
|
||||
for filename, content := range testFiles {
|
||||
filePath := filepath.Join(tempDir, filename)
|
||||
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Create subdirectory with additional script
|
||||
subDir := filepath.Join(tempDir, "migrations")
|
||||
if err := os.MkdirAll(subDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create subdirectory: %v", err)
|
||||
}
|
||||
subFile := filepath.Join(subDir, "3_001_add_column.sql")
|
||||
if err := os.WriteFile(subFile, []byte("ALTER TABLE users ADD COLUMN email TEXT;"), 0644); err != nil {
|
||||
t.Fatalf("Failed to create subdirectory file: %v", err)
|
||||
}
|
||||
|
||||
// Create reader
|
||||
reader := NewReader(&readers.ReaderOptions{
|
||||
FilePath: tempDir,
|
||||
Metadata: map[string]any{
|
||||
"schema_name": "test_schema",
|
||||
"database_name": "test_db",
|
||||
},
|
||||
})
|
||||
|
||||
// Read database
|
||||
db, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
t.Fatalf("ReadDatabase failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify database
|
||||
if db.Name != "test_db" {
|
||||
t.Errorf("Expected database name 'test_db', got '%s'", db.Name)
|
||||
}
|
||||
|
||||
if len(db.Schemas) != 1 {
|
||||
t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
|
||||
}
|
||||
|
||||
schema := db.Schemas[0]
|
||||
if schema.Name != "test_schema" {
|
||||
t.Errorf("Expected schema name 'test_schema', got '%s'", schema.Name)
|
||||
}
|
||||
|
||||
// Verify scripts (should be 7 total: 4 underscore + 2 hyphen + 1 subdirectory)
|
||||
if len(schema.Scripts) != 7 {
|
||||
t.Fatalf("Expected 7 scripts, got %d", len(schema.Scripts))
|
||||
}
|
||||
|
||||
// Verify script details
|
||||
expectedScripts := []struct {
|
||||
name string
|
||||
priority int
|
||||
sequence uint
|
||||
}{
|
||||
{"create_users", 1, 1},
|
||||
{"create_posts", 1, 2},
|
||||
{"seed_data", 1, 3},
|
||||
{"add_indexes", 2, 1},
|
||||
{"add-column", 2, 5},
|
||||
{"add_column", 3, 1},
|
||||
{"create-newid", 10, 10},
|
||||
}
|
||||
|
||||
scriptMap := make(map[string]*struct {
|
||||
priority int
|
||||
sequence uint
|
||||
sql string
|
||||
})
|
||||
	for _, script := range schema.Scripts {
		scriptMap[script.Name] = &struct {
			priority int
			sequence uint
			sql      string
		}{
			priority: script.Priority,
			sequence: script.Sequence,
			sql:      script.SQL,
		}
	}

	for _, expected := range expectedScripts {
		script, exists := scriptMap[expected.name]
		if !exists {
			t.Errorf("Expected script '%s' not found", expected.name)
			continue
		}
		if script.priority != expected.priority {
			t.Errorf("Script '%s': expected priority %d, got %d",
				expected.name, expected.priority, script.priority)
		}
		if script.sequence != expected.sequence {
			t.Errorf("Script '%s': expected sequence %d, got %d",
				expected.name, expected.sequence, script.sequence)
		}
		if script.sql == "" {
			t.Errorf("Script '%s': SQL content is empty", expected.name)
		}
	}
}

func TestReader_ReadSchema(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test SQL file
	testFile := filepath.Join(tempDir, "1_001_test.sql")
	if err := os.WriteFile(testFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	// Read schema
	schema, err := reader.ReadSchema()
	if err != nil {
		t.Fatalf("ReadSchema failed: %v", err)
	}

	// Verify schema
	if schema.Name != "public" {
		t.Errorf("Expected default schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Scripts) != 1 {
		t.Fatalf("Expected 1 script, got %d", len(schema.Scripts))
	}
}

func TestReader_InvalidDirectory(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{
		FilePath: "/nonexistent/directory",
	})

	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for nonexistent directory, got nil")
	}
}

func TestReader_EmptyDirectory(t *testing.T) {
	// Create temporary empty directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	if len(db.Schemas[0].Scripts) != 0 {
		t.Errorf("Expected 0 scripts in empty directory, got %d", len(db.Schemas[0].Scripts))
	}
}

func TestReader_InvalidFilename(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create files with various invalid patterns
	invalidFiles := []string{
		"invalid.sql",           // No priority/sequence
		"1_test.sql",            // Missing sequence
		"test_1_2.sql",          // Wrong order
		"a_001_test.sql",        // Non-numeric priority
		"1_abc_test.sql",        // Non-numeric sequence
		"1_001_test.txt",        // Wrong extension
		"1_001_test.sql.backup", // Wrong extension
	}

	for _, filename := range invalidFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte("SELECT 1;"), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create one valid file
	validFile := filepath.Join(tempDir, "1_001_valid.sql")
	if err := os.WriteFile(validFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create valid file: %v", err)
	}

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}
	// Should only have the valid file (Fatalf: the next check indexes Scripts[0])
	if len(db.Schemas[0].Scripts) != 1 {
		t.Fatalf("Expected 1 script (invalid files should be skipped), got %d", len(db.Schemas[0].Scripts))
	}

	if db.Schemas[0].Scripts[0].Name != "valid" {
		t.Errorf("Expected script name 'valid', got '%s'", db.Schemas[0].Scripts[0].Name)
	}
}

func TestReader_ReadTable(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{})

	_, err := reader.ReadTable()
	if err == nil {
		t.Error("Expected error for ReadTable (not supported), got nil")
	}
}

func TestReader_HyphenFormat(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-hyphen-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test files with hyphen separators
	testFiles := map[string]string{
		"1-001-create-table.sql":   "CREATE TABLE test (id INT);",
		"1-002-insert-data.pgsql":  "INSERT INTO test VALUES (1);",
		"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL);",
		"2-005-add-index.sql":      "CREATE INDEX idx_test ON test(id);",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) != 4 {
		t.Fatalf("Expected 4 scripts, got %d", len(schema.Scripts))
	}

	// Verify specific hyphen-formatted scripts
	expectedScripts := map[string]struct {
		priority int
		sequence uint
	}{
		"create-table": {1, 1},
		"insert-data":  {1, 2},
		"add-index":    {2, 5},
		"create-newid": {10, 10},
	}

	for _, script := range schema.Scripts {
		expected, exists := expectedScripts[script.Name]
		if !exists {
			t.Errorf("Unexpected script: %s", script.Name)
			continue
		}
		if script.Priority != expected.priority {
			t.Errorf("Script '%s': expected priority %d, got %d",
				script.Name, expected.priority, script.Priority)
		}
		if script.Sequence != expected.sequence {
			t.Errorf("Script '%s': expected sequence %d, got %d",
				script.Name, expected.sequence, script.Sequence)
		}
	}
}

func TestReader_MixedFormat(t *testing.T) {
	// Test that both underscore and hyphen formats can be mixed
	tempDir, err := os.MkdirTemp("", "sqldir-test-mixed-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	testFiles := map[string]string{
		"1_001_underscore.sql": "SELECT 1;",
		"1-002-hyphen.sql":     "SELECT 2;",
		"2_003_underscore.sql": "SELECT 3;",
		"2-004-hyphen.sql":     "SELECT 4;",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) != 4 {
		t.Fatalf("Expected 4 scripts (mixed format), got %d", len(schema.Scripts))
	}

	// Verify both formats are parsed correctly
	names := make(map[string]bool)
	for _, script := range schema.Scripts {
		names[script.Name] = true
	}

	expectedNames := []string{"underscore", "hyphen", "underscore", "hyphen"}
	for _, name := range expectedNames {
		if !names[name] {
			t.Errorf("Expected script name '%s' not found", name)
		}
	}
}
226
pkg/writers/sqlexec/README.md
Normal file
@@ -0,0 +1,226 @@
# SQL Executor Writer

The SQL Executor Writer (`sqlexec`) executes SQL scripts from `models.Script` objects against a PostgreSQL database. Scripts are executed in order based on Priority (ascending) and Sequence (ascending).

## Features

- **Ordered Execution**: Scripts execute in Priority→Sequence order
- **PostgreSQL Support**: Uses `pgx/v5` driver for robust PostgreSQL connectivity
- **Stop on Error**: Execution halts immediately on first error (default behavior)
- **Progress Reporting**: Prints execution status to stdout
- **Multiple Schemas**: Can execute scripts from multiple schemas in a database

## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://user:password@localhost:5432/dbname?sslmode=disable",
	},
})

// Execute all scripts from database
err := writer.WriteDatabase(database)
if err != nil {
	log.Fatalf("Execution failed: %v", err)
}
```

### Execute Single Schema

```go
err := writer.WriteSchema(schema)
if err != nil {
	log.Fatalf("Schema execution failed: %v", err)
}
```

### Complete Example with SQL Directory Reader

```go
import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

func main() {
	// Read SQL scripts from directory
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "./migrations",
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	// Execute scripts against PostgreSQL
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/myapp",
		},
	})

	if err := writer.WriteDatabase(db); err != nil {
		log.Fatal(err)
	}
}
```

## Configuration

### Required Metadata

- **connection_string**: PostgreSQL connection string (required)

### Connection String Format

```
postgres://[user[:password]@][host][:port][/dbname][?param1=value1&...]
```

Examples:
```
postgres://localhost/mydb
postgres://user:pass@localhost:5432/mydb?sslmode=disable
postgres://user@localhost/mydb?sslmode=require
postgresql://user:pass@prod-db.example.com:5432/production
```

## Execution Order

Scripts are sorted and executed based on:

1. **Priority** (ascending): Lower priority values execute first
2. **Sequence** (ascending): Within same priority, lower sequence values execute first

### Example Execution Order

Given these scripts:
```
Script A: Priority=2, Sequence=1
Script B: Priority=1, Sequence=3
Script C: Priority=1, Sequence=1
Script D: Priority=1, Sequence=2
Script E: Priority=3, Sequence=1
```

Execution order: **C → D → B → A → E**
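
For reference, the snippet below is a minimal, self-contained sketch of that comparison. The local `script` type exists only for illustration; the writer itself applies the equivalent `sort.Slice` comparator to `models.Script` values.

```go
package main

import (
	"fmt"
	"sort"
)

// script carries only the fields relevant to ordering (illustration only).
type script struct {
	Name     string
	Priority int
	Sequence uint
}

func main() {
	scripts := []script{
		{"A", 2, 1}, {"B", 1, 3}, {"C", 1, 1}, {"D", 1, 2}, {"E", 3, 1},
	}

	// Order by Priority (ascending), then Sequence (ascending).
	sort.Slice(scripts, func(i, j int) bool {
		if scripts[i].Priority != scripts[j].Priority {
			return scripts[i].Priority < scripts[j].Priority
		}
		return scripts[i].Sequence < scripts[j].Sequence
	})

	for _, s := range scripts {
		fmt.Println(s.Name) // prints C, D, B, A, E
	}
}
```
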
## Output

The writer prints progress to stdout:

```
Executing script: create_users (Priority=1, Sequence=1)
✓ Successfully executed: create_users
Executing script: create_posts (Priority=1, Sequence=2)
✓ Successfully executed: create_posts
Executing script: add_indexes (Priority=2, Sequence=1)
✓ Successfully executed: add_indexes
```

## Error Handling

### Connection Errors

If the database connection fails, execution stops immediately:

```
Error: failed to connect to database: connection refused
```

### Script Execution Errors

If a script fails, execution stops and returns the error with context:

```
Error: failed to execute script add_indexes (Priority=2, Sequence=1):
syntax error at or near "IDNEX"
```

**Behavior**: Stop on first error (scripts executed before the error remain committed)
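
Because the writer wraps the driver error with `%w`, a caller can unwrap it. The snippet below is a small sketch (not part of the writer) that pulls out the server-side error code via `errors.As` and `pgconn.PgError`; it assumes the failure originated in the PostgreSQL server.

```go
// requires: "errors", "log", "github.com/jackc/pgx/v5/pgconn"
if err := writer.WriteDatabase(db); err != nil {
	var pgErr *pgconn.PgError
	if errors.As(err, &pgErr) {
		// e.g. pgErr.Code == "42601" for a syntax error
		log.Printf("postgres error %s: %s", pgErr.Code, pgErr.Message)
	}
	log.Fatal(err)
}
```
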
### Empty Script Handling

Scripts with empty SQL content are skipped silently.

## Database Support

Currently supports:
- ✅ PostgreSQL (via pgx/v5)

Future support planned for:
- MySQL/MariaDB
- SQLite
- Generic SQL via database/sql

## Transaction Behavior

**Current**: Each script executes in its own implicit transaction (PostgreSQL default behavior)

**Future Enhancement**: Option to wrap all scripts in a single transaction for atomic execution with rollback on error.
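
One possible shape for that enhancement is sketched below. This is illustrative only — the helper name and behavior are assumptions, nothing like it exists in the current writer — and it reuses the imports the writer already has (`context`, `fmt`, `pgx`, `models`).

```go
// Sketch: run already-sorted scripts inside a single transaction so they
// commit or roll back together (hypothetical future mode).
func executeScriptsInTx(ctx context.Context, conn *pgx.Conn, scripts []*models.Script) error {
	tx, err := conn.Begin(ctx)
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %w", err)
	}
	// Deferred Rollback is safe even after a successful Commit.
	defer tx.Rollback(ctx)

	for _, script := range scripts {
		if script.SQL == "" {
			continue
		}
		if _, err := tx.Exec(ctx, script.SQL); err != nil {
			return fmt.Errorf("script %s failed, rolling back: %w", script.Name, err)
		}
	}
	return tx.Commit(ctx)
}
```
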
## Performance Considerations

- Scripts execute sequentially (not in parallel)
- Each script creates a database round-trip
- For large migrations, consider:
  - Combining related statements into fewer scripts
  - Using PostgreSQL's COPY command for bulk data (see the sketch below)
  - Running during low-traffic periods
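
For the COPY suggestion above, a hedged sketch with pgx's `CopyFrom` is shown below. Here `conn` and `ctx` are assumed to come from an existing pgx connection, and the `users` table and its columns are hypothetical; the writer itself does not perform bulk loading today.

```go
// Bulk-insert rows via the PostgreSQL COPY protocol instead of many INSERTs.
rows := [][]any{
	{1, "alice"},
	{2, "bob"},
}
copied, err := conn.CopyFrom(
	ctx,
	pgx.Identifier{"users"},   // hypothetical target table
	[]string{"id", "name"},    // target columns
	pgx.CopyFromRows(rows),
)
if err != nil {
	log.Fatalf("COPY failed: %v", err)
}
log.Printf("copied %d rows", copied)
```
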
## Testing

Run tests:
```bash
go test ./pkg/writers/sqlexec/
```

Current tests include:
- Validation and error handling
- Script sorting logic
- Configuration validation

### Integration Tests

For integration testing with a real database:

```bash
# Start PostgreSQL (example with Docker)
docker run -d --name postgres-test \
  -e POSTGRES_PASSWORD=test \
  -e POSTGRES_DB=testdb \
  -p 5432:5432 \
  postgres:16

# Run your integration tests
go test -tags=integration ./pkg/writers/sqlexec/

# Cleanup
docker stop postgres-test
docker rm postgres-test
```

## Limitations

- `WriteTable()` is not supported (returns error)
- Requires PostgreSQL connection (no offline mode)
- No built-in transaction wrapping (yet)
- No rollback script support (yet, though `models.Script.Rollback` field exists)

## Related

- **SQL Directory Reader**: `pkg/readers/sqldir/` - Read scripts from filesystem
- **Script Model**: `pkg/models/models.go` - Script structure definition
- **pgx Documentation**: https://github.com/jackc/pgx - PostgreSQL driver docs
125
pkg/writers/sqlexec/writer.go
Normal file
@@ -0,0 +1,125 @@
package sqlexec

import (
	"context"
	"fmt"
	"sort"

	"github.com/jackc/pgx/v5"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

// Writer implements the writers.Writer interface for executing SQL scripts
type Writer struct {
	options *writers.WriterOptions
}

// NewWriter creates a new SQL executor writer
func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

// WriteDatabase executes all scripts from all schemas in the database
func (w *Writer) WriteDatabase(db *models.Database) error {
	if db == nil {
		return fmt.Errorf("database is nil")
	}

	// Get connection string from metadata
	connString, ok := w.options.Metadata["connection_string"].(string)
	if !ok || connString == "" {
		return fmt.Errorf("connection_string is required in writer metadata")
	}

	// Connect to database
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, connString)
	if err != nil {
		return fmt.Errorf("failed to connect to database: %w", err)
	}
	defer conn.Close(ctx)

	// Execute scripts from all schemas
	for _, schema := range db.Schemas {
		if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
			return fmt.Errorf("failed to execute scripts from schema %s: %w", schema.Name, err)
		}
	}

	return nil
}

// WriteSchema executes all scripts from a single schema
func (w *Writer) WriteSchema(schema *models.Schema) error {
	if schema == nil {
		return fmt.Errorf("schema is nil")
	}

	// Get connection string from metadata
	connString, ok := w.options.Metadata["connection_string"].(string)
	if !ok || connString == "" {
		return fmt.Errorf("connection_string is required in writer metadata")
	}

	// Connect to database
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, connString)
	if err != nil {
		return fmt.Errorf("failed to connect to database: %w", err)
	}
	defer conn.Close(ctx)

	// Execute scripts
	if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
		return fmt.Errorf("failed to execute scripts: %w", err)
	}

	return nil
}

// WriteTable is not applicable for SQL script execution
func (w *Writer) WriteTable(table *models.Table) error {
	return fmt.Errorf("WriteTable is not supported for SQL script execution")
}

// executeScripts executes scripts in Priority then Sequence order
func (w *Writer) executeScripts(ctx context.Context, conn *pgx.Conn, scripts []*models.Script) error {
	if len(scripts) == 0 {
		return nil
	}

	// Sort scripts by Priority (ascending) then Sequence (ascending)
	sortedScripts := make([]*models.Script, len(scripts))
	copy(sortedScripts, scripts)
	sort.Slice(sortedScripts, func(i, j int) bool {
		if sortedScripts[i].Priority != sortedScripts[j].Priority {
			return sortedScripts[i].Priority < sortedScripts[j].Priority
		}
		return sortedScripts[i].Sequence < sortedScripts[j].Sequence
	})

	// Execute each script in order
	for _, script := range sortedScripts {
		if script.SQL == "" {
			continue
		}

		fmt.Printf("Executing script: %s (Priority=%d, Sequence=%d)\n",
			script.Name, script.Priority, script.Sequence)

		// Execute the SQL script
		_, err := conn.Exec(ctx, script.SQL)
		if err != nil {
			return fmt.Errorf("failed to execute script %s (Priority=%d, Sequence=%d): %w",
				script.Name, script.Priority, script.Sequence, err)
		}

		fmt.Printf("✓ Successfully executed: %s\n", script.Name)
	}

	return nil
}
201
pkg/writers/sqlexec/writer_test.go
Normal file
@@ -0,0 +1,201 @@
package sqlexec

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

func TestNewWriter(t *testing.T) {
	opts := &writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	}

	writer := NewWriter(opts)
	if writer == nil {
		t.Fatal("Expected non-nil writer")
	}
	if writer.options != opts {
		t.Error("Writer options not set correctly")
	}
}

func TestWriter_WriteDatabase_NilDatabase(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	})

	err := writer.WriteDatabase(nil)
	if err == nil {
		t.Error("Expected error for nil database, got nil")
	}
}

func TestWriter_WriteDatabase_MissingConnectionString(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{},
	})

	db := &models.Database{
		Name: "test",
		Schemas: []*models.Schema{
			{
				Name: "public",
				Scripts: []*models.Script{
					{Name: "test", SQL: "SELECT 1;"},
				},
			},
		},
	}

	err := writer.WriteDatabase(db)
	if err == nil {
		t.Error("Expected error for missing connection_string, got nil")
	}
}

func TestWriter_WriteSchema_NilSchema(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	})

	err := writer.WriteSchema(nil)
	if err == nil {
		t.Error("Expected error for nil schema, got nil")
	}
}

func TestWriter_WriteSchema_MissingConnectionString(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{},
	})

	schema := &models.Schema{
		Name: "public",
		Scripts: []*models.Script{
			{Name: "test", SQL: "SELECT 1;"},
		},
	}

	err := writer.WriteSchema(schema)
	if err == nil {
		t.Error("Expected error for missing connection_string, got nil")
	}
}

func TestWriter_WriteTable(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	err := writer.WriteTable(&models.Table{})
	if err == nil {
		t.Error("Expected error for WriteTable (not supported), got nil")
	}
}
// TestScriptSorting verifies that scripts are sorted correctly by Priority then Sequence
func TestScriptSorting(t *testing.T) {
	scripts := []*models.Script{
		{Name: "script1", Priority: 2, Sequence: 1, SQL: "SELECT 1;"},
		{Name: "script2", Priority: 1, Sequence: 3, SQL: "SELECT 2;"},
		{Name: "script3", Priority: 1, Sequence: 1, SQL: "SELECT 3;"},
		{Name: "script4", Priority: 1, Sequence: 2, SQL: "SELECT 4;"},
		{Name: "script5", Priority: 3, Sequence: 1, SQL: "SELECT 5;"},
		{Name: "script6", Priority: 2, Sequence: 2, SQL: "SELECT 6;"},
	}

	// Create a copy and order it by the same criteria executeScripts uses
	sortedScripts := make([]*models.Script, len(scripts))
	copy(sortedScripts, scripts)

	// Simple exchange sort applying the ordering criteria (Priority, then Sequence)
	for i := 0; i < len(sortedScripts)-1; i++ {
		for j := i + 1; j < len(sortedScripts); j++ {
			if sortedScripts[i].Priority > sortedScripts[j].Priority ||
				(sortedScripts[i].Priority == sortedScripts[j].Priority &&
					sortedScripts[i].Sequence > sortedScripts[j].Sequence) {
				sortedScripts[i], sortedScripts[j] = sortedScripts[j], sortedScripts[i]
			}
		}
	}

	// Expected order after sorting
	expectedOrder := []string{
		"script3", // Priority 1, Sequence 1
		"script4", // Priority 1, Sequence 2
		"script2", // Priority 1, Sequence 3
		"script1", // Priority 2, Sequence 1
		"script6", // Priority 2, Sequence 2
		"script5", // Priority 3, Sequence 1
	}

	for i, expected := range expectedOrder {
		if sortedScripts[i].Name != expected {
			t.Errorf("Position %d: expected %s, got %s", i, expected, sortedScripts[i].Name)
		}
	}

	// Verify priorities are ascending
	for i := 0; i < len(sortedScripts)-1; i++ {
		if sortedScripts[i].Priority > sortedScripts[i+1].Priority {
			t.Errorf("Priority not ascending at position %d: %d > %d",
				i, sortedScripts[i].Priority, sortedScripts[i+1].Priority)
		}
		// Within same priority, sequences should be ascending
		if sortedScripts[i].Priority == sortedScripts[i+1].Priority &&
			sortedScripts[i].Sequence > sortedScripts[i+1].Sequence {
			t.Errorf("Sequence not ascending at position %d with same priority %d: %d > %d",
				i, sortedScripts[i].Priority, sortedScripts[i].Sequence, sortedScripts[i+1].Sequence)
		}
	}
}
func TestWriter_WriteSchema_EmptyScripts(t *testing.T) {
	// Documents current behavior for an empty script list: the connection is
	// attempted before executeScripts can return early, so an invalid
	// connection string still produces an error.
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://invalid/test",
		},
	})

	schema := &models.Schema{
		Name:    "public",
		Scripts: []*models.Script{},
	}

	// Note: This will try to connect even with empty scripts.
	// executeScripts returns early for empty script lists, but the
	// connection is made before that point.
	err := writer.WriteSchema(schema)
	// We expect a connection error since we're using an invalid connection string
	if err == nil {
		t.Error("Expected connection error, got nil")
	}
}

// NOTE: Integration tests for actual database execution should be added separately
// Those tests would require:
// 1. A running PostgreSQL instance
// 2. Test database setup/teardown
// 3. Verification of actual script execution
// 4. Testing error handling during execution
// 5. Testing transaction behavior if added
//
// Example integration test structure:
// func TestWriter_Integration_ExecuteScripts(t *testing.T) {
//     if testing.Short() {
//         t.Skip("Skipping integration test")
//     }
//     // Setup test database
//     // Create test scripts
//     // Execute scripts
//     // Verify results
//     // Cleanup
// }