- Introduce a new screen for importing and merging database schemas. - Implement merge logic to combine schemas, tables, columns, and other objects. - Add options to skip specific object types during the merge process. - Update main menu to include the new import and merge option.
434 lines
15 KiB
Go
434 lines
15 KiB
Go
package main
|
|
|
|
import (
|
|
"fmt"
|
|
"os"
|
|
"path/filepath"
|
|
"strings"
|
|
|
|
"github.com/spf13/cobra"
|
|
|
|
"git.warky.dev/wdevs/relspecgo/pkg/merge"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
|
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
|
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
|
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
|
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
|
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
|
|
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
|
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
|
|
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
|
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
|
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
|
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
|
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
|
)
|
|
|
|
// Command-line flag values for the merge command, populated by init().
var (
	// Target database: the base schema that the source is merged into.
	mergeTargetType string
	mergeTargetPath string
	mergeTargetConn string

	// Source database: the schema whose missing objects are added to the target.
	mergeSourceType string
	mergeSourcePath string
	mergeSourceConn string

	// Output destination for the merged result.
	mergeOutputType string
	mergeOutputPath string
	mergeOutputConn string

	// Merge behavior toggles: skip selected object kinds during the merge.
	mergeSkipDomains   bool
	mergeSkipRelations bool
	mergeSkipEnums     bool
	mergeSkipViews     bool
	mergeSkipSequences bool
	mergeSkipTables    string // Comma-separated table names to skip
	// NOTE(review): mergeVerbose is registered as --verbose but never read in
	// this file — confirm whether verbose output was intended to be wired up.
	mergeVerbose bool
)
|
|
|
|
// mergeCmd defines the "relspec merge" subcommand. The merge is additive
// only: objects present in the source but missing from the target are added;
// existing objects are never modified or deleted. Execution is delegated to
// runMerge.
var mergeCmd = &cobra.Command{
	Use:   "merge",
	Short: "Merge database schemas (additive only - adds missing items)",
	Long: `Merge one database schema into another. Performs additive merging only:
adds missing schemas, tables, columns, and other objects without modifying
or deleting existing items.

The target database is loaded first, then the source database is merged into it.
The result can be saved to a new format or updated in place.

Examples:
  # Merge two JSON schemas
  relspec merge --target json --target-path base.json \
    --source json --source-path additional.json \
    --output json --output-path merged.json

  # Merge from PostgreSQL into JSON
  relspec merge --target json --target-path mydb.json \
    --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
    --output json --output-path combined.json

  # Merge DBML and YAML, skip relations
  relspec merge --target dbml --target-path schema.dbml \
    --source yaml --source-path tables.yaml \
    --output dbml --output-path merged.dbml \
    --skip-relations

  # Merge and save back to target format
  relspec merge --target json --target-path base.json \
    --source json --source-path patch.json \
    --output json --output-path base.json`,
	RunE: runMerge,
}
|
|
|
|
// init registers all flags for the merge command. Flag values are stored in
// the package-level merge* variables; the command is attached to the root
// command elsewhere in the package.
func init() {
	// Target database flags
	mergeCmd.Flags().StringVar(&mergeTargetType, "target", "", "Target format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeTargetPath, "target-path", "", "Target file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeTargetConn, "target-conn", "", "Target connection string (required for pgsql)")

	// Source database flags
	mergeCmd.Flags().StringVar(&mergeSourceType, "source", "", "Source format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeSourceConn, "source-conn", "", "Source connection string (required for pgsql)")

	// Output flags
	mergeCmd.Flags().StringVar(&mergeOutputType, "output", "", "Output format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeOutputPath, "output-path", "", "Output file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeOutputConn, "output-conn", "", "Output connection string (for pgsql)")

	// Merge options
	mergeCmd.Flags().BoolVar(&mergeSkipDomains, "skip-domains", false, "Skip domains during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipRelations, "skip-relations", false, "Skip relations during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipEnums, "skip-enums", false, "Skip enums during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipViews, "skip-views", false, "Skip views during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipSequences, "skip-sequences", false, "Skip sequences during merge")
	mergeCmd.Flags().StringVar(&mergeSkipTables, "skip-tables", "", "Comma-separated list of table names to skip during merge")
	mergeCmd.Flags().BoolVar(&mergeVerbose, "verbose", false, "Show verbose output")
}
|
|
|
|
func runMerge(cmd *cobra.Command, args []string) error {
|
|
fmt.Fprintf(os.Stderr, "\n=== RelSpec Merge ===\n")
|
|
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
|
|
|
|
// Validate required flags
|
|
if mergeTargetType == "" {
|
|
return fmt.Errorf("--target format is required")
|
|
}
|
|
if mergeSourceType == "" {
|
|
return fmt.Errorf("--source format is required")
|
|
}
|
|
if mergeOutputType == "" {
|
|
return fmt.Errorf("--output format is required")
|
|
}
|
|
|
|
// Validate and expand file paths
|
|
if mergeTargetType != "pgsql" {
|
|
if mergeTargetPath == "" {
|
|
return fmt.Errorf("--target-path is required for %s format", mergeTargetType)
|
|
}
|
|
mergeTargetPath = expandPath(mergeTargetPath)
|
|
} else if mergeTargetConn == "" {
|
|
|
|
return fmt.Errorf("--target-conn is required for pgsql format")
|
|
|
|
}
|
|
|
|
if mergeSourceType != "pgsql" {
|
|
if mergeSourcePath == "" {
|
|
return fmt.Errorf("--source-path is required for %s format", mergeSourceType)
|
|
}
|
|
mergeSourcePath = expandPath(mergeSourcePath)
|
|
} else if mergeSourceConn == "" {
|
|
return fmt.Errorf("--source-conn is required for pgsql format")
|
|
}
|
|
|
|
if mergeOutputType != "pgsql" {
|
|
if mergeOutputPath == "" {
|
|
return fmt.Errorf("--output-path is required for %s format", mergeOutputType)
|
|
}
|
|
mergeOutputPath = expandPath(mergeOutputPath)
|
|
}
|
|
|
|
// Step 1: Read target database
|
|
fmt.Fprintf(os.Stderr, "[1/3] Reading target database...\n")
|
|
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeTargetType)
|
|
if mergeTargetPath != "" {
|
|
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeTargetPath)
|
|
}
|
|
if mergeTargetConn != "" {
|
|
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeTargetConn))
|
|
}
|
|
|
|
targetDB, err := readDatabaseForMerge(mergeTargetType, mergeTargetPath, mergeTargetConn, "Target")
|
|
if err != nil {
|
|
return fmt.Errorf("failed to read target database: %w", err)
|
|
}
|
|
fmt.Fprintf(os.Stderr, " ✓ Successfully read target database '%s'\n", targetDB.Name)
|
|
printDatabaseStats(targetDB)
|
|
|
|
// Step 2: Read source database
|
|
fmt.Fprintf(os.Stderr, "\n[2/3] Reading source database...\n")
|
|
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeSourceType)
|
|
if mergeSourcePath != "" {
|
|
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
|
|
}
|
|
if mergeSourceConn != "" {
|
|
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
|
|
}
|
|
|
|
sourceDB, err := readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
|
|
if err != nil {
|
|
return fmt.Errorf("failed to read source database: %w", err)
|
|
}
|
|
fmt.Fprintf(os.Stderr, " ✓ Successfully read source database '%s'\n", sourceDB.Name)
|
|
printDatabaseStats(sourceDB)
|
|
|
|
// Step 3: Merge databases
|
|
fmt.Fprintf(os.Stderr, "\n[3/3] Merging databases...\n")
|
|
|
|
opts := &merge.MergeOptions{
|
|
SkipDomains: mergeSkipDomains,
|
|
SkipRelations: mergeSkipRelations,
|
|
SkipEnums: mergeSkipEnums,
|
|
SkipViews: mergeSkipViews,
|
|
SkipSequences: mergeSkipSequences,
|
|
}
|
|
|
|
// Parse skip-tables flag
|
|
if mergeSkipTables != "" {
|
|
opts.SkipTableNames = parseSkipTables(mergeSkipTables)
|
|
if len(opts.SkipTableNames) > 0 {
|
|
fmt.Fprintf(os.Stderr, " Skipping tables: %s\n", mergeSkipTables)
|
|
}
|
|
}
|
|
|
|
result := merge.MergeDatabases(targetDB, sourceDB, opts)
|
|
|
|
// Update timestamp
|
|
targetDB.UpdateDate()
|
|
|
|
// Print merge summary
|
|
fmt.Fprintf(os.Stderr, " ✓ Merge complete\n\n")
|
|
fmt.Fprintf(os.Stderr, "%s\n", merge.GetMergeSummary(result))
|
|
|
|
// Step 4: Write output
|
|
fmt.Fprintf(os.Stderr, "\n[4/4] Writing output...\n")
|
|
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeOutputType)
|
|
if mergeOutputPath != "" {
|
|
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeOutputPath)
|
|
}
|
|
|
|
err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, "", targetDB, "Output")
|
|
if err != nil {
|
|
return fmt.Errorf("failed to write output: %w", err)
|
|
}
|
|
|
|
fmt.Fprintf(os.Stderr, " ✓ Successfully written merged database\n")
|
|
fmt.Fprintf(os.Stderr, "\n=== Merge complete ===\n")
|
|
|
|
return nil
|
|
}
|
|
|
|
func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.Database, error) {
|
|
var reader readers.Reader
|
|
|
|
switch strings.ToLower(dbType) {
|
|
case "dbml":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for DBML format", label)
|
|
}
|
|
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "dctx":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
|
|
}
|
|
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "drawdb":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
|
|
}
|
|
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "graphql":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
|
|
}
|
|
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "json":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for JSON format", label)
|
|
}
|
|
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "yaml":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for YAML format", label)
|
|
}
|
|
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "gorm":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for GORM format", label)
|
|
}
|
|
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "bun":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for Bun format", label)
|
|
}
|
|
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "drizzle":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
|
|
}
|
|
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "prisma":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
|
|
}
|
|
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "typeorm":
|
|
if filePath == "" {
|
|
return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
|
|
}
|
|
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
|
case "pgsql":
|
|
if connString == "" {
|
|
return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
|
|
}
|
|
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
|
|
default:
|
|
return nil, fmt.Errorf("%s: unsupported format '%s'", label, dbType)
|
|
}
|
|
|
|
db, err := reader.ReadDatabase()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
return db, nil
|
|
}
|
|
|
|
func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string) error {
|
|
var writer writers.Writer
|
|
|
|
switch strings.ToLower(dbType) {
|
|
case "dbml":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for DBML format", label)
|
|
}
|
|
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "dctx":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for DCTX format", label)
|
|
}
|
|
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "drawdb":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for DrawDB format", label)
|
|
}
|
|
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "graphql":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for GraphQL format", label)
|
|
}
|
|
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "json":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for JSON format", label)
|
|
}
|
|
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "yaml":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for YAML format", label)
|
|
}
|
|
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "gorm":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for GORM format", label)
|
|
}
|
|
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "bun":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for Bun format", label)
|
|
}
|
|
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "drizzle":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for Drizzle format", label)
|
|
}
|
|
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "prisma":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for Prisma format", label)
|
|
}
|
|
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "typeorm":
|
|
if filePath == "" {
|
|
return fmt.Errorf("%s: file path is required for TypeORM format", label)
|
|
}
|
|
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
case "pgsql":
|
|
writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
|
|
default:
|
|
return fmt.Errorf("%s: unsupported format '%s'", label, dbType)
|
|
}
|
|
|
|
return writer.WriteDatabase(db)
|
|
}
|
|
|
|
// expandPath expands a leading "~" to the current user's home directory.
// Only "~" alone and paths beginning with "~/" are expanded; "~user" forms
// are returned unchanged, since resolving another user's home directory is
// not supported (the previous implementation wrongly expanded "~user/x" to
// "$HOME/user/x"). If the home directory cannot be determined, the path is
// returned unchanged.
func expandPath(path string) string {
	if path == "~" || strings.HasPrefix(path, "~/") {
		if home, err := os.UserHomeDir(); err == nil {
			return filepath.Join(home, path[1:])
		}
	}
	return path
}
|
|
|
|
func printDatabaseStats(db *models.Database) {
|
|
totalTables := 0
|
|
totalColumns := 0
|
|
totalConstraints := 0
|
|
totalIndexes := 0
|
|
|
|
for _, schema := range db.Schemas {
|
|
totalTables += len(schema.Tables)
|
|
for _, table := range schema.Tables {
|
|
totalColumns += len(table.Columns)
|
|
totalConstraints += len(table.Constraints)
|
|
totalIndexes += len(table.Indexes)
|
|
}
|
|
}
|
|
|
|
fmt.Fprintf(os.Stderr, " Schemas: %d, Tables: %d, Columns: %d, Constraints: %d, Indexes: %d\n",
|
|
len(db.Schemas), totalTables, totalColumns, totalConstraints, totalIndexes)
|
|
}
|
|
|
|
// parseSkipTables converts a comma-separated list of table names into a
// lookup set. Each name is trimmed of surrounding whitespace and lowercased
// so later matching is case-insensitive; empty entries are dropped. An empty
// input yields an empty (non-nil) set.
func parseSkipTables(skipTablesStr string) map[string]bool {
	set := make(map[string]bool)
	for _, entry := range strings.Split(skipTablesStr, ",") {
		name := strings.TrimSpace(entry)
		if name == "" {
			continue
		}
		set[strings.ToLower(name)] = true
	}
	return set
}
|