Files
relspecgo/cmd/relspec/convert.go
Hein 5f1923233e
Some checks failed
CI / Test (1.24) (push) Has been cancelled
CI / Test (1.25) (push) Has been cancelled
CI / Test (1.23) (push) Has been cancelled
CI / Lint (push) Failing after -26m40s
CI / Build (push) Failing after -25m24s
Reverse reading bun/gorm models
2025-12-18 20:00:59 +02:00

388 lines
12 KiB
Go

package main
import (
"fmt"
"os"
"strings"
"time"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
"github.com/spf13/cobra"
)
// Flag values for the convert command, bound to CLI flags in init().
var (
	convertSourceType   string // --from: source format identifier (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)
	convertSourcePath   string // --from-path: source file or directory for file-based formats
	convertSourceConn   string // --from-conn: connection string for live-database sources
	convertTargetType   string // --to: target format identifier
	convertTargetPath   string // --to-path: output file or directory
	convertPackageName  string // --package: Go package name for code-generation targets (gorm/bun)
	convertSchemaFilter string // --schema: restrict output to a single named schema
)
// convertCmd implements `relspec convert`: it reads a database schema from
// one supported format and writes it out in another. Flag values are bound
// to the convert* package variables in init(); the work happens in runConvert.
var convertCmd = &cobra.Command{
	Use:   "convert",
	Short: "Convert database schemas between different formats",
	Long: `Convert database schemas from one format to another.
Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and writing to various formats (GORM, Bun, DBML, JSON,
YAML, SQL, etc.).
Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- pgsql: PostgreSQL database (live connection)
Output formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go)
- bun: Bun model files (Go)
- pgsql: PostgreSQL SQL schema
PostgreSQL Connection String Examples:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
Examples:
# Convert DBML to GORM models
relspec convert --from dbml --from-path schema.dbml \
--to gorm --to-path models/ --package models
# Convert PostgreSQL database to DBML
relspec convert --from pgsql \
--from-conn "postgres://myuser:mypass@localhost:5432/mydb" \
--to dbml --to-path schema.dbml
# Convert PostgreSQL to JSON (with SSL disabled)
relspec convert --from pgsql \
--from-conn "postgresql://user:pass@localhost/db?sslmode=disable" \
--to json --to-path schema.json
# Convert DBML to JSON
relspec convert --from dbml --from-path schema.dbml \
--to json --to-path schema.json
# Convert DrawDB to PostgreSQL SQL
relspec convert --from drawdb --from-path design.ddb \
--to pgsql --to-path schema.sql
# Convert local PostgreSQL database to YAML
relspec convert --from pgsql \
--from-conn "host=localhost port=5432 user=admin password=secret dbname=prod" \
--to yaml --to-path schema.yaml
# Convert PostgreSQL to DCTX (single schema required)
relspec convert --from pgsql \
--from-conn "postgres://user:pass@localhost:5432/mydb" \
--to dctx --to-path schema.dctx --schema public
# Convert DBML to DCTX with specific schema
relspec convert --from dbml --from-path schema.dbml \
--to dctx --to-path output.dctx --schema public
# Convert GORM models directory to DBML
relspec convert --from gorm --from-path /path/to/models \
--to dbml --to-path schema.dbml
# Convert Bun models directory to JSON
relspec convert --from bun --from-path ./models \
--to json --to-path schema.json`,
	RunE: runConvert,
}
// init registers the convert command's flags and marks the mandatory ones.
func init() {
	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
	convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
	convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
	convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
	convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
	convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")

	// MarkFlagRequired only fails when the named flag was never registered —
	// a programming error. The errors were previously discarded (errcheck);
	// surface them instead of ignoring them silently.
	for _, name := range []string{"from", "to", "to-path"} {
		cobra.CheckErr(convertCmd.MarkFlagRequired(name))
	}
}
// runConvert is the RunE handler for the convert command. It loads the
// source schema, reports progress on stderr, and writes the converted
// result to the configured target format.
func runConvert(cmd *cobra.Command, args []string) error {
	log := os.Stderr

	fmt.Fprintf(log, "\n=== RelSpec Schema Converter ===\n")
	fmt.Fprintf(log, "Started at: %s\n\n", getCurrentTimestamp())

	// Stage 1: load the source schema.
	fmt.Fprintf(log, "[1/2] Reading source schema...\n")
	fmt.Fprintf(log, " Format: %s\n", convertSourceType)
	if convertSourcePath != "" {
		fmt.Fprintf(log, " Path: %s\n", convertSourcePath)
	}
	if convertSourceConn != "" {
		// Never echo credentials verbatim.
		fmt.Fprintf(log, " Conn: %s\n", maskPassword(convertSourceConn))
	}

	db, err := readDatabaseForConvert(convertSourceType, convertSourcePath, convertSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	tableCount := 0
	for _, s := range db.Schemas {
		tableCount += len(s.Tables)
	}
	fmt.Fprintf(log, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(log, " Found: %d schema(s)\n", len(db.Schemas))
	fmt.Fprintf(log, " Found: %d table(s)\n\n", tableCount)

	// Stage 2: emit the target format.
	fmt.Fprintf(log, "[2/2] Writing to target format...\n")
	fmt.Fprintf(log, " Format: %s\n", convertTargetType)
	fmt.Fprintf(log, " Output: %s\n", convertTargetPath)
	if convertPackageName != "" {
		fmt.Fprintf(log, " Package: %s\n", convertPackageName)
	}
	if convertSchemaFilter != "" {
		fmt.Fprintf(log, " Schema: %s\n", convertSchemaFilter)
	}

	if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter); err != nil {
		return fmt.Errorf("failed to write target: %w", err)
	}

	fmt.Fprintf(log, " ✓ Successfully wrote output\n\n")
	fmt.Fprintf(log, "=== Conversion Complete ===\n")
	fmt.Fprintf(log, "Completed at: %s\n\n", getCurrentTimestamp())
	return nil
}
// readDatabaseForConvert instantiates the reader matching dbType and loads
// the complete database model from filePath (file-based formats) or
// connString (live databases). The format name is matched case-insensitively.
func readDatabaseForConvert(dbType, filePath, connString string) (*models.Database, error) {
	// All file-based readers share the same validation error shape.
	needPath := func(format string) error {
		return fmt.Errorf("file path is required for %s format", format)
	}
	fileOpts := &readers.ReaderOptions{FilePath: filePath}

	var reader readers.Reader
	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, needPath("DBML")
		}
		reader = dbml.NewReader(fileOpts)
	case "dctx":
		if filePath == "" {
			return nil, needPath("DCTX")
		}
		reader = dctx.NewReader(fileOpts)
	case "drawdb":
		if filePath == "" {
			return nil, needPath("DrawDB")
		}
		reader = drawdb.NewReader(fileOpts)
	case "json":
		if filePath == "" {
			return nil, needPath("JSON")
		}
		reader = json.NewReader(fileOpts)
	case "yaml", "yml":
		if filePath == "" {
			return nil, needPath("YAML")
		}
		reader = yaml.NewReader(fileOpts)
	case "gorm":
		if filePath == "" {
			return nil, needPath("GORM")
		}
		reader = gorm.NewReader(fileOpts)
	case "bun":
		if filePath == "" {
			return nil, needPath("Bun")
		}
		reader = bun.NewReader(fileOpts)
	case "pgsql", "postgres", "postgresql":
		if connString == "" {
			return nil, fmt.Errorf("connection string is required for PostgreSQL format")
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		return nil, fmt.Errorf("unsupported source format: %s", dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, fmt.Errorf("failed to read database: %w", err)
	}
	return db, nil
}
// writeDatabase emits db in the requested target format. packageName is
// mandatory for the code-generation targets (gorm/bun). When schemaFilter is
// non-empty only that schema is written; DCTX, which can only represent a
// single schema, additionally auto-selects when exactly one schema exists.
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string) error {
	format := strings.ToLower(dbType)
	opts := &writers.WriterOptions{
		OutputPath:  outputPath,
		PackageName: packageName,
	}

	var writer writers.Writer
	switch format {
	case "dbml":
		writer = wdbml.NewWriter(opts)
	case "dctx":
		writer = wdctx.NewWriter(opts)
	case "drawdb":
		writer = wdrawdb.NewWriter(opts)
	case "json":
		writer = wjson.NewWriter(opts)
	case "yaml", "yml":
		writer = wyaml.NewWriter(opts)
	case "gorm":
		if packageName == "" {
			return fmt.Errorf("package name is required for GORM format (use --package flag)")
		}
		writer = wgorm.NewWriter(opts)
	case "bun":
		if packageName == "" {
			return fmt.Errorf("package name is required for Bun format (use --package flag)")
		}
		writer = wbun.NewWriter(opts)
	case "pgsql", "postgres", "postgresql", "sql":
		writer = wpgsql.NewWriter(opts)
	default:
		return fmt.Errorf("unsupported target format: %s", dbType)
	}

	// An explicit --schema narrows output to that single schema, whatever
	// the target format.
	if schemaFilter != "" {
		var selected *models.Schema
		for _, s := range db.Schemas {
			if s.Name == schemaFilter {
				selected = s
				break
			}
		}
		if selected == nil {
			return fmt.Errorf("schema '%s' not found in database. Available schemas: %v",
				schemaFilter, getSchemaNames(db))
		}
		if err := writer.WriteSchema(selected); err != nil {
			return fmt.Errorf("failed to write schema: %w", err)
		}
		return nil
	}

	// DCTX cannot express a whole multi-schema database: auto-select a lone
	// schema, otherwise require the caller to choose one.
	if format == "dctx" {
		switch len(db.Schemas) {
		case 0:
			return fmt.Errorf("no schemas found in database")
		case 1:
			if err := writer.WriteSchema(db.Schemas[0]); err != nil {
				return fmt.Errorf("failed to write schema: %w", err)
			}
			return nil
		default:
			return fmt.Errorf("multiple schemas found, please specify which schema to export using --schema flag. Available schemas: %v",
				getSchemaNames(db))
		}
	}

	// Every other format supports a full-database write.
	if err := writer.WriteDatabase(db); err != nil {
		return fmt.Errorf("failed to write database: %w", err)
	}
	return nil
}
// getSchemaNames collects the names of all schemas in db, preserving order.
// Used to build "available schemas" hints in error messages.
func getSchemaNames(db *models.Database) []string {
	names := make([]string, 0, len(db.Schemas))
	for _, s := range db.Schemas {
		names = append(names, s.Name)
	}
	return names
}
// getCurrentTimestamp renders the current wall-clock time as
// "YYYY-MM-DD HH:MM:SS" for the progress banner.
func getCurrentTimestamp() string {
	const layout = "2006-01-02 15:04:05" // same layout as time.DateTime
	return time.Now().Format(layout)
}
// maskPassword redacts the password portion of a connection string so it can
// be echoed safely. Two layouts are recognized: URL style
// (scheme://user:password@host/...) and space-separated key=value style
// containing a password= token. Anything else comes back unchanged.
func maskPassword(connStr string) string {
	// URL style: the password sits between the second ':' and the first '@'.
	if strings.Contains(connStr, "://") && strings.Contains(connStr, "@") {
		credentials, hostPart, _ := strings.Cut(connStr, "@")
		scheme, remainder, hasColon := strings.Cut(credentials, ":")
		if hasColon {
			user, _, hasPassword := strings.Cut(remainder, ":")
			if hasPassword {
				return scheme + ":" + user + ":***@" + hostPart
			}
		}
		// No password component found; fall through to the key=value check.
	}

	// key=value style: rewrite every token that begins with "password=".
	if strings.Contains(connStr, "password=") {
		tokens := strings.Split(connStr, " ")
		for i := range tokens {
			if strings.HasPrefix(tokens[i], "password=") {
				tokens[i] = "password=***"
			}
		}
		return strings.Join(tokens, " ")
	}

	return connStr
}