Compare commits

...

7 Commits

Author SHA1 Message Date
e828d48798 chore(release): update package version to 1.0.52
All checks were successful
Release / test (push) Successful in 32m39s
Release / release (push) Successful in 32m1s
Release / pkg-deb (push) Successful in 32m9s
Release / pkg-aur (push) Successful in 31m37s
Release / pkg-rpm (push) Successful in 27m28s
2026-05-03 17:19:22 +02:00
6e470a9239 fix(type_mapper): adjust array tag handling in BuildBunTag 2026-05-03 17:18:58 +02:00
096815fe49 chore(release): update package version to 1.0.51
All checks were successful
Release / test (push) Successful in 32m30s
Release / release (push) Successful in 31m54s
Release / pkg-aur (push) Successful in 32m31s
Release / pkg-deb (push) Successful in 32m7s
Release / pkg-rpm (push) Successful in 30m36s
2026-05-03 16:11:13 +02:00
b8f60203cb fix(type_mapper): handle PostgreSQL array types in tags
* Update BuildBunTag to append "array" for array types
* Add tests for handling array types in TypeMapper
* Adjust regex in SanitizeStructTagValue to preserve array suffix
2026-05-03 16:11:01 +02:00
Hein
15763f60cc Fix GIN opclass handling for array columns 2026-04-30 20:35:06 +02:00
Hein
6d2884f5cf chore(release): update package version to 1.0.50
Some checks failed
Release / test (push) Successful in 32m41s
Release / release (push) Successful in 28m56s
Release / pkg-deb (push) Successful in 31m19s
Release / pkg-aur (push) Successful in 27m21s
Release / pkg-rpm (push) Failing after 26m24s
2026-04-30 20:23:29 +02:00
Hein
f192decff8 Add Prisma 7 flag support 2026-04-30 20:22:57 +02:00
19 changed files with 399 additions and 93 deletions

View File

@@ -286,79 +286,79 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
if filePath == "" {
return nil, fmt.Errorf("file path is required for DBML format")
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dbml.NewReader(newReaderOptions(filePath, ""))
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("file path is required for DCTX format")
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dctx.NewReader(newReaderOptions(filePath, ""))
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("file path is required for DrawDB format")
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drawdb.NewReader(newReaderOptions(filePath, ""))
case "json":
if filePath == "" {
return nil, fmt.Errorf("file path is required for JSON format")
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = json.NewReader(newReaderOptions(filePath, ""))
case "yaml", "yml":
if filePath == "" {
return nil, fmt.Errorf("file path is required for YAML format")
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = yaml.NewReader(newReaderOptions(filePath, ""))
case "pgsql", "postgres", "postgresql":
if connString == "" {
return nil, fmt.Errorf("connection string is required for PostgreSQL format")
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
reader = pgsql.NewReader(newReaderOptions("", connString))
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("file path is required for GORM format")
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = gorm.NewReader(newReaderOptions(filePath, ""))
case "bun":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Bun format")
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = bun.NewReader(newReaderOptions(filePath, ""))
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Drizzle format")
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drizzle.NewReader(newReaderOptions(filePath, ""))
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Prisma format")
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = prisma.NewReader(newReaderOptions(filePath, ""))
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("file path is required for TypeORM format")
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = typeorm.NewReader(newReaderOptions(filePath, ""))
case "graphql", "gql":
if filePath == "" {
return nil, fmt.Errorf("file path is required for GraphQL format")
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = graphql.NewReader(newReaderOptions(filePath, ""))
case "mssql", "sqlserver", "mssql2016", "mssql2017", "mssql2019", "mssql2022":
if connString == "" {
return nil, fmt.Errorf("connection string is required for MSSQL format")
}
reader = mssql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
reader = mssql.NewReader(newReaderOptions("", connString))
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
@@ -369,7 +369,7 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
if dbPath == "" {
return nil, fmt.Errorf("file path or connection string is required for SQLite format")
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
reader = sqlite.NewReader(newReaderOptions(dbPath, ""))
default:
return nil, fmt.Errorf("unsupported source format: %s", dbType)
@@ -386,12 +386,7 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool, nullableTypes string) error {
var writer writers.Writer
writerOpts := &writers.WriterOptions{
OutputPath: outputPath,
PackageName: packageName,
FlattenSchema: flattenSchema,
NullableTypes: nullableTypes,
}
writerOpts := newWriterOptions(outputPath, packageName, flattenSchema, nullableTypes)
switch strings.ToLower(dbType) {
case "dbml":

View File

@@ -240,62 +240,62 @@ func readDatabaseForEdit(dbType, filePath, connString, label string) (*models.Da
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DBML format", label)
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dbml.NewReader(newReaderOptions(filePath, ""))
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dctx.NewReader(newReaderOptions(filePath, ""))
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drawdb.NewReader(newReaderOptions(filePath, ""))
case "graphql":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = graphql.NewReader(newReaderOptions(filePath, ""))
case "json":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for JSON format", label)
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = json.NewReader(newReaderOptions(filePath, ""))
case "yaml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for YAML format", label)
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = yaml.NewReader(newReaderOptions(filePath, ""))
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GORM format", label)
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = gorm.NewReader(newReaderOptions(filePath, ""))
case "bun":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Bun format", label)
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = bun.NewReader(newReaderOptions(filePath, ""))
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drizzle.NewReader(newReaderOptions(filePath, ""))
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = prisma.NewReader(newReaderOptions(filePath, ""))
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = typeorm.NewReader(newReaderOptions(filePath, ""))
case "pgsql":
if connString == "" {
return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
reader = pgsql.NewReader(newReaderOptions("", connString))
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
@@ -305,7 +305,7 @@ func readDatabaseForEdit(dbType, filePath, connString, label string) (*models.Da
if dbPath == "" {
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
reader = sqlite.NewReader(newReaderOptions(dbPath, ""))
default:
return nil, fmt.Errorf("%s: unsupported format: %s", label, dbType)
}
@@ -323,31 +323,31 @@ func writeDatabaseForEdit(dbType, filePath, connString string, db *models.Databa
switch strings.ToLower(dbType) {
case "dbml":
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wdbml.NewWriter(newWriterOptions(filePath, "", false, ""))
case "dctx":
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wdctx.NewWriter(newWriterOptions(filePath, "", false, ""))
case "drawdb":
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wdrawdb.NewWriter(newWriterOptions(filePath, "", false, ""))
case "graphql":
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wgraphql.NewWriter(newWriterOptions(filePath, "", false, ""))
case "json":
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wjson.NewWriter(newWriterOptions(filePath, "", false, ""))
case "yaml":
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wyaml.NewWriter(newWriterOptions(filePath, "", false, ""))
case "gorm":
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wgorm.NewWriter(newWriterOptions(filePath, "", false, ""))
case "bun":
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wbun.NewWriter(newWriterOptions(filePath, "", false, ""))
case "drizzle":
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wdrizzle.NewWriter(newWriterOptions(filePath, "", false, ""))
case "prisma":
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wprisma.NewWriter(newWriterOptions(filePath, "", false, ""))
case "typeorm":
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wtypeorm.NewWriter(newWriterOptions(filePath, "", false, ""))
case "sqlite", "sqlite3":
writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wsqlite.NewWriter(newWriterOptions(filePath, "", false, ""))
case "pgsql":
writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
writer = wpgsql.NewWriter(newWriterOptions(filePath, "", false, ""))
default:
return fmt.Errorf("%s: unsupported format: %s", label, dbType)
}

View File

@@ -221,73 +221,73 @@ func readDatabaseForInspect(dbType, filePath, connString string) (*models.Databa
if filePath == "" {
return nil, fmt.Errorf("file path is required for DBML format")
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dbml.NewReader(newReaderOptions(filePath, ""))
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("file path is required for DCTX format")
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dctx.NewReader(newReaderOptions(filePath, ""))
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("file path is required for DrawDB format")
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drawdb.NewReader(newReaderOptions(filePath, ""))
case "graphql":
if filePath == "" {
return nil, fmt.Errorf("file path is required for GraphQL format")
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = graphql.NewReader(newReaderOptions(filePath, ""))
case "json":
if filePath == "" {
return nil, fmt.Errorf("file path is required for JSON format")
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = json.NewReader(newReaderOptions(filePath, ""))
case "yaml", "yml":
if filePath == "" {
return nil, fmt.Errorf("file path is required for YAML format")
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = yaml.NewReader(newReaderOptions(filePath, ""))
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("file path is required for GORM format")
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = gorm.NewReader(newReaderOptions(filePath, ""))
case "bun":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Bun format")
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = bun.NewReader(newReaderOptions(filePath, ""))
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Drizzle format")
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drizzle.NewReader(newReaderOptions(filePath, ""))
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("file path is required for Prisma format")
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = prisma.NewReader(newReaderOptions(filePath, ""))
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("file path is required for TypeORM format")
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = typeorm.NewReader(newReaderOptions(filePath, ""))
case "pgsql", "postgres", "postgresql":
if connString == "" {
return nil, fmt.Errorf("connection string is required for PostgreSQL format")
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
reader = pgsql.NewReader(newReaderOptions("", connString))
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
@@ -298,7 +298,7 @@ func readDatabaseForInspect(dbType, filePath, connString string) (*models.Databa
if dbPath == "" {
return nil, fmt.Errorf("file path or connection string is required for SQLite format")
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
reader = sqlite.NewReader(newReaderOptions(dbPath, ""))
default:
return nil, fmt.Errorf("unsupported database type: %s", dbType)

View File

@@ -284,62 +284,62 @@ func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.D
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DBML format", label)
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dbml.NewReader(newReaderOptions(filePath, ""))
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = dctx.NewReader(newReaderOptions(filePath, ""))
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drawdb.NewReader(newReaderOptions(filePath, ""))
case "graphql":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = graphql.NewReader(newReaderOptions(filePath, ""))
case "json":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for JSON format", label)
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = json.NewReader(newReaderOptions(filePath, ""))
case "yaml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for YAML format", label)
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = yaml.NewReader(newReaderOptions(filePath, ""))
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GORM format", label)
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = gorm.NewReader(newReaderOptions(filePath, ""))
case "bun":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Bun format", label)
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = bun.NewReader(newReaderOptions(filePath, ""))
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = drizzle.NewReader(newReaderOptions(filePath, ""))
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = prisma.NewReader(newReaderOptions(filePath, ""))
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
reader = typeorm.NewReader(newReaderOptions(filePath, ""))
case "pgsql":
if connString == "" {
return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
reader = pgsql.NewReader(newReaderOptions("", connString))
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
@@ -349,7 +349,7 @@ func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.D
if dbPath == "" {
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
reader = sqlite.NewReader(newReaderOptions(dbPath, ""))
default:
return nil, fmt.Errorf("%s: unsupported format '%s'", label, dbType)
}
@@ -370,61 +370,61 @@ func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Datab
if filePath == "" {
return fmt.Errorf("%s: file path is required for DBML format", label)
}
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wdbml.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "dctx":
if filePath == "" {
return fmt.Errorf("%s: file path is required for DCTX format", label)
}
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wdctx.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "drawdb":
if filePath == "" {
return fmt.Errorf("%s: file path is required for DrawDB format", label)
}
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wdrawdb.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "graphql":
if filePath == "" {
return fmt.Errorf("%s: file path is required for GraphQL format", label)
}
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wgraphql.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "json":
if filePath == "" {
return fmt.Errorf("%s: file path is required for JSON format", label)
}
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wjson.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "yaml":
if filePath == "" {
return fmt.Errorf("%s: file path is required for YAML format", label)
}
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wyaml.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "gorm":
if filePath == "" {
return fmt.Errorf("%s: file path is required for GORM format", label)
}
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wgorm.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "bun":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Bun format", label)
}
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wbun.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "drizzle":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Drizzle format", label)
}
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wdrizzle.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "prisma":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Prisma format", label)
}
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wprisma.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "typeorm":
if filePath == "" {
return fmt.Errorf("%s: file path is required for TypeORM format", label)
}
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wtypeorm.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "sqlite", "sqlite3":
writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
writer = wsqlite.NewWriter(newWriterOptions(filePath, "", flattenSchema, ""))
case "pgsql":
writerOpts := &writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema}
writerOpts := newWriterOptions(filePath, "", flattenSchema, "")
if connString != "" {
writerOpts.Metadata = map[string]interface{}{
"connection_string": connString,

View File

@@ -0,0 +1,24 @@
package main
import (
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
// newReaderOptions builds a ReaderOptions value for the given file path and
// connection string, carrying the package-level prisma7 flag along so every
// reader constructed through this helper observes the --prisma7 CLI setting.
func newReaderOptions(filePath, connString string) *readers.ReaderOptions {
	opts := &readers.ReaderOptions{Prisma7: prisma7}
	opts.FilePath = filePath
	opts.ConnectionString = connString
	return opts
}
// newWriterOptions assembles a WriterOptions value from the common writer
// parameters, injecting the package-level prisma7 flag so writers honour the
// --prisma7 CLI setting without each call site repeating it.
func newWriterOptions(outputPath, packageName string, flattenSchema bool, nullableTypes string) *writers.WriterOptions {
	opts := &writers.WriterOptions{Prisma7: prisma7}
	opts.OutputPath = outputPath
	opts.PackageName = packageName
	opts.FlattenSchema = flattenSchema
	opts.NullableTypes = nullableTypes
	return opts
}

View File

@@ -12,6 +12,7 @@ var (
// Version information, set via ldflags during build
version = "dev"
buildDate = "unknown"
prisma7 bool
)
func init() {
@@ -68,4 +69,5 @@ func init() {
rootCmd.AddCommand(mergeCmd)
rootCmd.AddCommand(splitCmd)
rootCmd.AddCommand(versionCmd)
rootCmd.PersistentFlags().BoolVar(&prisma7, "prisma7", false, "Use Prisma 7 generator conventions when reading/writing Prisma schemas")
}

View File

@@ -1,6 +1,6 @@
# Maintainer: Hein (Warky Devs) <hein@warky.dev>
pkgname=relspec
pkgver=1.0.49
pkgver=1.0.52
pkgrel=1
pkgdesc="RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs."
arch=('x86_64' 'aarch64')

View File

@@ -1,5 +1,5 @@
Name: relspec
Version: 1.0.49
Version: 1.0.52
Release: 1%{?dist}
Summary: RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.

View File

@@ -70,6 +70,7 @@ func (r *Reader) ReadTable() (*models.Table, error) {
// parsePrisma parses Prisma schema content and returns a Database model
func (r *Reader) parsePrisma(content string) (*models.Database, error) {
db := models.InitDatabase("database")
db.SourceFormat = "prisma"
if r.options.Metadata != nil {
if name, ok := r.options.Metadata["name"].(string); ok {
@@ -139,7 +140,7 @@ func (r *Reader) parsePrisma(content string) (*models.Database, error) {
case "datasource":
r.parseDatasource(blockContent, db)
case "generator":
// We don't need to do anything with generator blocks
r.parseGenerator(blockContent, db)
case "model":
if currentTable != nil {
r.parseModelFields(blockContent, currentTable)
@@ -173,10 +174,34 @@ func (r *Reader) parsePrisma(content string) (*models.Database, error) {
// Second pass: resolve relationships
r.resolveRelationships(schema)
if db.SourceFormat == "prisma" && r.options != nil && r.options.Prisma7 {
db.SourceFormat = "prisma7"
}
db.Schemas = append(db.Schemas, schema)
return db, nil
}
// generatorProviderRegex matches the provider assignment inside a Prisma
// generator block, capturing the quoted provider name. Compiled once at
// package scope rather than on every parseGenerator call.
var generatorProviderRegex = regexp.MustCompile(`provider\s*=\s*"([^"]+)"`)

// parseGenerator inspects a generator block and records the schema dialect on
// db.SourceFormat: the Prisma 7 "prisma-client" provider maps to "prisma7",
// any other explicit provider maps to "prisma". When the block declares no
// provider line at all, the Prisma7 reader option (if set) forces "prisma7".
func (r *Reader) parseGenerator(lines []string, db *models.Database) {
	for _, line := range lines {
		matches := generatorProviderRegex.FindStringSubmatch(line)
		if matches == nil {
			continue
		}
		// The first provider line wins; later lines are ignored.
		if matches[1] == "prisma-client" {
			db.SourceFormat = "prisma7"
		} else {
			db.SourceFormat = "prisma"
		}
		return
	}
	// No provider found in the block: fall back to the explicit flag.
	if r.options != nil && r.options.Prisma7 {
		db.SourceFormat = "prisma7"
	}
}
// parseDatasource extracts database type from datasource block
func (r *Reader) parseDatasource(lines []string, db *models.Database) {
providerRegex := regexp.MustCompile(`provider\s*=\s*"?(\w+)"?`)

View File

@@ -0,0 +1,77 @@
package prisma
import (
"os"
"path/filepath"
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// TestReadDatabase_Prisma7GeneratorSetsSourceFormat checks that a generator
// block using the "prisma-client" provider marks the database as prisma7.
func TestReadDatabase_Prisma7GeneratorSetsSourceFormat(t *testing.T) {
	t.Parallel()

	// A schema whose generator block declares the Prisma 7 provider.
	schema := `datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
generator client {
provider = "prisma-client"
output = "./generated"
}
model User {
id Int @id @default(autoincrement())
}`
	path := filepath.Join(t.TempDir(), "schema.prisma")
	if err := os.WriteFile(path, []byte(schema), 0644); err != nil {
		t.Fatalf("failed to write schema: %v", err)
	}

	db, err := NewReader(&readers.ReaderOptions{FilePath: path}).ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() failed: %v", err)
	}
	if got := db.SourceFormat; got != "prisma7" {
		t.Fatalf("expected SourceFormat prisma7, got %q", got)
	}
}
// TestReadDatabase_Prisma7FlagSetsSourceFormatWithoutGenerator checks that the
// Prisma7 reader option alone (no generator block) still yields prisma7.
func TestReadDatabase_Prisma7FlagSetsSourceFormatWithoutGenerator(t *testing.T) {
	t.Parallel()

	// A schema with no generator block at all.
	schema := `datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
model User {
id Int @id @default(autoincrement())
}`
	path := filepath.Join(t.TempDir(), "schema.prisma")
	if err := os.WriteFile(path, []byte(schema), 0644); err != nil {
		t.Fatalf("failed to write schema: %v", err)
	}

	db, err := NewReader(&readers.ReaderOptions{
		FilePath: path,
		Prisma7:  true,
	}).ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() failed: %v", err)
	}
	if got := db.SourceFormat; got != "prisma7" {
		t.Fatalf("expected SourceFormat prisma7 from flag, got %q", got)
	}
}

View File

@@ -25,6 +25,9 @@ type ReaderOptions struct {
// ConnectionString is the database connection string (for DB readers)
ConnectionString string
// Prisma7 enables Prisma 7-specific handling for Prisma schemas.
Prisma7 bool
// Additional options can be added here as needed
Metadata map[string]interface{}
}

View File

@@ -311,10 +311,11 @@ func (tm *TypeMapper) BuildBunTag(column *models.Column, table *models.Table) st
if column.Type != "" {
// Sanitize type to remove backticks
typeStr := writers.SanitizeStructTagValue(column.Type)
isArray := pgsql.IsArrayType(typeStr)
hasExplicitTypeModifier := pgsql.HasExplicitTypeModifier(typeStr)
if !hasExplicitTypeModifier && column.Length > 0 {
if !hasExplicitTypeModifier && !isArray && column.Length > 0 {
typeStr = fmt.Sprintf("%s(%d)", typeStr, column.Length)
} else if !hasExplicitTypeModifier && column.Precision > 0 {
} else if !hasExplicitTypeModifier && !isArray && column.Precision > 0 {
if column.Scale > 0 {
typeStr = fmt.Sprintf("%s(%d,%d)", typeStr, column.Precision, column.Scale)
} else {
@@ -322,6 +323,9 @@ func (tm *TypeMapper) BuildBunTag(column *models.Column, table *models.Table) st
}
}
parts = append(parts, fmt.Sprintf("type:%s", typeStr))
if isArray && tm.typeStyle == writers.NullableTypeStdlib {
parts = append(parts, "array")
}
}
// Primary key

View File

@@ -574,6 +574,10 @@ func TestTypeMapper_SQLTypeToGoType_Bun(t *testing.T) {
{"boolean", false, "resolvespec_common.SqlBool"},
{"uuid", false, "resolvespec_common.SqlUUID"},
{"jsonb", false, "resolvespec_common.SqlJSONB"},
{"text[]", true, "resolvespec_common.SqlStringArray"},
{"text[]", false, "resolvespec_common.SqlStringArray"},
{"integer[]", true, "resolvespec_common.SqlInt32Array"},
{"bigint[]", false, "resolvespec_common.SqlInt64Array"},
}
for _, tt := range tests {
@@ -685,6 +689,24 @@ func TestTypeMapper_BuildBunTag(t *testing.T) {
},
want: []string{"id,", "type:bigserial,", "pk,", "autoincrement,"},
},
{
name: "text array type",
column: &models.Column{
Name: "tags",
Type: "text[]",
NotNull: false,
},
want: []string{"tags,", "type:text[],"},
},
{
name: "integer array type",
column: &models.Column{
Name: "scores",
Type: "integer[]",
NotNull: true,
},
want: []string{"scores,", "type:integer[],"},
},
}
for _, tt := range tests {
@@ -695,6 +717,30 @@ func TestTypeMapper_BuildBunTag(t *testing.T) {
t.Errorf("BuildBunTag() = %q, missing %q", result, part)
}
}
// resolvespec mode must NOT add "array" — SqlXxxArray uses sql.Scanner
if strings.Contains(result, ",array,") || strings.HasSuffix(result, ",array,") {
t.Errorf("BuildBunTag() = %q, must not contain 'array' in resolvespec mode", result)
}
})
}
}
// TestTypeMapper_BuildBunTag_StdlibArrayHasArrayTag verifies that, in stdlib
// nullable-type mode, Bun tags for PostgreSQL array columns include "array".
func TestTypeMapper_BuildBunTag_StdlibArrayHasArrayTag(t *testing.T) {
	mapper := NewTypeMapper(writers.NullableTypeStdlib)

	for _, tc := range []struct {
		name   string
		column *models.Column
	}{
		{name: "text array", column: &models.Column{Name: "tags", Type: "text[]"}},
		{name: "integer array", column: &models.Column{Name: "scores", Type: "integer[]", NotNull: true}},
	} {
		t.Run(tc.name, func(t *testing.T) {
			if tag := mapper.BuildBunTag(tc.column, nil); !strings.Contains(tag, "array") {
				t.Errorf("BuildBunTag() = %q, expected 'array' in stdlib mode", tag)
			}
		})
	}
}

View File

@@ -658,6 +658,10 @@ func TestTypeMapper_SQLTypeToGoType(t *testing.T) {
{"timestamp", false, "sql_types.SqlTimeStamp"},
{"boolean", true, "bool"},
{"boolean", false, "sql_types.SqlBool"},
{"text[]", true, "sql_types.SqlStringArray"},
{"text[]", false, "sql_types.SqlStringArray"},
{"integer[]", true, "sql_types.SqlInt32Array"},
{"bigint[]", false, "sql_types.SqlInt64Array"},
}
for _, tt := range tests {
@@ -670,6 +674,21 @@ func TestTypeMapper_SQLTypeToGoType(t *testing.T) {
}
}
// TestTypeMapper_BuildGormTag_ArrayType verifies that a text[] column keeps
// its array suffix in the generated GORM type tag.
func TestTypeMapper_BuildGormTag_ArrayType(t *testing.T) {
	mapper := NewTypeMapper("")

	column := &models.Column{
		Name:    "tags",
		Type:    "text[]",
		NotNull: false,
	}
	if tag := mapper.BuildGormTag(column, nil); !strings.Contains(tag, "type:text[]") {
		t.Fatalf("expected array type to be preserved, got %q", tag)
	}
}
func TestTypeMapper_BuildGormTag_PreservesExplicitTypeModifiers(t *testing.T) {
mapper := NewTypeMapper("")

View File

@@ -1251,6 +1251,9 @@ func isIntegerType(colType string) bool {
func isTextType(colType string) bool {
textTypes := []string{"text", "varchar", "character varying", "char", "character", "string"}
lowerType := strings.ToLower(colType)
if strings.HasSuffix(lowerType, "[]") {
return false
}
for _, t := range textTypes {
if strings.HasPrefix(lowerType, t) {
return true

View File

@@ -87,6 +87,43 @@ func TestWriteDatabase(t *testing.T) {
}
}
// TestWriteDatabase_GinIndexOnTextArrayDoesNotUseTrigramOperatorClass ensures
// that a GIN index over a text[] column is written without the gin_trgm_ops
// operator class.
func TestWriteDatabase_GinIndexOnTextArrayDoesNotUseTrigramOperatorClass(t *testing.T) {
	column := models.InitColumn("tags", "plans", "public")
	column.Type = "text[]"

	table := models.InitTable("plans", "public")
	table.Columns["tags"] = column
	table.Indexes["idx_plans_tags"] = &models.Index{
		Name:    "idx_plans_tags",
		Type:    "gin",
		Columns: []string{"tags"},
	}

	schema := models.InitSchema("public")
	schema.Tables = append(schema.Tables, table)

	database := models.InitDatabase("testdb")
	database.Schemas = append(database.Schemas, schema)

	var out bytes.Buffer
	w := NewWriter(&writers.WriterOptions{})
	w.writer = &out
	if err := w.WriteDatabase(database); err != nil {
		t.Fatalf("WriteDatabase failed: %v", err)
	}

	sql := out.String()
	if !strings.Contains(sql, `USING gin (tags)`) {
		t.Fatalf("expected GIN index on array column without explicit trigram opclass, got:\n%s", sql)
	}
	if strings.Contains(sql, "gin_trgm_ops") {
		t.Fatalf("did not expect gin_trgm_ops for text[] column, got:\n%s", sql)
	}
}
func TestWriteForeignKeys(t *testing.T) {
// Create a test database with two related tables
db := models.InitDatabase("testdb")

View File

@@ -61,7 +61,7 @@ func (w *Writer) databaseToPrisma(db *models.Database) string {
sb.WriteString("\n")
// Write generator block
sb.WriteString(w.generateGenerator())
sb.WriteString(w.generateGenerator(db))
sb.WriteString("\n")
// Process all schemas (typically just one in Prisma)
@@ -114,13 +114,28 @@ func (w *Writer) generateDatasource(db *models.Database) string {
}
// generateGenerator generates the generator block
func (w *Writer) generateGenerator() string {
func (w *Writer) generateGenerator(db *models.Database) string {
if w.usePrisma7Generator(db) {
return `generator client {
provider = "prisma-client"
output = "./generated"
}
`
}
return `generator client {
provider = "prisma-client-js"
}
`
}
// usePrisma7Generator reports whether the Prisma 7 generator block should be
// emitted, either because the writer was configured with the Prisma7 option or
// because the database was read from a "prisma7" source format.
func (w *Writer) usePrisma7Generator(db *models.Database) bool {
	optedIn := w.options != nil && w.options.Prisma7
	fromSource := db != nil && db.SourceFormat == "prisma7"
	return optedIn || fromSource
}
// enumToPrisma converts an Enum to Prisma enum block
func (w *Writer) enumToPrisma(enum *models.Enum) string {
var sb strings.Builder

View File

@@ -0,0 +1,52 @@
package prisma
import (
"strings"
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
// TestGenerateGenerator_DefaultsToPrismaClientJS confirms the legacy
// prisma-client-js generator block is produced when neither the Prisma7 option
// nor a prisma7 source format is present.
func TestGenerateGenerator_DefaultsToPrismaClientJS(t *testing.T) {
	t.Parallel()
	w := NewWriter(&writers.WriterOptions{})
	block := w.generateGenerator(models.InitDatabase("testdb"))
	if !strings.Contains(block, `provider = "prisma-client-js"`) {
		t.Fatalf("expected prisma-client-js generator, got:\n%s", block)
	}
	if strings.Contains(block, `output = "./generated"`) {
		t.Fatalf("did not expect prisma7 output path in default generator:\n%s", block)
	}
}
// TestGenerateGenerator_Prisma7FlagUsesPrismaClient confirms that the Prisma7
// writer option switches the generator to the prisma-client provider with the
// generated output path.
func TestGenerateGenerator_Prisma7FlagUsesPrismaClient(t *testing.T) {
	t.Parallel()
	w := NewWriter(&writers.WriterOptions{Prisma7: true})
	block := w.generateGenerator(models.InitDatabase("testdb"))
	if !strings.Contains(block, `provider = "prisma-client"`) {
		t.Fatalf("expected prisma-client generator, got:\n%s", block)
	}
	if !strings.Contains(block, `output = "./generated"`) {
		t.Fatalf("expected prisma7 output path, got:\n%s", block)
	}
}
func TestGenerateGenerator_Prisma7SourceFormatUsesPrismaClient(t *testing.T) {
t.Parallel()
writer := NewWriter(&writers.WriterOptions{})
db := models.InitDatabase("testdb")
db.SourceFormat = "prisma7"
got := writer.generateGenerator(db)
if !strings.Contains(got, `provider = "prisma-client"`) {
t.Fatalf("expected prisma-client generator from source format, got:\n%s", got)
}
}

View File

@@ -51,6 +51,9 @@ type WriterOptions struct {
// "stdlib" — database/sql (sql.NullString, sql.NullInt32, …)
NullableTypes string
// Prisma7 enables Prisma 7-specific output for Prisma writers.
Prisma7 bool
// Additional options can be added here as needed
Metadata map[string]interface{}
}
@@ -204,7 +207,8 @@ func quoteSQLLiteral(value string) string {
// - Returns a clean identifier safe for use in struct tags and field names
func SanitizeStructTagValue(value string) string {
// Remove DBML/DCTX style comments in brackets (e.g., [note: 'description'])
commentRegex := regexp.MustCompile(`\s*\[.*?\]\s*`)
// Require at least one character inside brackets to avoid stripping PostgreSQL array suffix "[]"
commentRegex := regexp.MustCompile(`\s*\[[^\]]+\]\s*`)
value = commentRegex.ReplaceAllString(value, "")
// Trim whitespace