Compare commits

...

10 Commits

Author SHA1 Message Date
Hein
8b906cf4a3 chore(release): update package version to 1.0.49
All checks were successful
Release / test (push) Successful in 32m39s
Release / release (push) Successful in 31m40s
Release / pkg-aur (push) Successful in 32m46s
Release / pkg-deb (push) Successful in 32m9s
Release / pkg-rpm (push) Successful in 29m53s
2026-04-30 18:16:28 +02:00
Hein
0a3966e6fc fix(pgsql): handle default values for array types in migrations
* update default value quoting logic for PostgreSQL
* add tests for array default value handling
2026-04-30 18:16:21 +02:00
Hein
d30fc24f55 chore(release): update package version to 1.0.48
All checks were successful
Release / pkg-deb (push) Successful in 32m6s
Release / test (push) Successful in 32m44s
Release / release (push) Successful in 32m5s
Release / pkg-aur (push) Successful in 32m38s
Release / pkg-rpm (push) Successful in 30m46s
2026-04-30 16:07:33 +02:00
Hein
16a489d0b8 style(pkg): align json and numeric type mappings 2026-04-30 16:07:16 +02:00
Hein
3524e86282 feat: add --types flag and stdlib nullable type support for bun/gorm writers
* Fix pgsql reader double-quoting defaults: normalizePostgresDefault strips
  surrounding SQL string literal quotes from column_default before storing,
  matching the convention used by every other reader.

* Add NullableTypes field to WriterOptions with NullableTypeResolveSpec
  (default) and NullableTypeStdlib constants.

* Both bun and gorm TypeMappers now accept a typeStyle parameter. stdlib
  mode produces sql.NullString/NullInt32/NullTime etc. for nullable scalars,
  plain Go slices for arrays, and time.Time for NOT NULL timestamps. Default
  resolvespec behaviour is unchanged.

* Add --types flag to convert and split commands.

* Update bun/README.md and gorm/README.md with side-by-side generated code
  examples, updated type mapping tables, and Writer Options documentation.
2026-04-30 16:00:54 +02:00
Hein
1e54fdcd7f Merge branch 'master' of git.warky.dev:wdevs/relspecgo 2026-04-30 15:15:34 +02:00
fb104ea084 feat: PostgreSQL connections opened by relspec set application_name by default to relspecgo/<version>
All checks were successful
Release / test (push) Successful in 31m41s
Release / release (push) Successful in 28m47s
Release / pkg-aur (push) Successful in 32m40s
Release / pkg-deb (push) Successful in 32m25s
Release / pkg-rpm (push) Successful in 28m30s
2026-04-26 17:48:26 +02:00
837160b77a feat(pgsql): implement application_name handling in connection 2026-04-26 17:45:25 +02:00
ed7130bba8 refactor(pkg): canonicalize base types and adjust length handling
* Update base types to keep explicit modifier forms
* Modify length handling for vector types in tests
2026-04-26 17:35:15 +02:00
Hein
3d9cc7ec58 .
All checks were successful
Release / Build and Release (push) Successful in 25m33s
2026-02-20 16:32:19 +02:00
33 changed files with 893 additions and 178 deletions

View File

@@ -42,6 +42,11 @@ relspec convert --from pgsql --from-conn "postgres://..." --to sqlite --to-path
relspec convert --from json --from-list "a.json,b.json" --to yaml --to-path merged.yaml
```
PostgreSQL connections opened by relspec set `application_name` by default to
`relspecgo/<version>` (with component suffixes internally, e.g. readers/writers).
If you need a custom value, provide `application_name` explicitly in the connection
string query parameters.
### `merge` — Additive schema merge (never modifies existing items)
```bash

View File

@@ -52,6 +52,7 @@ var (
convertPackageName string
convertSchemaFilter string
convertFlattenSchema bool
convertNullableTypes string
)
var convertCmd = &cobra.Command{
@@ -175,6 +176,7 @@ func init() {
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
convertCmd.Flags().BoolVar(&convertFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
convertCmd.Flags().StringVar(&convertNullableTypes, "types", "", "Nullable type package for code-gen writers (bun/gorm): 'resolvespec' (default) or 'stdlib' (database/sql)")
err := convertCmd.MarkFlagRequired("from")
if err != nil {
@@ -241,7 +243,7 @@ func runConvert(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
}
if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter, convertFlattenSchema); err != nil {
if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter, convertFlattenSchema, convertNullableTypes); err != nil {
return fmt.Errorf("failed to write target: %w", err)
}
@@ -381,13 +383,14 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
return db, nil
}
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool) error {
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool, nullableTypes string) error {
var writer writers.Writer
writerOpts := &writers.WriterOptions{
OutputPath: outputPath,
PackageName: packageName,
FlattenSchema: flattenSchema,
NullableTypes: nullableTypes,
}
switch strings.ToLower(dbType) {

View File

@@ -22,6 +22,7 @@ var (
splitDatabaseName string
splitExcludeSchema string
splitExcludeTables string
splitNullableTypes string
)
var splitCmd = &cobra.Command{
@@ -110,6 +111,7 @@ func init() {
splitCmd.Flags().StringVar(&splitTables, "tables", "", "Comma-separated list of table names to include (case-insensitive)")
splitCmd.Flags().StringVar(&splitExcludeSchema, "exclude-schema", "", "Comma-separated list of schema names to exclude")
splitCmd.Flags().StringVar(&splitExcludeTables, "exclude-tables", "", "Comma-separated list of table names to exclude (case-insensitive)")
splitCmd.Flags().StringVar(&splitNullableTypes, "types", "", "Nullable type package for code-gen writers (bun/gorm): 'resolvespec' (default) or 'stdlib' (database/sql)")
err := splitCmd.MarkFlagRequired("from")
if err != nil {
@@ -185,6 +187,7 @@ func runSplit(cmd *cobra.Command, args []string) error {
splitPackageName,
"", // no schema filter for split
false, // no flatten-schema for split
splitNullableTypes,
)
if err != nil {
return fmt.Errorf("failed to write output: %w", err)

View File

@@ -1,6 +1,6 @@
# Maintainer: Hein (Warky Devs) <hein@warky.dev>
pkgname=relspec
pkgver=1.0.44
pkgver=1.0.49
pkgrel=1
pkgdesc="RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs."
arch=('x86_64' 'aarch64')

View File

@@ -1,5 +1,5 @@
Name: relspec
Version: 1.0.44
Version: 1.0.49
Release: 1%{?dist}
Summary: RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.

85
pkg/pgsql/connection.go Normal file
View File

@@ -0,0 +1,85 @@
package pgsql
import (
"context"
"fmt"
"runtime/debug"
"strings"
"github.com/jackc/pgx/v5"
)
const (
defaultApplicationPrefix = "relspecgo"
postgresIdentifierMaxLen = 63
)
// BuildApplicationName returns a PostgreSQL application_name in the form:
// relspecgo/<version>[:<component>]
//
// The result is capped at PostgreSQL's 63-byte identifier limit. Truncation
// happens on a UTF-8 rune boundary so a multi-byte character in the component
// is never cut in half (which would yield an invalid UTF-8 value).
func BuildApplicationName(component string) string {
	appName := fmt.Sprintf("%s/%s", defaultApplicationPrefix, relspecVersion())
	component = strings.TrimSpace(component)
	if component != "" {
		appName = appName + ":" + component
	}
	if len(appName) > postgresIdentifierMaxLen {
		// Back up past any UTF-8 continuation bytes (0b10xxxxxx) so the
		// kept prefix always ends on a complete rune.
		cut := postgresIdentifierMaxLen
		for cut > 0 && appName[cut]&0xC0 == 0x80 {
			cut--
		}
		appName = appName[:cut]
	}
	return appName
}
// ParseConfigWithApplicationName parses a connection string and applies a default
// application_name when one is not explicitly provided by the caller.
func ParseConfigWithApplicationName(connString, component string) (*pgx.ConnConfig, error) {
	cfg, err := pgx.ParseConfig(connString)
	if err != nil {
		return nil, err
	}
	if cfg.RuntimeParams == nil {
		cfg.RuntimeParams = make(map[string]string)
	}
	// Respect an explicit application_name from the DSN; only fill in the
	// relspec default when the caller left it unset or blank.
	if current := strings.TrimSpace(cfg.RuntimeParams["application_name"]); current == "" {
		cfg.RuntimeParams["application_name"] = BuildApplicationName(component)
	}
	return cfg, nil
}
// Connect establishes a PostgreSQL connection with a default relspec
// application_name when the caller does not provide one in the DSN.
func Connect(ctx context.Context, connString, component string) (*pgx.Conn, error) {
	cfg, cfgErr := ParseConfigWithApplicationName(connString, component)
	if cfgErr != nil {
		return nil, cfgErr
	}
	return pgx.ConnectConfig(ctx, cfg)
}
// relspecVersion reports a best-effort version string for this build:
// the module version when embedded, otherwise a short VCS revision,
// otherwise "dev".
func relspecVersion() string {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		return "dev"
	}
	if v := strings.TrimSpace(info.Main.Version); v != "" && v != "(devel)" {
		return v
	}
	// No usable module version; fall back to the embedded VCS revision,
	// shortened to the conventional 7-character abbreviation when possible.
	for _, s := range info.Settings {
		if s.Key != "vcs.revision" {
			continue
		}
		rev := strings.TrimSpace(s.Value)
		switch {
		case len(rev) >= 7:
			return rev[:7]
		case rev != "":
			return rev
		}
	}
	return "dev"
}

View File

@@ -0,0 +1,53 @@
package pgsql
import (
"strings"
"testing"
)
// The generated application_name must always start with the relspecgo/ prefix.
func TestBuildApplicationName_IncludesVersion(t *testing.T) {
	name := BuildApplicationName("")
	if !strings.HasPrefix(name, "relspecgo/") {
		t.Fatalf("BuildApplicationName() = %q, expected prefix relspecgo/", name)
	}
}
// A non-empty component must appear as a ":<component>" suffix.
func TestBuildApplicationName_IncludesComponent(t *testing.T) {
	name := BuildApplicationName("reader-pgsql")
	if !strings.Contains(name, ":reader-pgsql") {
		t.Fatalf("BuildApplicationName(component) = %q, expected component suffix", name)
	}
}
// An absurdly long component must still fit PostgreSQL's 63-byte identifier limit.
func TestBuildApplicationName_RespectsPostgresLengthLimit(t *testing.T) {
	name := BuildApplicationName(strings.Repeat("x", 200))
	if n := len(name); n > 63 {
		t.Fatalf("BuildApplicationName() length = %d, expected <= 63", n)
	}
}
// When the DSN carries no application_name, the relspec default must be applied.
func TestParseConfigWithApplicationName_AddsWhenMissing(t *testing.T) {
	cfg, err := ParseConfigWithApplicationName("postgres://user:pass@localhost:5432/db", "reader-pgsql")
	if err != nil {
		t.Fatalf("ParseConfigWithApplicationName() error = %v", err)
	}
	name := cfg.RuntimeParams["application_name"]
	switch {
	case name == "":
		t.Fatal("expected application_name to be set")
	case !strings.HasPrefix(name, "relspecgo/"):
		t.Fatalf("application_name = %q, expected relspecgo/<version> prefix", name)
	}
}
// An application_name supplied explicitly in the DSN must never be overwritten.
func TestParseConfigWithApplicationName_PreservesExplicitValue(t *testing.T) {
	const want = "custom-app"
	cfg, err := ParseConfigWithApplicationName("postgres://user:pass@localhost:5432/db?application_name=custom-app", "reader-pgsql")
	if err != nil {
		t.Fatalf("ParseConfigWithApplicationName() error = %v", err)
	}
	if got := cfg.RuntimeParams["application_name"]; got != want {
		t.Fatalf("application_name = %q, expected %q", got, want)
	}
}

View File

@@ -1,6 +1,9 @@
package pgsql
import "strings"
import (
"sort"
"strings"
)
// TypeSpec describes PostgreSQL type capabilities used by parsers/writers.
type TypeSpec struct {
@@ -106,9 +109,9 @@ var postgresBaseTypes = map[string]TypeSpec{
"ltree": {},
"lquery": {},
"ltxtquery": {},
"vector": {SupportsLength: true}, // pgvector: vector(dim)
"halfvec": {SupportsLength: true}, // pgvector: halfvec(dim)
"sparsevec": {SupportsLength: true}, // pgvector: sparsevec(dim)
"vector": {}, // pgvector: keep explicit modifier form (vector(dim))
"halfvec": {}, // pgvector: keep explicit modifier form (halfvec(dim))
"sparsevec": {}, // pgvector: keep explicit modifier form (sparsevec(dim))
}
var postgresTypeAliases = map[string]string{
@@ -148,6 +151,7 @@ func GetPostgresBaseTypes() []string {
for t := range postgresBaseTypes {
result = append(result, t)
}
sort.Strings(result)
return result
}

View File

@@ -53,7 +53,7 @@ func TestPostgresTypeRegistry_TypeParsingAndCapabilities(t *testing.T) {
wantBase: "vector",
wantCanonicalBase: "vector",
wantKnown: true,
wantLength: true,
wantLength: false,
},
{
input: "numeric(10,2)",

View File

@@ -711,6 +711,7 @@ func (r *Reader) parseTypeWithLength(typeStr string) (baseType string, length in
rawBaseType := strings.TrimSpace(matches[1])
if pgsql.SupportsLength(rawBaseType) {
if _, err := fmt.Sscanf(matches[2], "%d", &length); err == nil {
baseType = pgsql.CanonicalizeBaseType(rawBaseType)
return
}
}

View File

@@ -367,9 +367,9 @@ func TestParseTypeWithLength_PreservesExplicitTypeModifiers(t *testing.T) {
wantType string
wantLength int
}{
{"varchar(255)", "varchar(255)", 255},
{"character varying(120)", "character varying(120)", 120},
{"vector(1536)", "vector(1536)", 1536},
{"varchar(255)", "varchar", 255},
{"character varying(120)", "character varying", 120},
{"vector(1536)", "vector(1536)", 0},
{"numeric(10,2)", "numeric(10,2)", 0},
}

View File

@@ -664,7 +664,7 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
return column, constraint
}
func splitInlineComment(line string) (string, string) {
func splitInlineComment(line string) (content string, inlineComment string) {
commentStart := strings.Index(line, "//")
if commentStart == -1 {
return line, ""
@@ -673,7 +673,7 @@ func splitInlineComment(line string) (string, string) {
return strings.TrimSpace(line[:commentStart]), strings.TrimSpace(line[commentStart+2:])
}
func splitColumnSignatureAndAttrs(line string) (string, string) {
func splitColumnSignatureAndAttrs(line string) (signature string, attrs string) {
trimmed := strings.TrimSpace(line)
if trimmed == "" || !strings.HasSuffix(trimmed, "]") {
return trimmed, ""
@@ -699,7 +699,7 @@ func splitColumnSignatureAndAttrs(line string) (string, string) {
return trimmed, ""
}
func parseColumnSignature(signature string) (string, string, bool) {
func parseColumnSignature(signature string) (columnName string, columnType string, ok bool) {
signature = strings.TrimSpace(signature)
if signature == "" {
return "", "", false
@@ -726,8 +726,8 @@ func parseColumnSignature(signature string) (string, string, bool) {
return "", "", false
}
columnName := stripQuotes(strings.TrimSpace(signature[:splitAt]))
columnType := stripWrappingQuotes(strings.TrimSpace(signature[splitAt:]))
columnName = stripQuotes(strings.TrimSpace(signature[:splitAt]))
columnType = stripWrappingQuotes(strings.TrimSpace(signature[splitAt:]))
if columnName == "" || columnType == "" {
return "", "", false
}

View File

@@ -317,7 +317,7 @@ func TestConvertToColumn_PreservesExplicitTypeModifiers(t *testing.T) {
name: "custom vector modifier",
fieldType: "vector(1536)",
wantType: "vector(1536)",
wantLength: 1536,
wantLength: 0,
},
}

View File

@@ -677,19 +677,8 @@ func (r *Reader) extractTableFromGormTag(tag string) (tablename string, schemaNa
// deriveTableName derives a table name from struct name
func (r *Reader) deriveTableName(structName string) string {
// Remove "Model" prefix if present
name := strings.TrimPrefix(structName, "Model")
// Convert PascalCase to snake_case
var result strings.Builder
for i, r := range name {
if i > 0 && r >= 'A' && r <= 'Z' {
result.WriteRune('_')
}
result.WriteRune(r)
}
return strings.ToLower(result.String())
// Remove "Model" prefix if present, use the name as-is without transformation
return strings.TrimPrefix(structName, "Model")
}
// parseColumn parses a struct field into a Column model
@@ -804,6 +793,7 @@ func (r *Reader) parseTypeWithLength(typeStr string) (baseType string, length in
// This avoids converting custom modifiers like vector(1536) into Length.
if pgsql.SupportsLength(rawBaseType) && !strings.Contains(parens, ",") {
if _, err := fmt.Sscanf(parens, "%d", &length); err == nil {
baseType = pgsql.CanonicalizeBaseType(rawBaseType)
return
}
}

View File

@@ -374,9 +374,9 @@ func TestParseTypeWithLength_PreservesExplicitTypeModifiers(t *testing.T) {
wantType string
wantLength int
}{
{"varchar(255)", "varchar(255)", 255},
{"character varying(120)", "character varying(120)", 120},
{"vector(1536)", "vector(1536)", 1536},
{"varchar(255)", "varchar", 255},
{"character varying(120)", "character varying", 120},
{"vector(1536)", "vector(1536)", 0},
{"numeric(10,2)", "numeric(10,2)", 0},
}

View File

@@ -89,6 +89,10 @@ postgres://user@localhost/mydb?sslmode=disable
postgres://user:pass@db.example.com:5432/production?sslmode=require
```
By default, relspec sets `application_name` to `relspecgo/<version>` for PostgreSQL
sessions so they are identifiable in `pg_stat_activity`. If you provide
`application_name` in the connection string, your explicit value is preserved.
## Extracted Information
### Tables

View File

@@ -252,7 +252,7 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
column.AutoIncrement = true
column.Default = defaultVal
} else {
column.Default = defaultVal
column.Default = normalizePostgresDefault(defaultVal)
}
}
@@ -613,3 +613,30 @@ func (r *Reader) parseIndexDefinition(indexName, tableName, schema, indexDef str
return index, nil
}
// normalizePostgresDefault converts a raw PostgreSQL column_default expression into the
// unquoted string value that the model convention expects. PostgreSQL stores string
// literal defaults as 'value' or 'value'::type (e.g. '{}'::text[]), while every other
// reader stores the bare value so the writer can re-quote it correctly.
// Non-quoted expressions (e.g. now(), numeric literals) are returned unchanged.
func normalizePostgresDefault(defaultVal string) string {
	if !strings.HasPrefix(defaultVal, "'") {
		return defaultVal
	}
	// Decode the SQL string literal after the opening quote: a doubled ''
	// is an escaped single quote; the first lone quote ends the literal
	// (any trailing ::cast suffix is simply dropped).
	body := defaultVal[1:]
	decoded := make([]byte, 0, len(body))
	i := 0
	for i < len(body) {
		c := body[i]
		if c != '\'' {
			decoded = append(decoded, c)
			i++
			continue
		}
		if i+1 < len(body) && body[i+1] == '\'' {
			decoded = append(decoded, '\'')
			i += 2
			continue
		}
		break
	}
	return string(decoded)
}

View File

@@ -244,7 +244,7 @@ func (r *Reader) ReadTable() (*models.Table, error) {
// connect establishes a connection to the PostgreSQL database
func (r *Reader) connect() error {
conn, err := pgx.Connect(r.ctx, r.options.ConnectionString)
conn, err := pgsql.Connect(r.ctx, r.options.ConnectionString, "reader-pgsql")
if err != nil {
return err
}

View File

@@ -46,54 +46,67 @@ func main() {
### CLI Examples
```bash
# Generate Bun models from PostgreSQL database
relspec --input pgsql \
--conn "postgres://localhost/mydb" \
--output bun \
--out-file models.go \
--package models
# Generate Bun models from a DBML schema (default: resolvespec types)
relspec convert --from dbml --from-path schema.dbml \
--to bun --to-path models.go --package models
# Convert GORM models to Bun
relspec --input gorm --in-file gorm_models.go --output bun --out-file bun_models.go
# Use standard library database/sql nullable types instead of resolvespec
relspec convert --from dbml --from-path schema.dbml \
--to bun --to-path models.go --package models \
--types stdlib
# Multi-file output
relspec --input json --in-file schema.json --output bun --out-file models/
# Explicitly select resolvespec types (same as omitting --types)
relspec convert --from pgsql --from-conn "postgres://localhost/mydb" \
--to bun --to-path models.go --package models \
--types resolvespec
# Multi-file output (one file per table)
relspec convert --from json --from-path schema.json \
--to bun --to-path models/ --package models
```
## Generated Code Example
## Generated Code Examples
### Default — resolvespec types (`--types resolvespec`)
```go
package models
import (
"time"
"database/sql"
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
"github.com/uptrace/bun"
)
type User struct {
bun.BaseModel `bun:"table:users,alias:u"`
ID int64 `bun:"id,pk,autoincrement" json:"id"`
Username string `bun:"username,notnull,unique" json:"username"`
Email string `bun:"email,notnull" json:"email"`
Bio sql.NullString `bun:"bio" json:"bio,omitempty"`
CreatedAt time.Time `bun:"created_at,notnull,default:now()" json:"created_at"`
// Relationships
Posts []*Post `bun:"rel:has-many,join:id=user_id" json:"posts,omitempty"`
ID int64 `bun:"id,type:uuid,pk," json:"id"`
Username string `bun:"username,type:text,notnull," json:"username"`
Email resolvespec_common.SqlString `bun:"email,type:text,nullzero," json:"email"`
Tags resolvespec_common.SqlStringArray `bun:"tags,type:text[],default:'{}',notnull," json:"tags"`
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
}
```
type Post struct {
bun.BaseModel `bun:"table:posts,alias:p"`
### Standard library — `--types stdlib`
ID int64 `bun:"id,pk" json:"id"`
UserID int64 `bun:"user_id,notnull" json:"user_id"`
Title string `bun:"title,notnull" json:"title"`
Content sql.NullString `bun:"content" json:"content,omitempty"`
```go
package models
// Belongs to
User *User `bun:"rel:belongs-to,join:user_id=id" json:"user,omitempty"`
import (
"database/sql"
"time"
"github.com/uptrace/bun"
)
type User struct {
bun.BaseModel `bun:"table:users,alias:u"`
ID string `bun:"id,type:uuid,pk," json:"id"`
Username string `bun:"username,type:text,notnull," json:"username"`
Email sql.NullString `bun:"email,type:text,nullzero," json:"email"`
Tags []string `bun:"tags,type:text[],default:'{}',notnull," json:"tags"`
CreatedAt time.Time `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
}
```
@@ -111,19 +124,68 @@ type Post struct {
## Type Mapping
| SQL Type | Go Type | Nullable Type |
|----------|---------|---------------|
| bigint | int64 | sql.NullInt64 |
| integer | int | sql.NullInt32 |
| varchar, text | string | sql.NullString |
| boolean | bool | sql.NullBool |
| timestamp | time.Time | sql.NullTime |
| numeric | float64 | sql.NullFloat64 |
The nullable type package is selected with `--types` (or `WriterOptions.NullableTypes`).
| SQL Type | NOT NULL (both) | Nullable — resolvespec | Nullable — stdlib |
|---|---|---|---|
| `bigint` | `int64` | `SqlInt64` | `sql.NullInt64` |
| `integer` | `int32` | `SqlInt32` | `sql.NullInt32` |
| `smallint` | `int16` | `SqlInt16` | `sql.NullInt16` |
| `text`, `varchar` | `string` | `SqlString` | `sql.NullString` |
| `boolean` | `bool` | `SqlBool` | `sql.NullBool` |
| `timestamp`, `timestamptz` | `time.Time`* | `SqlTimeStamp` | `sql.NullTime` |
| `numeric`, `decimal` | `float64` | `SqlFloat64` | `sql.NullFloat64` |
| `uuid` | `string` | `SqlUUID` | `sql.NullString` |
| `jsonb` | `string` | `SqlJSONB` | `sql.NullString` |
| `text[]` | `SqlStringArray` | `SqlStringArray` | `[]string` |
| `integer[]` | `SqlInt32Array` | `SqlInt32Array` | `[]int32` |
| `uuid[]` | `SqlUUIDArray` | `SqlUUIDArray` | `[]string` |
| `vector` | `SqlVector` | `SqlVector` | `[]float32` |
\* In resolvespec mode, NOT NULL timestamps use `SqlTimeStamp` (not `time.Time`) unless the base type is a simple integer or boolean. In stdlib mode, NOT NULL timestamps use `time.Time`.
## Writer Options
### NullableTypes
Controls which Go package is used for nullable column types. Set via the `--types` CLI flag or `WriterOptions.NullableTypes`:
```go
// Use resolvespec types (default — omit NullableTypes or set to "resolvespec")
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
NullableTypes: writers.NullableTypeResolveSpec,
}
// Use standard library database/sql types
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
NullableTypes: writers.NullableTypeStdlib,
}
```
### Metadata Options
```go
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
Metadata: map[string]any{
"multi_file": true, // Enable multi-file mode
"populate_refs": true, // Populate RefDatabase/RefSchema
"generate_get_id_str": true, // Generate GetIDStr() methods
},
}
```
## Notes
- Model names are derived from table names (singularized, PascalCase)
- Table aliases are auto-generated from table names
- Nullable columns use `resolvespec_common.SqlString`, `resolvespec_common.SqlTimeStamp`, etc. by default; pass `--types stdlib` to use `sql.NullString`, `sql.NullTime`, etc. instead
- Array columns use `resolvespec_common.SqlStringArray`, `resolvespec_common.SqlInt32Array`, etc. by default; `--types stdlib` produces plain Go slices (`[]string`, `[]int32`, …)
- Multi-file mode: one file per table named `sql_{schema}_{table}.go`
- Generated code is auto-formatted
- JSON tags are automatically added

View File

@@ -11,30 +11,43 @@ import (
// TypeMapper handles type conversions between SQL and Go types for Bun
type TypeMapper struct {
// Package alias for sql_types import
sqlTypesAlias string
typeStyle string // writers.NullableTypeResolveSpec | writers.NullableTypeStdlib
}
// NewTypeMapper creates a new TypeMapper with default settings
func NewTypeMapper() *TypeMapper {
// NewTypeMapper creates a new TypeMapper.
// typeStyle should be writers.NullableTypeResolveSpec or writers.NullableTypeStdlib;
// an empty string defaults to resolvespec.
func NewTypeMapper(typeStyle string) *TypeMapper {
if typeStyle == "" {
typeStyle = writers.NullableTypeResolveSpec
}
return &TypeMapper{
sqlTypesAlias: "resolvespec_common",
typeStyle: typeStyle,
}
}
// SQLTypeToGoType converts a SQL type to its Go equivalent
// Uses ResolveSpec common package types (all are nullable by default in Bun)
// SQLTypeToGoType converts a SQL type to its Go equivalent.
func (tm *TypeMapper) SQLTypeToGoType(sqlType string, notNull bool) string {
// Normalize SQL type (lowercase, remove length/precision)
// Array types are handled separately for both styles.
if pgsql.IsArrayType(sqlType) {
return tm.arrayGoType(tm.extractBaseType(sqlType))
}
baseType := tm.extractBaseType(sqlType)
// For Bun, we typically use resolvespec_common types for most fields
// unless they're explicitly NOT NULL and we want to avoid null handling
if tm.typeStyle == writers.NullableTypeStdlib {
if notNull {
return tm.rawGoType(baseType)
}
return tm.stdlibNullableGoType(baseType)
}
// resolvespec (default): use base Go types only for simple NOT NULL fields.
if notNull && tm.isSimpleType(baseType) {
return tm.baseGoType(baseType)
}
// Use resolvespec_common types for nullable fields
return tm.bunGoType(baseType)
}
@@ -154,6 +167,9 @@ func (tm *TypeMapper) bunGoType(sqlType string) string {
// Other
"money": tm.sqlTypesAlias + ".SqlFloat64",
// pgvector
"vector": tm.sqlTypesAlias + ".SqlVector",
}
if goType, ok := typeMap[sqlType]; ok {
@@ -164,6 +180,123 @@ func (tm *TypeMapper) bunGoType(sqlType string) string {
return tm.sqlTypesAlias + ".SqlString"
}
// resolvespecArrayTypeSuffixes maps a canonical PostgreSQL element type to the
// ResolveSpec array type name (without the package alias). Defined once at
// package level so the table is not rebuilt on every arrayGoType call.
var resolvespecArrayTypeSuffixes = map[string]string{
	"text": "SqlStringArray", "varchar": "SqlStringArray",
	"char": "SqlStringArray", "character": "SqlStringArray",
	"citext": "SqlStringArray", "bpchar": "SqlStringArray",
	"inet": "SqlStringArray", "cidr": "SqlStringArray",
	"macaddr": "SqlStringArray",
	"json": "SqlStringArray", "jsonb": "SqlStringArray",
	"integer": "SqlInt32Array", "int": "SqlInt32Array",
	"int4": "SqlInt32Array", "serial": "SqlInt32Array",
	"smallint": "SqlInt16Array", "int2": "SqlInt16Array",
	"smallserial": "SqlInt16Array",
	"bigint": "SqlInt64Array", "int8": "SqlInt64Array",
	"bigserial": "SqlInt64Array",
	"real": "SqlFloat32Array", "float4": "SqlFloat32Array",
	"double precision": "SqlFloat64Array", "float8": "SqlFloat64Array",
	"numeric": "SqlFloat64Array", "decimal": "SqlFloat64Array",
	"money": "SqlFloat64Array",
	"boolean": "SqlBoolArray", "bool": "SqlBoolArray",
	"uuid": "SqlUUIDArray",
}

// arrayGoType returns the Go type for a PostgreSQL array column.
// The baseElemType is the canonical base type (e.g. "text", "integer").
// In stdlib mode it delegates to stdlibArrayGoType (plain Go slices);
// otherwise it returns the aliased ResolveSpec array type, falling back
// to a string array for unknown element types.
func (tm *TypeMapper) arrayGoType(baseElemType string) string {
	if tm.typeStyle == writers.NullableTypeStdlib {
		return tm.stdlibArrayGoType(baseElemType)
	}
	suffix, ok := resolvespecArrayTypeSuffixes[baseElemType]
	if !ok {
		suffix = "SqlStringArray"
	}
	return tm.sqlTypesAlias + "." + suffix
}
// stdlibRawGoTypes maps a canonical PostgreSQL type to the plain Go type used
// for NOT NULL columns in stdlib mode. Defined once at package level so the
// table is not rebuilt on every rawGoType call.
var stdlibRawGoTypes = map[string]string{
	"integer": "int32", "int": "int32", "int4": "int32", "serial": "int32",
	"smallint": "int16", "int2": "int16", "smallserial": "int16",
	"bigint": "int64", "int8": "int64", "bigserial": "int64",
	"boolean": "bool", "bool": "bool",
	"real": "float32", "float4": "float32",
	"double precision": "float64", "float8": "float64",
	"numeric": "float64", "decimal": "float64", "money": "float64",
	"text": "string", "varchar": "string", "char": "string",
	"character": "string", "citext": "string", "bpchar": "string",
	"inet": "string", "cidr": "string", "macaddr": "string",
	"uuid": "string", "json": "string", "jsonb": "string",
	"timestamp":                   "time.Time",
	"timestamp without time zone": "time.Time",
	"timestamp with time zone":    "time.Time",
	"timestamptz":                 "time.Time",
	"date":                        "time.Time",
	"time":                        "time.Time",
	"time without time zone":      "time.Time",
	"time with time zone":         "time.Time",
	"timetz":                      "time.Time",
	"bytea":                       "[]byte",
	"vector":                      "[]float32",
}

// rawGoType returns the plain Go type for a NOT NULL column in stdlib mode.
// Unknown SQL types fall back to string.
func (tm *TypeMapper) rawGoType(sqlType string) string {
	if goType, ok := stdlibRawGoTypes[sqlType]; ok {
		return goType
	}
	return "string"
}
// stdlibNullableGoTypes maps a canonical PostgreSQL type to its database/sql
// nullable wrapper. Defined once at package level so the table is not rebuilt
// on every stdlibNullableGoType call.
var stdlibNullableGoTypes = map[string]string{
	"integer": "sql.NullInt32", "int": "sql.NullInt32", "int4": "sql.NullInt32", "serial": "sql.NullInt32",
	"smallint": "sql.NullInt16", "int2": "sql.NullInt16", "smallserial": "sql.NullInt16",
	"bigint": "sql.NullInt64", "int8": "sql.NullInt64", "bigserial": "sql.NullInt64",
	"boolean": "sql.NullBool", "bool": "sql.NullBool",
	"real": "sql.NullFloat64", "float4": "sql.NullFloat64",
	"double precision": "sql.NullFloat64", "float8": "sql.NullFloat64",
	"numeric": "sql.NullFloat64", "decimal": "sql.NullFloat64", "money": "sql.NullFloat64",
	"text": "sql.NullString", "varchar": "sql.NullString", "char": "sql.NullString",
	"character": "sql.NullString", "citext": "sql.NullString", "bpchar": "sql.NullString",
	"inet": "sql.NullString", "cidr": "sql.NullString", "macaddr": "sql.NullString",
	"uuid": "sql.NullString", "json": "sql.NullString", "jsonb": "sql.NullString",
	"timestamp":                   "sql.NullTime",
	"timestamp without time zone": "sql.NullTime",
	"timestamp with time zone":    "sql.NullTime",
	"timestamptz":                 "sql.NullTime",
	"date":                        "sql.NullTime",
	"time":                        "sql.NullTime",
	"time without time zone":      "sql.NullTime",
	"time with time zone":         "sql.NullTime",
	"timetz":                      "sql.NullTime",
	// bytea and vector have no database/sql wrapper; a nil slice already
	// represents NULL for these.
	"bytea":  "[]byte",
	"vector": "[]float32",
}

// stdlibNullableGoType returns the database/sql nullable type for a column.
// Unknown SQL types fall back to sql.NullString.
func (tm *TypeMapper) stdlibNullableGoType(sqlType string) string {
	if goType, ok := stdlibNullableGoTypes[sqlType]; ok {
		return goType
	}
	return "sql.NullString"
}
// stdlibArrayGoTypes maps a canonical PostgreSQL element type to a plain Go
// slice type for stdlib mode. Defined once at package level so the table is
// not rebuilt on every stdlibArrayGoType call.
var stdlibArrayGoTypes = map[string]string{
	"text": "[]string", "varchar": "[]string", "char": "[]string",
	"character": "[]string", "citext": "[]string", "bpchar": "[]string",
	"inet": "[]string", "cidr": "[]string", "macaddr": "[]string",
	"uuid": "[]string", "json": "[]string", "jsonb": "[]string",
	"integer": "[]int32", "int": "[]int32", "int4": "[]int32", "serial": "[]int32",
	"smallint": "[]int16", "int2": "[]int16", "smallserial": "[]int16",
	"bigint": "[]int64", "int8": "[]int64", "bigserial": "[]int64",
	"real": "[]float32", "float4": "[]float32",
	"double precision": "[]float64", "float8": "[]float64",
	"numeric": "[]float64", "decimal": "[]float64", "money": "[]float64",
	"boolean": "[]bool", "bool": "[]bool",
}

// stdlibArrayGoType returns a plain Go slice type for array columns in stdlib mode.
// Unknown element types fall back to []string.
func (tm *TypeMapper) stdlibArrayGoType(baseElemType string) string {
	if goType, ok := stdlibArrayGoTypes[baseElemType]; ok {
		return goType
	}
	return "[]string"
}
// BuildBunTag generates a complete Bun tag string for a column
// Bun format: bun:"column_name,type:type_name,pk,default:value"
func (tm *TypeMapper) BuildBunTag(column *models.Column, table *models.Table) string {
@@ -286,11 +419,20 @@ func (tm *TypeMapper) NeedsFmtImport(generateGetIDStr bool) bool {
return generateGetIDStr
}
// GetSQLTypesImport returns the import path for the ResolveSpec spectypes
// package, which provides the nullable types used in the default
// (resolvespec) mode.
func (tm *TypeMapper) GetSQLTypesImport() string {
	return "github.com/bitechdev/ResolveSpec/pkg/spectypes"
}
// GetNullableTypeImportLine returns the full Go import line for the nullable
// type package, ready to pass to AddImport: "database/sql" in stdlib mode,
// otherwise the aliased ResolveSpec spectypes import. It always returns a
// non-empty line — one of the two packages is always needed.
func (tm *TypeMapper) GetNullableTypeImportLine() string {
	if tm.typeStyle == writers.NullableTypeStdlib {
		return "\"database/sql\""
	}
	return fmt.Sprintf("%s \"%s\"", tm.sqlTypesAlias, tm.GetSQLTypesImport())
}
// GetBunImport returns the import path for Bun
func (tm *TypeMapper) GetBunImport() string {
return "github.com/uptrace/bun"

View File

@@ -24,7 +24,7 @@ type Writer struct {
func NewWriter(options *writers.WriterOptions) *Writer {
w := &Writer{
options: options,
typeMapper: NewTypeMapper(),
typeMapper: NewTypeMapper(options.NullableTypes),
config: LoadMethodConfigFromMetadata(options.Metadata),
}
@@ -80,8 +80,8 @@ func (w *Writer) writeSingleFile(db *models.Database) error {
// Add bun import (always needed)
templateData.AddImport(fmt.Sprintf("\"%s\"", w.typeMapper.GetBunImport()))
// Add resolvespec_common import (always needed for nullable types)
templateData.AddImport(fmt.Sprintf("resolvespec_common \"%s\"", w.typeMapper.GetSQLTypesImport()))
// Add nullable types import (resolvespec or stdlib depending on options)
templateData.AddImport(w.typeMapper.GetNullableTypeImportLine())
// Collect all models
for _, schema := range db.Schemas {
@@ -177,8 +177,8 @@ func (w *Writer) writeMultiFile(db *models.Database) error {
// Add bun import
templateData.AddImport(fmt.Sprintf("\"%s\"", w.typeMapper.GetBunImport()))
// Add resolvespec_common import
templateData.AddImport(fmt.Sprintf("resolvespec_common \"%s\"", w.typeMapper.GetSQLTypesImport()))
// Add nullable types import (resolvespec or stdlib depending on options)
templateData.AddImport(w.typeMapper.GetNullableTypeImportLine())
// Create model data
modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)

View File

@@ -556,7 +556,7 @@ func TestWriter_FieldNameCollision(t *testing.T) {
}
func TestTypeMapper_SQLTypeToGoType_Bun(t *testing.T) {
mapper := NewTypeMapper()
mapper := NewTypeMapper("")
tests := []struct {
sqlType string
@@ -587,7 +587,7 @@ func TestTypeMapper_SQLTypeToGoType_Bun(t *testing.T) {
}
func TestTypeMapper_BuildBunTag(t *testing.T) {
mapper := NewTypeMapper()
mapper := NewTypeMapper("")
tests := []struct {
name string
@@ -700,7 +700,7 @@ func TestTypeMapper_BuildBunTag(t *testing.T) {
}
func TestTypeMapper_BuildBunTag_PreservesExplicitTypeModifiers(t *testing.T) {
mapper := NewTypeMapper()
mapper := NewTypeMapper("")
col := &models.Column{
Name: "embedding",

View File

@@ -48,22 +48,23 @@ func main() {
### CLI Examples
```bash
# Generate GORM models from PostgreSQL database (single file)
relspec --input pgsql \
--conn "postgres://localhost/mydb" \
--output gorm \
--out-file models.go \
--package models
# Generate GORM models from a DBML schema (default: resolvespec types)
relspec convert --from dbml --from-path schema.dbml \
--to gorm --to-path models.go --package models
# Generate GORM models with multi-file output (one file per table)
relspec --input json \
--in-file schema.json \
--output gorm \
--out-file models/ \
--package models
# Use standard library database/sql nullable types instead of resolvespec
relspec convert --from dbml --from-path schema.dbml \
--to gorm --to-path models.go --package models \
--types stdlib
# Convert DBML to GORM models
relspec --input dbml --in-file schema.dbml --output gorm --out-file models.go
# Explicitly select resolvespec types (same as omitting --types)
relspec convert --from pgsql --from-conn "postgres://localhost/mydb" \
--to gorm --to-path models.go --package models \
--types resolvespec
# Multi-file output (one file per table)
relspec convert --from json --from-path schema.json \
--to gorm --to-path models/ --package models
```
## Output Modes
@@ -86,58 +87,86 @@ relspec --input pgsql --conn "..." --output gorm --out-file models/
Files are named: `sql_{schema}_{table}.go`
## Generated Code Examples
### Default — resolvespec types (`--types resolvespec`)
```go
package models
import (
"time"
sql_types "git.warky.dev/wdevs/sql_types"
sql_types "github.com/bitechdev/ResolveSpec/pkg/spectypes"
)
type ModelUser struct {
ID int64 `gorm:"column:id;type:bigint;primaryKey;autoIncrement" json:"id"`
Username string `gorm:"column:username;type:varchar(50);not null;uniqueIndex" json:"username"`
Email string `gorm:"column:email;type:varchar(100);not null" json:"email"`
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()" json:"created_at"`
// Relationships
Pos []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE" json:"pos,omitempty"`
ID string `gorm:"column:id;type:uuid;primaryKey" json:"id"`
Username string `gorm:"column:username;type:text;not null" json:"username"`
Email sql_types.SqlString `gorm:"column:email;type:text" json:"email,omitempty"`
Tags sql_types.SqlStringArray `gorm:"column:tags;type:text[];not null;default:'{}'" json:"tags"`
CreatedAt sql_types.SqlTimeStamp `gorm:"column:created_at;type:timestamptz;not null;default:now()" json:"created_at"`
}
func (ModelUser) TableName() string {
return "public.users"
}
```
type ModelPost struct {
ID int64 `gorm:"column:id;type:bigint;primaryKey" json:"id"`
UserID int64 `gorm:"column:user_id;type:bigint;not null" json:"user_id"`
Title string `gorm:"column:title;type:varchar(200);not null" json:"title"`
Content sql_types.SqlString `gorm:"column:content;type:text" json:"content,omitempty"`
### Standard library — `--types stdlib`
// Belongs to
Use *ModelUser `gorm:"foreignKey:UserID;references:ID" json:"use,omitempty"`
```go
package models
import (
"database/sql"
"time"
)
type ModelUser struct {
ID string `gorm:"column:id;type:uuid;primaryKey" json:"id"`
Username string `gorm:"column:username;type:text;not null" json:"username"`
Email sql.NullString `gorm:"column:email;type:text" json:"email,omitempty"`
Tags []string `gorm:"column:tags;type:text[];not null;default:'{}'" json:"tags"`
CreatedAt time.Time `gorm:"column:created_at;type:timestamptz;not null;default:now()" json:"created_at"`
}
func (ModelPost) TableName() string {
return "public.posts"
func (ModelUser) TableName() string {
return "public.users"
}
```
## Writer Options
### NullableTypes
Controls which Go package is used for nullable column types. Set via the `--types` CLI flag or `WriterOptions.NullableTypes`:
```go
// Use resolvespec types (default — omit NullableTypes or set to "resolvespec")
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
NullableTypes: writers.NullableTypeResolveSpec,
}
// Use standard library database/sql types
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
NullableTypes: writers.NullableTypeStdlib,
}
```
### Metadata Options
Configure additional writer behavior using metadata in `WriterOptions`:
```go
options := &writers.WriterOptions{
OutputPath: "models.go",
PackageName: "models",
Metadata: map[string]interface{}{
"multi_file": true, // Enable multi-file mode
"populate_refs": true, // Populate RefDatabase/RefSchema
Metadata: map[string]any{
"multi_file": true, // Enable multi-file mode
"populate_refs": true, // Populate RefDatabase/RefSchema
"generate_get_id_str": true, // Generate GetIDStr() methods
},
}
@@ -145,18 +174,23 @@ options := &writers.WriterOptions{
## Type Mapping
| SQL Type | Go Type | Notes |
|----------|---------|-------|
| bigint, int8 | int64 | - |
| integer, int, int4 | int | - |
| smallint, int2 | int16 | - |
| varchar, text | string | Not nullable |
| varchar, text (nullable) | sql_types.SqlString | Nullable |
| boolean, bool | bool | - |
| timestamp, timestamptz | time.Time | - |
| numeric, decimal | float64 | - |
| uuid | string | - |
| json, jsonb | string | - |
The nullable type package is selected with `--types` (or `WriterOptions.NullableTypes`).
| SQL Type | NOT NULL — both | Nullable — resolvespec | Nullable — stdlib |
|---|---|---|---|
| `bigint` | `int64` | `SqlInt64` | `sql.NullInt64` |
| `integer` | `int32` | `SqlInt32` | `sql.NullInt32` |
| `smallint` | `int16` | `SqlInt16` | `sql.NullInt16` |
| `text`, `varchar` | `string` | `SqlString` | `sql.NullString` |
| `boolean` | `bool` | `SqlBool` | `sql.NullBool` |
| `timestamp`, `timestamptz` | `time.Time` | `SqlTimeStamp` | `sql.NullTime` |
| `numeric`, `decimal` | `float64` | `SqlFloat64` | `sql.NullFloat64` |
| `uuid` | `string` | `SqlUUID` | `sql.NullString` |
| `jsonb` | `string` | `SqlString` | `sql.NullString` |
| `text[]` | `SqlStringArray` | `SqlStringArray` | `[]string` |
| `integer[]` | `SqlInt32Array` | `SqlInt32Array` | `[]int32` |
| `uuid[]` | `SqlUUIDArray` | `SqlUUIDArray` | `[]string` |
| `vector` | `SqlVector` | `SqlVector` | `[]float32` |
## Relationship Generation
@@ -170,7 +204,8 @@ The writer automatically generates relationship fields:
## Notes
- Model names are prefixed with "Model" (e.g., `ModelUser`)
- Nullable columns use `sql_types.SqlString`, `sql_types.SqlInt64`, etc. by default; pass `--types stdlib` to use `sql.NullString`, `sql.NullInt64`, etc. instead
- Array columns use `sql_types.SqlStringArray`, `sql_types.SqlInt32Array`, etc. by default; `--types stdlib` produces plain Go slices (`[]string`, `[]int32`, …)
- Generated code is auto-formatted with `go fmt`
- JSON tags are automatically added
- Supports schema-qualified table names in `TableName()` method

View File

@@ -11,29 +11,43 @@ import (
// TypeMapper handles type conversions between SQL and Go types
type TypeMapper struct {
// Package alias for sql_types import
sqlTypesAlias string
typeStyle string // writers.NullableTypeResolveSpec | writers.NullableTypeStdlib
}
// NewTypeMapper creates a new TypeMapper with default settings
func NewTypeMapper() *TypeMapper {
// NewTypeMapper creates a new TypeMapper.
// typeStyle should be writers.NullableTypeResolveSpec or writers.NullableTypeStdlib;
// an empty string defaults to resolvespec.
func NewTypeMapper(typeStyle string) *TypeMapper {
	style := typeStyle
	if style == "" {
		// Callers that pass no preference get the resolvespec default.
		style = writers.NullableTypeResolveSpec
	}
	return &TypeMapper{
		sqlTypesAlias: "sql_types",
		typeStyle:     style,
	}
}
// SQLTypeToGoType converts a SQL type to its Go equivalent, honouring the
// configured nullable-type style (resolvespec or stdlib).
func (tm *TypeMapper) SQLTypeToGoType(sqlType string, notNull bool) string {
	// Array columns are mapped by element type; both styles handle them
	// through arrayGoType regardless of nullability.
	if pgsql.IsArrayType(sqlType) {
		return tm.arrayGoType(tm.extractBaseType(sqlType))
	}

	base := tm.extractBaseType(sqlType)
	switch {
	case tm.typeStyle == writers.NullableTypeStdlib && notNull:
		return tm.rawGoType(base)
	case tm.typeStyle == writers.NullableTypeStdlib:
		return tm.stdlibNullableGoType(base)
	case notNull:
		// resolvespec (default): NOT NULL columns use plain Go types.
		return tm.baseGoType(base)
	default:
		// resolvespec (default): nullable columns use sql_types wrappers.
		return tm.nullableGoType(base)
	}
}
@@ -106,6 +120,9 @@ func (tm *TypeMapper) baseGoType(sqlType string) string {
// Other
"money": "float64",
// pgvector — always uses SqlVector even when NOT NULL
"vector": tm.sqlTypesAlias + ".SqlVector",
}
if goType, ok := typeMap[sqlType]; ok {
@@ -179,6 +196,9 @@ func (tm *TypeMapper) nullableGoType(sqlType string) string {
// Other
"money": tm.sqlTypesAlias + ".SqlFloat64",
// pgvector
"vector": tm.sqlTypesAlias + ".SqlVector",
}
if goType, ok := typeMap[sqlType]; ok {
@@ -189,6 +209,123 @@ func (tm *TypeMapper) nullableGoType(sqlType string) string {
return tm.sqlTypesAlias + ".SqlString"
}
// arrayGoType returns the Go type for a PostgreSQL array column.
// baseElemType is the canonical element type (e.g. "text", "integer").
func (tm *TypeMapper) arrayGoType(baseElemType string) string {
	if tm.typeStyle == writers.NullableTypeStdlib {
		return tm.stdlibArrayGoType(baseElemType)
	}

	// resolvespec mode: pick the sql_types array wrapper by element category.
	var suffix string
	switch baseElemType {
	case "text", "varchar", "char", "character", "citext", "bpchar",
		"inet", "cidr", "macaddr", "json", "jsonb":
		suffix = "SqlStringArray"
	case "integer", "int", "int4", "serial":
		suffix = "SqlInt32Array"
	case "smallint", "int2", "smallserial":
		suffix = "SqlInt16Array"
	case "bigint", "int8", "bigserial":
		suffix = "SqlInt64Array"
	case "real", "float4":
		suffix = "SqlFloat32Array"
	case "double precision", "float8", "numeric", "decimal", "money":
		suffix = "SqlFloat64Array"
	case "boolean", "bool":
		suffix = "SqlBoolArray"
	case "uuid":
		suffix = "SqlUUIDArray"
	default:
		// Unknown element types fall back to a string array.
		suffix = "SqlStringArray"
	}
	return tm.sqlTypesAlias + "." + suffix
}
// rawGoType returns the plain Go type for a NOT NULL column in stdlib mode.
func (tm *TypeMapper) rawGoType(sqlType string) string {
	switch sqlType {
	case "integer", "int", "int4", "serial":
		return "int32"
	case "smallint", "int2", "smallserial":
		return "int16"
	case "bigint", "int8", "bigserial":
		return "int64"
	case "boolean", "bool":
		return "bool"
	case "real", "float4":
		return "float32"
	case "double precision", "float8", "numeric", "decimal", "money":
		return "float64"
	case "timestamp", "timestamp without time zone", "timestamp with time zone",
		"timestamptz", "date", "time", "time without time zone",
		"time with time zone", "timetz":
		return "time.Time"
	case "bytea":
		return "[]byte"
	case "vector":
		return "[]float32"
	default:
		// text, varchar, char, uuid, json, inet, … and any unrecognised
		// type map to string.
		return "string"
	}
}
// stdlibNullableGoType returns the database/sql nullable type for a column.
// bytea and vector have no database/sql wrapper and keep their slice types.
func (tm *TypeMapper) stdlibNullableGoType(sqlType string) string {
	switch sqlType {
	case "integer", "int", "int4", "serial":
		return "sql.NullInt32"
	case "smallint", "int2", "smallserial":
		return "sql.NullInt16"
	case "bigint", "int8", "bigserial":
		return "sql.NullInt64"
	case "boolean", "bool":
		return "sql.NullBool"
	case "real", "float4", "double precision", "float8",
		"numeric", "decimal", "money":
		return "sql.NullFloat64"
	case "timestamp", "timestamp without time zone", "timestamp with time zone",
		"timestamptz", "date", "time", "time without time zone",
		"time with time zone", "timetz":
		return "sql.NullTime"
	case "bytea":
		return "[]byte"
	case "vector":
		return "[]float32"
	default:
		// text, varchar, char, uuid, json, inet, … and any unrecognised
		// type map to sql.NullString.
		return "sql.NullString"
	}
}
// stdlibArrayGoType returns a plain Go slice type for array columns in stdlib mode.
// baseElemType is the canonical element type (e.g. "text", "integer").
func (tm *TypeMapper) stdlibArrayGoType(baseElemType string) string {
	switch baseElemType {
	case "integer", "int", "int4", "serial":
		return "[]int32"
	case "smallint", "int2", "smallserial":
		return "[]int16"
	case "bigint", "int8", "bigserial":
		return "[]int64"
	case "real", "float4":
		return "[]float32"
	case "double precision", "float8", "numeric", "decimal", "money":
		return "[]float64"
	case "boolean", "bool":
		return "[]bool"
	default:
		// text, varchar, char, uuid, json, inet, … and any unrecognised
		// element type all map to a string slice.
		return "[]string"
	}
}
// BuildGormTag generates a complete GORM tag string for a column
func (tm *TypeMapper) BuildGormTag(column *models.Column, table *models.Table) string {
var parts []string
@@ -330,7 +467,16 @@ func (tm *TypeMapper) NeedsFmtImport(generateGetIDStr bool) bool {
return generateGetIDStr
}
// GetSQLTypesImport returns the import path for the ResolveSpec spectypes
// package, which provides the nullable types used in the default
// (resolvespec) mode.
func (tm *TypeMapper) GetSQLTypesImport() string {
	return "github.com/bitechdev/ResolveSpec/pkg/spectypes"
}
// GetNullableTypeImportLine returns the full Go import line for the nullable
// type package, ready to pass to AddImport: "database/sql" in stdlib mode,
// otherwise the aliased ResolveSpec spectypes import. It always returns a
// non-empty line — one of the two packages is always needed.
func (tm *TypeMapper) GetNullableTypeImportLine() string {
	if tm.typeStyle == writers.NullableTypeStdlib {
		return "\"database/sql\""
	}
	return fmt.Sprintf("%s \"%s\"", tm.sqlTypesAlias, tm.GetSQLTypesImport())
}

View File

@@ -24,7 +24,7 @@ type Writer struct {
func NewWriter(options *writers.WriterOptions) *Writer {
w := &Writer{
options: options,
typeMapper: NewTypeMapper(),
typeMapper: NewTypeMapper(options.NullableTypes),
config: LoadMethodConfigFromMetadata(options.Metadata),
}
@@ -77,8 +77,8 @@ func (w *Writer) writeSingleFile(db *models.Database) error {
packageName := w.getPackageName()
templateData := NewTemplateData(packageName, w.config)
// Add sql_types import (always needed for nullable types)
templateData.AddImport(fmt.Sprintf("sql_types \"%s\"", w.typeMapper.GetSQLTypesImport()))
// Add nullable types import (resolvespec or stdlib depending on options)
templateData.AddImport(w.typeMapper.GetNullableTypeImportLine())
// Collect all models
for _, schema := range db.Schemas {
@@ -171,8 +171,8 @@ func (w *Writer) writeMultiFile(db *models.Database) error {
// Create template data for this single table
templateData := NewTemplateData(packageName, w.config)
// Add sql_types import
templateData.AddImport(fmt.Sprintf("sql_types \"%s\"", w.typeMapper.GetSQLTypesImport()))
// Add nullable types import (resolvespec or stdlib depending on options)
templateData.AddImport(w.typeMapper.GetNullableTypeImportLine())
// Create model data
modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)

View File

@@ -643,7 +643,7 @@ func TestNameConverter_Pluralize(t *testing.T) {
}
func TestTypeMapper_SQLTypeToGoType(t *testing.T) {
mapper := NewTypeMapper()
mapper := NewTypeMapper("")
tests := []struct {
sqlType string
@@ -671,7 +671,7 @@ func TestTypeMapper_SQLTypeToGoType(t *testing.T) {
}
func TestTypeMapper_BuildGormTag_PreservesExplicitTypeModifiers(t *testing.T) {
mapper := NewTypeMapper()
mapper := NewTypeMapper("")
col := &models.Column{
Name: "embedding",

View File

@@ -329,7 +329,11 @@ func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, model
// Column doesn't exist, add it
defaultVal := ""
if modelCol.Default != nil {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
if value, ok := modelCol.Default.(string); ok {
defaultVal = writers.QuoteDefaultValue(value, modelCol.Type)
} else {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
}
}
sql, err := w.executor.ExecuteAddColumn(AddColumnData{
@@ -382,7 +386,11 @@ func (w *MigrationWriter) generateAlterTableScripts(schema *models.Schema, model
setDefault := modelCol.Default != nil
defaultVal := ""
if setDefault {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
if value, ok := modelCol.Default.(string); ok {
defaultVal = writers.QuoteDefaultValue(value, modelCol.Type)
} else {
defaultVal = fmt.Sprintf("%v", modelCol.Default)
}
}
sql, err := w.executor.ExecuteAlterColumnDefault(AlterColumnDefaultData{

View File

@@ -57,6 +57,46 @@ func TestWriteMigration_NewTable(t *testing.T) {
}
}
// TestWriteMigration_ArrayDefault verifies that a double-quoted array default
// literal ("''{}''") is normalized to a single-quoted '{}' in the generated
// migration SQL instead of being emitted triple-quoted.
func TestWriteMigration_ArrayDefault(t *testing.T) {
	// Current database is empty; the model adds one table with an array column.
	current := models.InitDatabase("testdb")
	current.Schemas = append(current.Schemas, models.InitSchema("public"))

	model := models.InitDatabase("testdb")
	modelSchema := models.InitSchema("public")

	table := models.InitTable("plans", "public")
	col := models.InitColumn("tags", "plans", "public")
	col.Type = "text[]"
	col.NotNull = true
	col.Default = "''{}''" // double-quoted literal — presumably as stored by a reader; the writer must normalize it
	table.Columns["tags"] = col

	modelSchema.Tables = append(modelSchema.Tables, table)
	model.Schemas = append(model.Schemas, modelSchema)

	writer, err := NewMigrationWriter(&writers.WriterOptions{})
	if err != nil {
		t.Fatalf("Failed to create writer: %v", err)
	}

	var out bytes.Buffer
	writer.writer = &out

	if err := writer.WriteMigration(model, current); err != nil {
		t.Fatalf("WriteMigration failed: %v", err)
	}

	got := out.String()
	if !strings.Contains(got, "tags text[] DEFAULT '{}' NOT NULL") {
		t.Fatalf("expected normalized array default in migration, got:\n%s", got)
	}
	if strings.Contains(got, "'''{}'''") {
		t.Fatalf("migration still contains triple-quoted array default:\n%s", got)
	}
}
func TestWriteMigration_WithAudit(t *testing.T) {
// Current database (empty)
current := models.InitDatabase("testdb")

View File

@@ -8,6 +8,7 @@ import (
"text/template"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
//go:embed templates/*.tmpl
@@ -495,7 +496,11 @@ func BuildCreateTableData(schemaName string, table *models.Table) CreateTableDat
NotNull: col.NotNull,
}
if col.Default != nil {
colData.Default = fmt.Sprintf("%v", col.Default)
if value, ok := col.Default.(string); ok {
colData.Default = writers.QuoteDefaultValue(value, col.Type)
} else {
colData.Default = fmt.Sprintf("%v", col.Default)
}
}
columns = append(columns, colData)
}

View File

@@ -10,8 +10,6 @@ import (
"strings"
"time"
"github.com/jackc/pgx/v5"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
@@ -525,15 +523,7 @@ func (w *Writer) generateColumnDefinition(col *models.Column) string {
if col.Default != nil {
switch v := col.Default.(type) {
case string:
// Strip backticks - DBML uses them for SQL expressions but PostgreSQL doesn't
cleanDefault := stripBackticks(v)
if strings.HasPrefix(cleanDefault, "nextval") || strings.HasPrefix(cleanDefault, "CURRENT_") || strings.Contains(cleanDefault, "()") {
parts = append(parts, fmt.Sprintf("DEFAULT %s", cleanDefault))
} else if cleanDefault == "true" || cleanDefault == "false" {
parts = append(parts, fmt.Sprintf("DEFAULT %s", cleanDefault))
} else {
parts = append(parts, fmt.Sprintf("DEFAULT '%s'", escapeQuote(cleanDefault)))
}
parts = append(parts, fmt.Sprintf("DEFAULT %s", writers.QuoteDefaultValue(stripBackticks(v), col.Type)))
case bool:
parts = append(parts, fmt.Sprintf("DEFAULT %v", v))
default:
@@ -1353,7 +1343,7 @@ func (w *Writer) executeDatabaseSQL(db *models.Database, connString string) erro
// Connect to database
ctx := context.Background()
conn, err := pgx.Connect(ctx, connString)
conn, err := pgsql.Connect(ctx, connString, "writer-pgsql")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}

View File

@@ -8,6 +8,7 @@ import (
"github.com/jackc/pgx/v5"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
@@ -42,7 +43,7 @@ func (w *Writer) WriteDatabase(db *models.Database) error {
// Connect to database
ctx := context.Background()
conn, err := pgx.Connect(ctx, connString)
conn, err := pgsql.Connect(ctx, connString, "writer-sqlexec")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}
@@ -72,7 +73,7 @@ func (w *Writer) WriteSchema(schema *models.Schema) error {
// Connect to database
ctx := context.Background()
conn, err := pgx.Connect(ctx, connString)
conn, err := pgsql.Connect(ctx, connString, "writer-sqlexec")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}

View File

@@ -20,6 +20,18 @@ type Writer interface {
WriteTable(table *models.Table) error
}
// NullableType constants control which Go package is used for nullable column
// types in code-generation writers (Bun, GORM). The value is carried in
// WriterOptions.NullableTypes.
const (
	// NullableTypeResolveSpec uses github.com/bitechdev/ResolveSpec/pkg/spectypes
	// (SqlString, SqlInt32, SqlVector, SqlStringArray, …). This is the default,
	// selected when NullableTypes is empty.
	NullableTypeResolveSpec = "resolvespec"

	// NullableTypeStdlib uses the standard library database/sql nullable types
	// (sql.NullString, sql.NullInt32, …) and plain Go slices for arrays.
	NullableTypeStdlib = "stdlib"
)
// WriterOptions contains common options for writers
type WriterOptions struct {
// OutputPath is the path where the output should be written
@@ -33,6 +45,12 @@ type WriterOptions struct {
// Useful for databases like SQLite that do not support schemas.
FlattenSchema bool
// NullableTypes selects the Go type package used for nullable columns in
// code-generation writers (bun, gorm). Accepted values:
// "resolvespec" (default) — github.com/bitechdev/ResolveSpec/pkg/spectypes
// "stdlib" — database/sql (sql.NullString, sql.NullInt32, …)
NullableTypes string
// Additional options can be added here as needed
Metadata map[string]interface{}
}
@@ -92,8 +110,12 @@ func SanitizeFilename(name string) string {
// Examples (bigint): "0" → "0"
// Examples (timestamp): "now()" → "now()" (function call never quoted)
func QuoteDefaultValue(value, sqlType string) string {
value = strings.TrimSpace(value)
// Function calls are never quoted regardless of column type.
if strings.Contains(value, "(") || strings.Contains(value, ")") {
if strings.Contains(value, "(") || strings.Contains(value, ")") ||
strings.Contains(value, "::") ||
strings.HasPrefix(strings.ToUpper(value), "ARRAY[") {
return value
}
@@ -103,6 +125,16 @@ func QuoteDefaultValue(value, sqlType string) string {
baseType = baseType[:idx]
}
if isArraySQLType(baseType) {
if arrayLiteral, ok := normalizeArrayDefaultLiteral(value); ok {
return quoteSQLLiteral(arrayLiteral)
}
}
if isQuotedSQLLiteral(value) {
return value
}
// Types whose default values must NOT be quoted.
unquotedTypes := map[string]bool{
// Integer types
@@ -136,7 +168,32 @@ func QuoteDefaultValue(value, sqlType string) string {
// Everything else (text, varchar, char, uuid, date, time, timestamp, json, …)
// is treated as a quoted literal.
return "'" + value + "'"
return quoteSQLLiteral(value)
}
// isArraySQLType reports whether sqlType denotes a PostgreSQL array
// (i.e. ends with the "[]" suffix, as in "text[]").
func isArraySQLType(sqlType string) bool {
	const arrayMarker = "[]"
	return strings.HasSuffix(sqlType, arrayMarker)
}
// normalizeArrayDefaultLiteral strips surrounding SQL string quoting from an
// array default literal, returning the bare "{...}" form and true on success.
// It accepts three shapes: doubly quoted (''{…}''), singly quoted ('{…}'),
// and bare ({…}). Anything else is rejected with ok == false.
func normalizeArrayDefaultLiteral(value string) (string, bool) {
	// Most-quoted form first so the doubled quotes are fully stripped.
	if strings.HasPrefix(value, "''{") && strings.HasSuffix(value, "}''") {
		return value[2 : len(value)-2], true
	}
	if strings.HasPrefix(value, "'{") && strings.HasSuffix(value, "}'") {
		return value[1 : len(value)-1], true
	}
	if strings.HasPrefix(value, "{") && strings.HasSuffix(value, "}") {
		return value, true
	}
	return "", false
}
// isQuotedSQLLiteral reports whether value is already wrapped in single
// quotes (e.g. "'{}'"). A lone "'" is too short to count as quoted.
func isQuotedSQLLiteral(value string) bool {
	if len(value) < 2 {
		return false
	}
	return strings.HasPrefix(value, "'") && strings.HasSuffix(value, "'")
}
// quoteSQLLiteral wraps value in single quotes for use as a SQL string
// literal, doubling any embedded single quotes per SQL escaping rules.
func quoteSQLLiteral(value string) string {
	escaped := strings.ReplaceAll(value, "'", "''")
	return "'" + escaped + "'"
}
// SanitizeStructTagValue sanitizes a value to be safely used inside Go struct tags.

View File

@@ -0,0 +1,54 @@
package writers
import "testing"
// TestQuoteDefaultValue covers the quoting rules for column defaults:
// plain text literals get quoted, array literals are normalized to exactly
// one level of quoting, and function-call defaults pass through untouched.
func TestQuoteDefaultValue(t *testing.T) {
	t.Parallel()

	cases := []struct {
		name    string
		value   string
		sqlType string
		want    string
	}{
		{name: "text default is quoted", value: "active", sqlType: "text", want: "'active'"},
		{name: "array default from bare literal is quoted once", value: "{}", sqlType: "text[]", want: "'{}'"},
		{name: "array default from quoted literal is preserved", value: "'{}'", sqlType: "text[]", want: "'{}'"},
		{name: "array default from double quoted literal is normalized", value: "''{}''", sqlType: "text[]", want: "'{}'"},
		{name: "function default is left alone", value: "now()", sqlType: "timestamptz", want: "now()"},
	}

	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			if got := QuoteDefaultValue(tc.value, tc.sqlType); got != tc.want {
				t.Fatalf("QuoteDefaultValue(%q, %q) = %q, want %q", tc.value, tc.sqlType, got, tc.want)
			}
		})
	}
}