feat(relations): 🎉 add flatten schema option for output
All checks were successful
CI / Test (1.24) (push) Successful in 25m5s
CI / Test (1.25) (push) Successful in 24m57s
CI / Build (push) Successful in 26m5s
CI / Lint (push) Successful in 25m51s
Integration Tests / Integration Tests (push) Successful in 25m42s
Release / Build and Release (push) Successful in 24m39s
* Introduce `--flatten-schema` flag to the convert and merge commands (split explicitly passes `false`).
* Modify database writing functions to support flattened schema names.
* Update template functions to handle the schema.table naming convention.
* Enhance the PostgreSQL writer to use flattened schema names in generated SQL.
* Update tests for compatibility with the new flattening feature.
* Update dependencies.
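Note: the naming rule the new flag toggles is centralized in the writers.QualifiedTableName helper added by this commit. A minimal, self-contained sketch of that behavior (the helper is inlined here purely for illustration; the schema and table names are made up):

package main

import "fmt"

// qualifiedTableName mirrors the writers.QualifiedTableName helper added in this
// commit: dot-qualified names by default, underscore-joined when flattening.
func qualifiedTableName(schema, table string, flatten bool) string {
    if schema == "" {
        return table
    }
    if flatten {
        return schema + "_" + table
    }
    return schema + "." + table
}

func main() {
    fmt.Println(qualifiedTableName("billing", "invoices", false)) // billing.invoices
    fmt.Println(qualifiedTableName("billing", "invoices", true))  // billing_invoices
    fmt.Println(qualifiedTableName("", "invoices", true))         // invoices
}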
@@ -45,6 +45,7 @@ var (
 convertTargetPath string
 convertPackageName string
 convertSchemaFilter string
+convertFlattenSchema bool
 )
 
 var convertCmd = &cobra.Command{
@@ -148,6 +149,7 @@ func init() {
 convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
 convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
 convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
+convertCmd.Flags().BoolVar(&convertFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
 
 err := convertCmd.MarkFlagRequired("from")
 if err != nil {
@@ -202,7 +204,7 @@ func runConvert(cmd *cobra.Command, args []string) error {
 fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
 }
 
-if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter); err != nil {
+if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter, convertFlattenSchema); err != nil {
 return fmt.Errorf("failed to write target: %w", err)
 }
 
@@ -301,12 +303,13 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
 return db, nil
 }
 
-func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string) error {
+func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool) error {
 var writer writers.Writer
 
 writerOpts := &writers.WriterOptions{
 OutputPath: outputPath,
 PackageName: packageName,
+FlattenSchema: flattenSchema,
 }
 
 switch strings.ToLower(dbType) {
@@ -56,6 +56,7 @@ var (
 mergeSkipTables string // Comma-separated table names to skip
 mergeVerbose bool
 mergeReportPath string // Path to write merge report
+mergeFlattenSchema bool
 )
 
 var mergeCmd = &cobra.Command{
@@ -123,6 +124,7 @@ func init() {
 mergeCmd.Flags().StringVar(&mergeSkipTables, "skip-tables", "", "Comma-separated list of table names to skip during merge")
 mergeCmd.Flags().BoolVar(&mergeVerbose, "verbose", false, "Show verbose output")
 mergeCmd.Flags().StringVar(&mergeReportPath, "merge-report", "", "Path to write merge report (JSON format)")
+mergeCmd.Flags().BoolVar(&mergeFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
 }
 
 func runMerge(cmd *cobra.Command, args []string) error {
@@ -237,7 +239,7 @@ func runMerge(cmd *cobra.Command, args []string) error {
 fmt.Fprintf(os.Stderr, " Path: %s\n", mergeOutputPath)
 }
 
-err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, mergeOutputConn, targetDB, "Output")
+err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, mergeOutputConn, targetDB, "Output", mergeFlattenSchema)
 if err != nil {
 return fmt.Errorf("failed to write output: %w", err)
 }
@@ -324,7 +326,7 @@ func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.D
 return db, nil
 }
 
-func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string) error {
+func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string, flattenSchema bool) error {
 var writer writers.Writer
 
 switch strings.ToLower(dbType) {
@@ -332,59 +334,59 @@ func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Datab
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for DBML format", label)
 }
-writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "dctx":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for DCTX format", label)
 }
-writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "drawdb":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for DrawDB format", label)
 }
-writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "graphql":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for GraphQL format", label)
 }
-writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "json":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for JSON format", label)
 }
-writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "yaml":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for YAML format", label)
 }
-writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "gorm":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for GORM format", label)
 }
-writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "bun":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for Bun format", label)
 }
-writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "drizzle":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for Drizzle format", label)
 }
-writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "prisma":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for Prisma format", label)
 }
-writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "typeorm":
 if filePath == "" {
 return fmt.Errorf("%s: file path is required for TypeORM format", label)
 }
-writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
+writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
 case "pgsql":
-writerOpts := &writers.WriterOptions{OutputPath: filePath}
+writerOpts := &writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema}
 if connString != "" {
 writerOpts.Metadata = map[string]interface{}{
 "connection_string": connString,
@@ -184,6 +184,7 @@ func runSplit(cmd *cobra.Command, args []string) error {
 splitTargetPath,
 splitPackageName,
 "", // no schema filter for split
+false, // no flatten-schema for split
 )
 if err != nil {
 return fmt.Errorf("failed to write output: %w", err)
@@ -106,11 +106,8 @@ func (td *TemplateData) FinalizeImports() {
 }
 
 // NewModelData creates a new ModelData from a models.Table
-func NewModelData(table *models.Table, schema string, typeMapper *TypeMapper) *ModelData {
-tableName := table.Name
-if schema != "" {
-tableName = schema + "." + table.Name
-}
+func NewModelData(table *models.Table, schema string, typeMapper *TypeMapper, flattenSchema bool) *ModelData {
+tableName := writers.QualifiedTableName(schema, table.Name, flattenSchema)
 
 // Generate model name: Model + Schema + Table (all PascalCase)
 singularTable := Singularize(table.Name)
@@ -86,7 +86,7 @@ func (w *Writer) writeSingleFile(db *models.Database) error {
 // Collect all models
 for _, schema := range db.Schemas {
 for _, table := range schema.Tables {
-modelData := NewModelData(table, schema.Name, w.typeMapper)
+modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)
 
 // Add relationship fields
 w.addRelationshipFields(modelData, table, schema, db)
@@ -181,7 +181,7 @@ func (w *Writer) writeMultiFile(db *models.Database) error {
 templateData.AddImport(fmt.Sprintf("resolvespec_common \"%s\"", w.typeMapper.GetSQLTypesImport()))
 
 // Create model data
-modelData := NewModelData(table, schema.Name, w.typeMapper)
+modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)
 
 // Add relationship fields
 w.addRelationshipFields(modelData, table, schema, db)
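Note: the practical effect in the generated Go models is that NewModelData now derives tableName via writers.QualifiedTableName. The template output itself is not part of this diff; assuming the generator emits a GORM-style TableName() method, the difference would look roughly like this (package and field names are made up, sketch only):

package models

// Model for schema "auth", table "users" (name follows the
// Model + Schema + Table convention mentioned in the code above).
type ModelAuthUser struct {
    ID int64
}

// Generated WITHOUT --flatten-schema:
func (ModelAuthUser) TableName() string { return "auth.users" }

// Generated WITH --flatten-schema, the method would instead return "auth_users".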
@@ -105,11 +105,8 @@ func (td *TemplateData) FinalizeImports() {
 }
 
 // NewModelData creates a new ModelData from a models.Table
-func NewModelData(table *models.Table, schema string, typeMapper *TypeMapper) *ModelData {
-tableName := table.Name
-if schema != "" {
-tableName = schema + "." + table.Name
-}
+func NewModelData(table *models.Table, schema string, typeMapper *TypeMapper, flattenSchema bool) *ModelData {
+tableName := writers.QualifiedTableName(schema, table.Name, flattenSchema)
 
 // Generate model name: Model + Schema + Table (all PascalCase)
 singularTable := Singularize(table.Name)
@@ -83,7 +83,7 @@ func (w *Writer) writeSingleFile(db *models.Database) error {
 // Collect all models
 for _, schema := range db.Schemas {
 for _, table := range schema.Tables {
-modelData := NewModelData(table, schema.Name, w.typeMapper)
+modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)
 
 // Add relationship fields
 w.addRelationshipFields(modelData, table, schema, db)
@@ -175,7 +175,7 @@ func (w *Writer) writeMultiFile(db *models.Database) error {
 templateData.AddImport(fmt.Sprintf("sql_types \"%s\"", w.typeMapper.GetSQLTypesImport()))
 
 // Create model data
-modelData := NewModelData(table, schema.Name, w.typeMapper)
+modelData := NewModelData(table, schema.Name, w.typeMapper, w.options.FlattenSchema)
 
 // Add relationship fields
 w.addRelationshipFields(modelData, table, schema, db)
@@ -31,7 +31,7 @@ type MigrationWriter struct {
 
 // NewMigrationWriter creates a new templated migration writer
 func NewMigrationWriter(options *writers.WriterOptions) (*MigrationWriter, error) {
-executor, err := NewTemplateExecutor()
+executor, err := NewTemplateExecutor(options.FlattenSchema)
 if err != nil {
 return nil, fmt.Errorf("failed to create template executor: %w", err)
 }
@@ -137,7 +137,7 @@ func TestWriteMigration_WithAudit(t *testing.T) {
 }
 
 func TestTemplateExecutor_CreateTable(t *testing.T) {
-executor, err := NewTemplateExecutor()
+executor, err := NewTemplateExecutor(false)
 if err != nil {
 t.Fatalf("Failed to create executor: %v", err)
 }
@@ -170,7 +170,7 @@ func TestTemplateExecutor_CreateTable(t *testing.T) {
 }
 
 func TestTemplateExecutor_AuditFunction(t *testing.T) {
-executor, err := NewTemplateExecutor()
+executor, err := NewTemplateExecutor(false)
 if err != nil {
 t.Fatalf("Failed to create executor: %v", err)
 }
@@ -314,7 +314,7 @@ func TestFormatType(t *testing.T) {
 
 // Test that template functions work in actual templates
 func TestTemplateFunctionsInTemplate(t *testing.T) {
-executor, err := NewTemplateExecutor()
+executor, err := NewTemplateExecutor(false)
 if err != nil {
 t.Fatalf("Failed to create executor: %v", err)
 }
@@ -18,14 +18,39 @@ type TemplateExecutor struct {
 templates *template.Template
 }
 
-// NewTemplateExecutor creates a new template executor
-func NewTemplateExecutor() (*TemplateExecutor, error) {
+// NewTemplateExecutor creates a new template executor.
+// flattenSchema controls whether schema.table identifiers use dot or underscore separation.
+func NewTemplateExecutor(flattenSchema bool) (*TemplateExecutor, error) {
 // Create template with custom functions
 funcMap := make(template.FuncMap)
 for k, v := range TemplateFunctions() {
 funcMap[k] = v
 }
 
+// qual_table returns a quoted, schema-qualified identifier.
+// With flatten=false: "schema"."table" (or unquoted equivalents).
+// With flatten=true: "schema_table".
+funcMap["qual_table"] = func(schema, name string) string {
+if schema == "" {
+return quoteIdent(name)
+}
+if flattenSchema {
+return quoteIdent(schema + "_" + name)
+}
+return quoteIdent(schema) + "." + quoteIdent(name)
+}
+
+// qual_table_raw is the same as qual_table but without identifier quoting.
+funcMap["qual_table_raw"] = func(schema, name string) string {
+if schema == "" {
+return name
+}
+if flattenSchema {
+return schema + "_" + name
+}
+return schema + "." + name
+}
+
 tmpl, err := template.New("").Funcs(funcMap).ParseFS(templateFS, "templates/*.tmpl")
 if err != nil {
 return nil, fmt.Errorf("failed to parse templates: %w", err)
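Note: to see what the two new template functions render, here is a minimal standalone program that registers equivalents of qual_table and qual_table_raw (quoteIdent is approximated with simple double-quoting; the real implementation lives in this package and is not shown in the hunk):

package main

import (
    "os"
    "text/template"
)

func main() {
    quoteIdent := func(s string) string { return `"` + s + `"` }
    for _, flatten := range []bool{false, true} {
        funcMap := template.FuncMap{
            // Same shape as the closures registered in NewTemplateExecutor.
            "qual_table": func(schema, name string) string {
                if schema == "" {
                    return quoteIdent(name)
                }
                if flatten {
                    return quoteIdent(schema + "_" + name)
                }
                return quoteIdent(schema) + "." + quoteIdent(name)
            },
            "qual_table_raw": func(schema, name string) string {
                if schema == "" {
                    return name
                }
                if flatten {
                    return schema + "_" + name
                }
                return schema + "." + name
            },
        }
        tmpl := template.Must(template.New("t").Funcs(funcMap).Parse(
            "ALTER TABLE {{qual_table .Schema .Table}} -- raw: {{qual_table_raw .Schema .Table}}\n"))
        _ = tmpl.Execute(os.Stdout, map[string]string{"Schema": "audit", "Table": "events"})
    }
    // Output:
    // ALTER TABLE "audit"."events" -- raw: audit.events
    // ALTER TABLE "audit_events" -- raw: audit_events
}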
@@ -1,4 +1,4 @@
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ADD COLUMN IF NOT EXISTS {{quote_ident .ColumnName}} {{.ColumnType}}
 {{- if .Default}} DEFAULT {{.Default}}{{end}}
 {{- if .NotNull}} NOT NULL{{end}};
@@ -6,7 +6,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND column_name = '{{.ColumnName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD COLUMN {{.ColumnDefinition}};
+ALTER TABLE {{qual_table .SchemaName .TableName}} ADD COLUMN {{.ColumnDefinition}};
 END IF;
 END;
 $$;
@@ -1,7 +1,7 @@
 {{- if .SetDefault -}}
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ALTER COLUMN {{quote_ident .ColumnName}} SET DEFAULT {{.DefaultValue}};
 {{- else -}}
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ALTER COLUMN {{quote_ident .ColumnName}} DROP DEFAULT;
 {{- end -}}
@@ -1,2 +1,2 @@
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ALTER COLUMN {{quote_ident .ColumnName}} TYPE {{.NewType}};
@@ -1,4 +1,4 @@
-CREATE OR REPLACE FUNCTION {{.SchemaName}}.{{.FunctionName}}()
+CREATE OR REPLACE FUNCTION {{qual_table_raw .SchemaName .FunctionName}}()
 RETURNS trigger AS
 $body$
 DECLARE
@@ -81,4 +81,4 @@ LANGUAGE plpgsql
 VOLATILE
 SECURITY DEFINER;
 
-COMMENT ON FUNCTION {{.SchemaName}}.{{.FunctionName}}() IS 'Audit trigger function for table {{.SchemaName}}.{{.TableName}}';
+COMMENT ON FUNCTION {{qual_table_raw .SchemaName .FunctionName}}() IS 'Audit trigger function for table {{qual_table_raw .SchemaName .TableName}}';
@@ -4,13 +4,13 @@ BEGIN
 SELECT 1
 FROM pg_trigger
 WHERE tgname = '{{.TriggerName}}'
-AND tgrelid = '{{.SchemaName}}.{{.TableName}}'::regclass
+AND tgrelid = '{{qual_table_raw .SchemaName .TableName}}'::regclass
 ) THEN
 CREATE TRIGGER {{.TriggerName}}
 AFTER {{.Events}}
-ON {{.SchemaName}}.{{.TableName}}
+ON {{qual_table_raw .SchemaName .TableName}}
 FOR EACH ROW
-EXECUTE FUNCTION {{.SchemaName}}.{{.FunctionName}}();
+EXECUTE FUNCTION {{qual_table_raw .SchemaName .FunctionName}}();
 END IF;
 END;
 $$;
@@ -1,6 +1,6 @@
 {{/* Base constraint template */}}
 {{- define "constraint_base" -}}
-ALTER TABLE {{.SchemaName}}.{{.TableName}}
+ALTER TABLE {{qual_table_raw .SchemaName .TableName}}
 ADD CONSTRAINT {{.ConstraintName}}
 {{block "constraint_definition" .}}{{end}};
 {{- end -}}
@@ -15,7 +15,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{.SchemaName}}.{{.TableName}}
+ALTER TABLE {{qual_table_raw .SchemaName .TableName}}
 DROP CONSTRAINT {{.ConstraintName}};
 END IF;
 END;
@@ -11,7 +11,7 @@
 
 {{/* Base ALTER TABLE structure */}}
 {{- define "alter_table_base" -}}
-ALTER TABLE {{.SchemaName}}.{{.TableName}}
+ALTER TABLE {{qual_table_raw .SchemaName .TableName}}
 {{block "alter_operation" .}}{{end}};
 {{- end -}}
 
@@ -30,5 +30,5 @@ $$;
 
 {{/* Common drop pattern */}}
 {{- define "drop_if_exists" -}}
-{{block "drop_type" .}}{{end}} IF EXISTS {{.SchemaName}}.{{.ObjectName}};
+{{block "drop_type" .}}{{end}} IF EXISTS {{qual_table_raw .SchemaName .ObjectName}};
 {{- end -}}
@@ -1 +1 @@
-COMMENT ON COLUMN {{quote_ident .SchemaName}}.{{quote_ident .TableName}}.{{quote_ident .ColumnName}} IS '{{.Comment}}';
+COMMENT ON COLUMN {{qual_table .SchemaName .TableName}}.{{quote_ident .ColumnName}} IS '{{.Comment}}';
@@ -1 +1 @@
-COMMENT ON TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} IS '{{.Comment}}';
+COMMENT ON TABLE {{qual_table .SchemaName .TableName}} IS '{{.Comment}}';
@@ -6,7 +6,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} CHECK ({{.Expression}});
+ALTER TABLE {{qual_table .SchemaName .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} CHECK ({{.Expression}});
 END IF;
 END;
 $$;
@@ -1,10 +1,10 @@
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 DROP CONSTRAINT IF EXISTS {{quote_ident .ConstraintName}};
 
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ADD CONSTRAINT {{quote_ident .ConstraintName}}
 FOREIGN KEY ({{.SourceColumns}})
-REFERENCES {{quote_ident .TargetSchema}}.{{quote_ident .TargetTable}} ({{.TargetColumns}})
+REFERENCES {{qual_table .TargetSchema .TargetTable}} ({{.TargetColumns}})
 ON DELETE {{.OnDelete}}
 ON UPDATE {{.OnUpdate}}
 DEFERRABLE;
@@ -6,10 +6,10 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ADD CONSTRAINT {{quote_ident .ConstraintName}}
 FOREIGN KEY ({{.SourceColumns}})
-REFERENCES {{quote_ident .TargetSchema}}.{{quote_ident .TargetTable}} ({{.TargetColumns}})
+REFERENCES {{qual_table .TargetSchema .TargetTable}} ({{.TargetColumns}})
 ON DELETE {{.OnDelete}}
 ON UPDATE {{.OnUpdate}}{{if .Deferrable}}
 DEFERRABLE{{end}};
@@ -1,2 +1,2 @@
 CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{quote_ident .IndexName}}
-ON {{quote_ident .SchemaName}}.{{quote_ident .TableName}} USING {{.IndexType}} ({{.Columns}});
+ON {{qual_table .SchemaName .TableName}} USING {{.IndexType}} ({{.Columns}});
@@ -6,7 +6,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+ALTER TABLE {{qual_table .SchemaName .TableName}}
 ADD CONSTRAINT {{quote_ident .ConstraintName}} PRIMARY KEY ({{.Columns}});
 END IF;
 END;
@@ -11,7 +11,7 @@ BEGIN
 AND constraint_name IN ({{.AutoGenNames}});
 
 IF auto_pk_name IS NOT NULL THEN
-EXECUTE 'ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} DROP CONSTRAINT ' || quote_ident(auto_pk_name);
+EXECUTE 'ALTER TABLE {{qual_table .SchemaName .TableName}} DROP CONSTRAINT ' || quote_ident(auto_pk_name);
 END IF;
 
 -- Add named primary key if it doesn't exist
@@ -21,7 +21,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} PRIMARY KEY ({{.Columns}});
+ALTER TABLE {{qual_table .SchemaName .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} PRIMARY KEY ({{.Columns}});
 END IF;
 END;
 $$;
@@ -1,4 +1,4 @@
-CREATE SEQUENCE IF NOT EXISTS {{quote_ident .SchemaName}}.{{quote_ident .SequenceName}}
+CREATE SEQUENCE IF NOT EXISTS {{qual_table .SchemaName .SequenceName}}
 INCREMENT {{.Increment}}
 MINVALUE {{.MinValue}}
 MAXVALUE {{.MaxValue}}
@@ -1,4 +1,4 @@
-CREATE TABLE IF NOT EXISTS {{quote_ident .SchemaName}}.{{quote_ident .TableName}} (
+CREATE TABLE IF NOT EXISTS {{qual_table .SchemaName .TableName}} (
 {{- range $i, $col := .Columns}}
 {{- if $i}},{{end}}
 {{quote_ident $col.Name}} {{$col.Type}}
@@ -6,7 +6,7 @@ BEGIN
 AND table_name = '{{.TableName}}'
 AND constraint_name = '{{.ConstraintName}}'
 ) THEN
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} UNIQUE ({{.Columns}});
+ALTER TABLE {{qual_table .SchemaName .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} UNIQUE ({{.Columns}});
 END IF;
 END;
 $$;
@@ -1 +1 @@
-ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} DROP CONSTRAINT IF EXISTS {{quote_ident .ConstraintName}};
+ALTER TABLE {{qual_table .SchemaName .TableName}} DROP CONSTRAINT IF EXISTS {{quote_ident .ConstraintName}};
@@ -1 +1 @@
-DROP INDEX IF EXISTS {{quote_ident .SchemaName}}.{{quote_ident .IndexName}} CASCADE;
+DROP INDEX IF EXISTS {{qual_table .SchemaName .IndexName}} CASCADE;
@@ -16,7 +16,7 @@
 
 {{/* Qualified table name */}}
 {{- define "qualified_table" -}}
-{{.SchemaName}}.{{.TableName}}
+{{qual_table_raw .SchemaName .TableName}}
 {{- end -}}
 
 {{/* Index method clause */}}
@@ -10,10 +10,10 @@ BEGIN
 AND c.relkind = 'S'
 ) THEN
 SELECT COALESCE(MAX({{quote_ident .ColumnName}}), 0) + 1
-FROM {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
+FROM {{qual_table .SchemaName .TableName}}
 INTO m_cnt;
 
-PERFORM setval('{{quote_ident .SchemaName}}.{{quote_ident .SequenceName}}'::regclass, m_cnt);
+PERFORM setval('{{qual_table_raw .SchemaName .SequenceName}}'::regclass, m_cnt);
 END IF;
 END;
 $$;
@@ -58,13 +58,18 @@ type ExecutionError struct {
 
 // NewWriter creates a new PostgreSQL SQL writer
 func NewWriter(options *writers.WriterOptions) *Writer {
-executor, _ := NewTemplateExecutor()
+executor, _ := NewTemplateExecutor(options.FlattenSchema)
 return &Writer{
 options: options,
 executor: executor,
 }
 }
 
+// qualTable returns a schema-qualified name using the writer's FlattenSchema setting.
+func (w *Writer) qualTable(schema, name string) string {
+return writers.QualifiedTableName(schema, name, w.options.FlattenSchema)
+}
+
 // WriteDatabase writes the entire database schema as SQL
 func (w *Writer) WriteDatabase(db *models.Database) error {
 // Check if we should execute SQL directly on a database
@@ -134,8 +139,8 @@ func (w *Writer) GenerateDatabaseStatements(db *models.Database) ([]string, erro
 func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, error) {
 statements := []string{}
 
-// Phase 1: Create schema
-if schema.Name != "public" {
+// Phase 1: Create schema (skip entirely when flattening)
+if schema.Name != "public" && !w.options.FlattenSchema {
 statements = append(statements, fmt.Sprintf("-- Schema: %s", schema.Name))
 statements = append(statements, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s", schema.SQLName()))
 }
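Note: taken together with the qualTable helper above, flattening changes the shape of the generated DDL in two ways: the CREATE SCHEMA statement is suppressed and every qualified name collapses to a single identifier. A hand-written illustration (not actual writer output) for a schema named billing:

package main

import "fmt"

// Illustration only: roughly what the PostgreSQL writer's Phase 1 plus
// CREATE TABLE output looks like with and without FlattenSchema.
func main() {
    fmt.Print("-- FlattenSchema: false\n" +
        "CREATE SCHEMA IF NOT EXISTS billing;\n" +
        "CREATE TABLE IF NOT EXISTS billing.invoices (\n  id bigint\n);\n\n")
    fmt.Print("-- FlattenSchema: true (no CREATE SCHEMA is emitted)\n" +
        "CREATE TABLE IF NOT EXISTS billing_invoices (\n  id bigint\n);\n")
}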
@@ -157,8 +162,8 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
 continue
 }
 
-stmt := fmt.Sprintf("CREATE SEQUENCE IF NOT EXISTS %s.%s\n INCREMENT 1\n MINVALUE 1\n MAXVALUE 9223372036854775807\n START 1\n CACHE 1",
-schema.SQLName(), seqName)
+stmt := fmt.Sprintf("CREATE SEQUENCE IF NOT EXISTS %s\n INCREMENT 1\n MINVALUE 1\n MAXVALUE 9223372036854775807\n START 1\n CACHE 1",
+w.qualTable(schema.SQLName(), seqName))
 statements = append(statements, stmt)
 }
 
@@ -275,8 +280,8 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
 whereClause = fmt.Sprintf(" WHERE %s", index.Where)
 }
 
-stmt := fmt.Sprintf("CREATE %sINDEX IF NOT EXISTS %s ON %s.%s USING %s (%s)%s",
-uniqueStr, quoteIdentifier(index.Name), schema.SQLName(), table.SQLName(), indexType, strings.Join(columnExprs, ", "), whereClause)
+stmt := fmt.Sprintf("CREATE %sINDEX IF NOT EXISTS %s ON %s USING %s (%s)%s",
+uniqueStr, quoteIdentifier(index.Name), w.qualTable(schema.SQLName(), table.SQLName()), indexType, strings.Join(columnExprs, ", "), whereClause)
 statements = append(statements, stmt)
 }
 }
@@ -374,15 +379,15 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
 // Phase 7: Comments
 for _, table := range schema.Tables {
 if table.Comment != "" {
-stmt := fmt.Sprintf("COMMENT ON TABLE %s.%s IS '%s'",
-schema.SQLName(), table.SQLName(), escapeQuote(table.Comment))
+stmt := fmt.Sprintf("COMMENT ON TABLE %s IS '%s'",
+w.qualTable(schema.SQLName(), table.SQLName()), escapeQuote(table.Comment))
 statements = append(statements, stmt)
 }
 
 for _, column := range table.Columns {
 if column.Comment != "" {
-stmt := fmt.Sprintf("COMMENT ON COLUMN %s.%s.%s IS '%s'",
-schema.SQLName(), table.SQLName(), column.SQLName(), escapeQuote(column.Comment))
+stmt := fmt.Sprintf("COMMENT ON COLUMN %s.%s IS '%s'",
+w.qualTable(schema.SQLName(), table.SQLName()), column.SQLName(), escapeQuote(column.Comment))
 statements = append(statements, stmt)
 }
 }
@@ -474,8 +479,8 @@ func (w *Writer) generateCreateTableStatement(schema *models.Schema, table *mode
 columnDefs = append(columnDefs, " "+def)
 }
 
-stmt := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s.%s (\n%s\n)",
-schema.SQLName(), table.SQLName(), strings.Join(columnDefs, ",\n"))
+stmt := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (\n%s\n)",
+w.qualTable(schema.SQLName(), table.SQLName()), strings.Join(columnDefs, ",\n"))
 statements = append(statements, stmt)
 
 return statements, nil
@@ -655,8 +660,7 @@ func (w *Writer) WriteAddColumnStatements(db *models.Database) error {
 
 // writeCreateSchema generates CREATE SCHEMA statement
 func (w *Writer) writeCreateSchema(schema *models.Schema) error {
-if schema.Name == "public" {
-// public schema exists by default
+if schema.Name == "public" || w.options.FlattenSchema {
 return nil
 }
 
@@ -708,8 +712,8 @@ func (w *Writer) writeCreateTables(schema *models.Schema) error {
 fmt.Fprintf(w.writer, "-- Tables for schema: %s\n", schema.Name)
 
 for _, table := range schema.Tables {
-fmt.Fprintf(w.writer, "CREATE TABLE IF NOT EXISTS %s.%s (\n",
-schema.SQLName(), table.SQLName())
+fmt.Fprintf(w.writer, "CREATE TABLE IF NOT EXISTS %s (\n",
+w.qualTable(schema.SQLName(), table.SQLName()))
 
 // Write columns
 columns := getSortedColumns(table.Columns)
@@ -893,8 +897,8 @@ func (w *Writer) writeIndexes(schema *models.Schema) error {
 
 fmt.Fprintf(w.writer, "CREATE %sINDEX IF NOT EXISTS %s\n",
 unique, indexName)
-fmt.Fprintf(w.writer, " ON %s.%s USING %s (%s)%s;\n\n",
-schema.SQLName(), table.SQLName(), indexType, strings.Join(columnExprs, ", "), whereClause)
+fmt.Fprintf(w.writer, " ON %s USING %s (%s)%s;\n\n",
+w.qualTable(schema.SQLName(), table.SQLName()), indexType, strings.Join(columnExprs, ", "), whereClause)
 }
 }
 
@@ -1203,16 +1207,16 @@ func (w *Writer) writeComments(schema *models.Schema) error {
 for _, table := range schema.Tables {
 // Table comment
 if table.Description != "" {
-fmt.Fprintf(w.writer, "COMMENT ON TABLE %s.%s IS '%s';\n",
-schema.SQLName(), table.SQLName(),
+fmt.Fprintf(w.writer, "COMMENT ON TABLE %s IS '%s';\n",
+w.qualTable(schema.SQLName(), table.SQLName()),
 escapeQuote(table.Description))
 }
 
 // Column comments
 for _, col := range getSortedColumns(table.Columns) {
 if col.Description != "" {
-fmt.Fprintf(w.writer, "COMMENT ON COLUMN %s.%s.%s IS '%s';\n",
-schema.SQLName(), table.SQLName(), col.SQLName(),
+fmt.Fprintf(w.writer, "COMMENT ON COLUMN %s.%s IS '%s';\n",
+w.qualTable(schema.SQLName(), table.SQLName()), col.SQLName(),
 escapeQuote(col.Description))
 }
 }
@@ -28,10 +28,29 @@ type WriterOptions struct {
 // PackageName is the Go package name (for code generation)
 PackageName string
 
+// FlattenSchema disables schema.table dot notation and instead joins
+// schema and table with an underscore (e.g., "public_users").
+// Useful for databases like SQLite that do not support schemas.
+FlattenSchema bool
+
 // Additional options can be added here as needed
 Metadata map[string]interface{}
 }
 
+// QualifiedTableName returns a schema-qualified table name.
+// When flatten is true, schema and table are joined with underscore (e.g., "schema_table").
+// When flatten is false, they are dot-separated (e.g., "schema.table").
+// If schema is empty, just the table name is returned regardless of flatten.
+func QualifiedTableName(schema, table string, flatten bool) string {
+if schema == "" {
+return table
+}
+if flatten {
+return schema + "_" + table
+}
+return schema + "." + table
+}
+
 // SanitizeFilename removes quotes, comments, and invalid characters from identifiers
 // to make them safe for use in filenames. This handles:
 // - Double and single quotes: "table_name" or 'table_name' -> table_name
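Note: a table-driven test could pin down the helper's three branches. A sketch that would sit next to QualifiedTableName in the writers package (the test cases are illustrative):

package writers

import "testing"

// Sketch of a table-driven test for the new helper.
func TestQualifiedTableName_Flatten(t *testing.T) {
    cases := []struct {
        schema, table string
        flatten       bool
        want          string
    }{
        {"public", "users", false, "public.users"},
        {"public", "users", true, "public_users"},
        {"", "users", true, "users"},
    }
    for _, c := range cases {
        if got := QualifiedTableName(c.schema, c.table, c.flatten); got != c.want {
            t.Errorf("QualifiedTableName(%q, %q, %v) = %q, want %q",
                c.schema, c.table, c.flatten, got, c.want)
        }
    }
}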