Compare commits: 666eab7cec...e61204cb3c

3 commits:

- e61204cb3c
- d52b9cdc14
- f98b278d72
@@ -34,6 +34,7 @@ RelSpec can read database schemas from multiple sources:
 - [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
 - [DCTX](pkg/readers/dctx/README.md) - Clarion database dictionary format
 - [DrawDB](pkg/readers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/readers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
 - [JSON](pkg/readers/json/README.md) - RelSpec canonical JSON format
 - [YAML](pkg/readers/yaml/README.md) - RelSpec canonical YAML format
@@ -55,6 +56,7 @@ RelSpec can write database schemas to multiple formats:
 - [DBML](pkg/writers/dbml/README.md) - Database Markup Language
 - [DCTX](pkg/writers/dctx/README.md) - Clarion database dictionary format
 - [DrawDB](pkg/writers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/writers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
 - [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
 - [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
TODO.md (4 changes)
@@ -18,7 +18,7 @@
 - [] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP) (💲 Someone can do this, not me)
 - [ ] Django models.py (Python classes), Sequelize migrations (JS) (💲 Someone can do this, not me)
 - [] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
+- [✔️] GraphQL schema generation
 
 ## Documentation
 
@@ -37,7 +37,7 @@
 - [ ] Web UI for visual editing
 - [ ] REST API server mode
 - [ ] Support for NoSQL databases
-- [ ] GraphQL schema generation
 
 ## Performance
 
 - [ ] Concurrent processing for multiple tables
@@ -16,6 +16,7 @@ import (
     "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
     "git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
     "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
+    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
     "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
     "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
     "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
@@ -28,6 +29,7 @@ import (
     wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
     wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
     wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
+    wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
     wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
     wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
     wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
@@ -58,6 +60,7 @@ Input formats:
 - dbml: DBML schema files
 - dctx: DCTX schema files
 - drawdb: DrawDB JSON files
+- graphql: GraphQL schema files (.graphql, SDL)
 - json: JSON database schema
 - yaml: YAML database schema
 - gorm: GORM model files (Go, file or directory)
@@ -71,6 +74,7 @@ Output formats:
 - dbml: DBML schema files
 - dctx: DCTX schema files
 - drawdb: DrawDB JSON files
+- graphql: GraphQL schema files (.graphql, SDL)
 - json: JSON database schema
 - yaml: YAML database schema
 - gorm: GORM model files (Go)
@@ -136,11 +140,11 @@ Examples:
 }
 
 func init() {
-    convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
+    convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
     convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
     convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
 
-    convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
+    convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
     convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
     convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
     convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
@@ -279,6 +283,12 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
         }
         reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
 
+    case "graphql", "gql":
+        if filePath == "" {
+            return nil, fmt.Errorf("file path is required for GraphQL format")
+        }
+        reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
     default:
         return nil, fmt.Errorf("unsupported source format: %s", dbType)
     }
@@ -339,6 +349,9 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
     case "typeorm":
         writer = wtypeorm.NewWriter(writerOpts)
 
+    case "graphql", "gql":
+        writer = wgraphql.NewWriter(writerOpts)
+
     default:
         return fmt.Errorf("unsupported target format: %s", dbType)
     }
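Taken together, these hunks register the GraphQL format with the `convert` command on both the read and the write side. As a quick illustration, here is a minimal sketch of driving the new reader directly from Go; it uses only calls that appear elsewhere in this changeset (`graphql.NewReader`, `ReadDatabase`, and `Database.ToSummary`), and the schema path is a placeholder:

```go
package main

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

func main() {
	// Placeholder path; any GraphQL SDL file works here.
	opts := &readers.ReaderOptions{FilePath: "schema.graphql"}

	// Parse the SDL into RelSpec's intermediate database model.
	db, err := graphql.NewReader(opts).ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	// Summarize the parsed model; only fields shown in this changeset are used.
	summary := db.ToSummary()
	fmt.Printf("%s: %d columns total\n", summary.Name, summary.TotalColumns)
}
```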
@@ -2,7 +2,13 @@ package models
 
 import "encoding/xml"
 
-// DCTXDictionary represents the root element of a DCTX file
+// DCTX File Format Models
+//
+// This file defines the data structures for parsing and generating DCTX
+// (Data Dictionary) XML files, which are used by Clarion development tools
+// for database schema definitions.
+
+// DCTXDictionary represents the root element of a DCTX file.
 type DCTXDictionary struct {
     XMLName xml.Name `xml:"Dictionary"`
     Name    string   `xml:"Name,attr"`
@@ -11,7 +17,7 @@ type DCTXDictionary struct {
     Relations []DCTXRelation `xml:"Relation,omitempty"`
 }
 
-// DCTXTable represents a table definition in DCTX
+// DCTXTable represents a table definition in DCTX format.
 type DCTXTable struct {
     Guid string `xml:"Guid,attr"`
     Name string `xml:"Name,attr"`
@@ -25,7 +31,8 @@ type DCTXTable struct {
     Options []DCTXOption `xml:"Option,omitempty"`
 }
 
-// DCTXField represents a field/column definition in DCTX
+// DCTXField represents a field/column definition in DCTX format.
+// Fields can be nested for GROUP structures.
 type DCTXField struct {
     Guid string `xml:"Guid,attr"`
     Name string `xml:"Name,attr"`
@@ -37,7 +44,7 @@ type DCTXField struct {
     Options []DCTXOption `xml:"Option,omitempty"`
 }
 
-// DCTXKey represents an index or key definition in DCTX
+// DCTXKey represents an index or key definition in DCTX format.
 type DCTXKey struct {
     Guid string `xml:"Guid,attr"`
     Name string `xml:"Name,attr"`
@@ -49,7 +56,7 @@ type DCTXKey struct {
     Components []DCTXComponent `xml:"Component"`
 }
 
-// DCTXComponent represents a component of a key (field reference)
+// DCTXComponent represents a component of a key, referencing a field in the index.
 type DCTXComponent struct {
     Guid    string `xml:"Guid,attr"`
     FieldId string `xml:"FieldId,attr,omitempty"`
@@ -57,14 +64,14 @@ type DCTXComponent struct {
     Ascend bool `xml:"Ascend,attr,omitempty"`
 }
 
-// DCTXOption represents a property option in DCTX
+// DCTXOption represents a property option in DCTX format for metadata storage.
 type DCTXOption struct {
     Property      string `xml:"Property,attr"`
     PropertyType  string `xml:"PropertyType,attr,omitempty"`
     PropertyValue string `xml:"PropertyValue,attr"`
 }
 
-// DCTXRelation represents a relationship/foreign key in DCTX
+// DCTXRelation represents a relationship/foreign key in DCTX format.
 type DCTXRelation struct {
     Guid         string `xml:"Guid,attr"`
     PrimaryTable string `xml:"PrimaryTable,attr"`
@@ -77,7 +84,7 @@ type DCTXRelation struct {
     PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping,omitempty"`
 }
 
-// DCTXFieldMapping represents a field mapping in a relation
+// DCTXFieldMapping represents a field mapping in a relation for multi-column foreign keys.
 type DCTXFieldMapping struct {
     Guid  string `xml:"Guid,attr"`
     Field string `xml:"Field,attr"`
@@ -2,11 +2,14 @@ package models
 
 import "fmt"
 
-// =============================================================================
-// Flat/Denormalized Views - Flattened structures with fully qualified names
-// =============================================================================
+// Flat/Denormalized Views
+//
+// This file provides flattened data structures with fully qualified names
+// for easier querying and analysis of database schemas without navigating
+// nested hierarchies.
 
-// FlatColumn represents a column with full context in a single structure
+// FlatColumn represents a column with full database context in a single structure.
+// It includes fully qualified names for easy identification and querying.
 type FlatColumn struct {
     DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
     SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -25,7 +28,7 @@ type FlatColumn struct {
     Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
 }
 
-// ToFlatColumns converts a Database to a slice of FlatColumns
+// ToFlatColumns converts a Database to a slice of FlatColumns for denormalized access to all columns.
 func (d *Database) ToFlatColumns() []*FlatColumn {
     flatColumns := make([]*FlatColumn, 0)
 
@@ -56,7 +59,7 @@ func (d *Database) ToFlatColumns() []*FlatColumn {
     return flatColumns
 }
 
-// FlatTable represents a table with full context
+// FlatTable represents a table with full database context and aggregated counts.
 type FlatTable struct {
     DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
     SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -70,7 +73,7 @@ type FlatTable struct {
     IndexCount int `json:"index_count" yaml:"index_count" xml:"index_count"`
 }
 
-// ToFlatTables converts a Database to a slice of FlatTables
+// ToFlatTables converts a Database to a slice of FlatTables for denormalized access to all tables.
 func (d *Database) ToFlatTables() []*FlatTable {
     flatTables := make([]*FlatTable, 0)
 
@@ -94,7 +97,7 @@ func (d *Database) ToFlatTables() []*FlatTable {
     return flatTables
 }
 
-// FlatConstraint represents a constraint with full context
+// FlatConstraint represents a constraint with full database context and resolved references.
 type FlatConstraint struct {
     DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
     SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -112,7 +115,7 @@ type FlatConstraint struct {
     OnUpdate string `json:"on_update,omitempty" yaml:"on_update,omitempty" xml:"on_update,omitempty"`
 }
 
-// ToFlatConstraints converts a Database to a slice of FlatConstraints
+// ToFlatConstraints converts a Database to a slice of FlatConstraints for denormalized access to all constraints.
 func (d *Database) ToFlatConstraints() []*FlatConstraint {
     flatConstraints := make([]*FlatConstraint, 0)
 
@@ -148,7 +151,7 @@ func (d *Database) ToFlatConstraints() []*FlatConstraint {
     return flatConstraints
 }
 
-// FlatRelationship represents a relationship with full context
+// FlatRelationship represents a relationship with full database context and fully qualified table names.
 type FlatRelationship struct {
     DatabaseName     string `json:"database_name" yaml:"database_name" xml:"database_name"`
     RelationshipName string `json:"relationship_name" yaml:"relationship_name" xml:"relationship_name"`
@@ -164,7 +167,7 @@ type FlatRelationship struct {
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
 }
 
-// ToFlatRelationships converts a Database to a slice of FlatRelationships
+// ToFlatRelationships converts a Database to a slice of FlatRelationships for denormalized access to all relationships.
 func (d *Database) ToFlatRelationships() []*FlatRelationship {
     flatRelationships := make([]*FlatRelationship, 0)
 
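The flat views above are intended for whole-model scans. A small sketch of how they might be consumed, assuming a `*models.Database` obtained from any RelSpec reader; only `FlatColumn` fields visible in this diff (DatabaseName, SchemaName, Comment) are referenced:

```go
package example

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// listColumns walks the denormalized column view produced by ToFlatColumns.
func listColumns(db *models.Database) {
	for _, col := range db.ToFlatColumns() {
		// Only FlatColumn fields that appear in this changeset are used here.
		fmt.Println(col.DatabaseName, col.SchemaName, col.Comment)
	}
}
```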
@@ -1,13 +1,19 @@
+// Package models provides the core data structures for representing database schemas.
+// It defines types for databases, schemas, tables, columns, relationships, constraints,
+// indexes, views, sequences, and other database objects. These models serve as the
+// intermediate representation for converting between various database schema formats.
 package models
 
 import "strings"
 
+// DatabaseType represents the type of database system.
 type DatabaseType string
 
+// Supported database types.
 const (
-    PostgresqlDatabaseType DatabaseType = "pgsql"
-    MSSQLDatabaseType      DatabaseType = "mssql"
-    SqlLiteDatabaseType    DatabaseType = "sqlite"
+    PostgresqlDatabaseType DatabaseType = "pgsql"  // PostgreSQL database
+    MSSQLDatabaseType      DatabaseType = "mssql"  // Microsoft SQL Server database
+    SqlLiteDatabaseType    DatabaseType = "sqlite" // SQLite database
 )
 
 // Database represents the complete database schema
@@ -21,11 +27,13 @@ type Database struct {
     SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
 }
 
-// SQLNamer returns the database name in lowercase
+// SQLName returns the database name in lowercase for SQL compatibility.
 func (d *Database) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// Schema represents a database schema, which is a logical grouping of database objects
+// such as tables, views, sequences, and relationships within a database.
 type Schema struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -43,11 +51,13 @@ type Schema struct {
     Enums []*Enum `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
 }
 
-// SQLName returns the schema name in lowercase
+// SQLName returns the schema name in lowercase for SQL compatibility.
 func (d *Schema) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// Table represents a database table with its columns, constraints, indexes,
+// and relationships. Tables are the primary data storage structures in a database.
 type Table struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -63,11 +73,12 @@ type Table struct {
     RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
 }
 
-// SQLName returns the table name in lowercase
+// SQLName returns the table name in lowercase for SQL compatibility.
 func (d *Table) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// GetPrimaryKey returns the primary key column for the table, or nil if none exists.
 func (m Table) GetPrimaryKey() *Column {
     for _, column := range m.Columns {
         if column.IsPrimaryKey {
@@ -77,6 +88,7 @@ func (m Table) GetPrimaryKey() *Column {
     return nil
 }
 
+// GetForeignKeys returns all foreign key constraints for the table.
 func (m Table) GetForeignKeys() []*Constraint {
     keys := make([]*Constraint, 0)
 
@@ -101,7 +113,7 @@ type View struct {
     RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
 }
 
-// SQLName returns the view name in lowercase
+// SQLName returns the view name in lowercase for SQL compatibility.
 func (d *View) SQLName() string {
     return strings.ToLower(d.Name)
 }
@@ -124,7 +136,7 @@ type Sequence struct {
     RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
 }
 
-// SQLName returns the sequence name in lowercase
+// SQLName returns the sequence name in lowercase for SQL compatibility.
 func (d *Sequence) SQLName() string {
     return strings.ToLower(d.Name)
 }
@@ -148,11 +160,13 @@ type Column struct {
     Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 }
 
-// SQLName returns the table name in lowercase
+// SQLName returns the column name in lowercase for SQL compatibility.
 func (d *Column) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// Index represents a database index for optimizing query performance.
+// Indexes can be unique, partial, or include additional columns.
 type Index struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -168,19 +182,23 @@ type Index struct {
     Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 }
 
-// SQLName returns the Indexin lowercase
+// SQLName returns the index name in lowercase for SQL compatibility.
 func (d *Index) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// RelationType represents the type of relationship between database tables.
 type RelationType string
 
+// Supported relationship types.
 const (
-    OneToOne   RelationType = "one_to_one"
-    OneToMany  RelationType = "one_to_many"
-    ManyToMany RelationType = "many_to_many"
+    OneToOne   RelationType = "one_to_one"   // One record in table A relates to one record in table B
+    OneToMany  RelationType = "one_to_many"  // One record in table A relates to many records in table B
+    ManyToMany RelationType = "many_to_many" // Many records in table A relate to many records in table B
 )
 
+// Relationship represents a relationship between two database tables.
+// Relationships can be one-to-one, one-to-many, or many-to-many.
 type Relationship struct {
     Name string       `json:"name" yaml:"name" xml:"name"`
     Type RelationType `json:"type" yaml:"type" xml:"type"`
@@ -198,11 +216,13 @@ type Relationship struct {
     Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 }
 
-// SQLName returns the Relationship lowercase
+// SQLName returns the relationship name in lowercase for SQL compatibility.
 func (d *Relationship) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// Constraint represents a database constraint that enforces data integrity rules.
+// Constraints can be primary keys, foreign keys, unique constraints, check constraints, or not-null constraints.
 type Constraint struct {
     Name string         `json:"name" yaml:"name" xml:"name"`
     Type ConstraintType `json:"type" yaml:"type" xml:"type"`
@@ -220,30 +240,37 @@ type Constraint struct {
     Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 }
 
+// SQLName returns the constraint name in lowercase for SQL compatibility.
 func (d *Constraint) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// ConstraintType represents the type of database constraint.
 type ConstraintType string
 
+// Enum represents a database enumeration type with a set of allowed values.
 type Enum struct {
     Name   string   `json:"name" yaml:"name" xml:"name"`
     Values []string `json:"values" yaml:"values" xml:"values"`
     Schema string   `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
 }
 
+// SQLName returns the enum name in lowercase for SQL compatibility.
 func (d *Enum) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
+// Supported constraint types.
 const (
-    PrimaryKeyConstraint ConstraintType = "primary_key"
-    ForeignKeyConstraint ConstraintType = "foreign_key"
-    UniqueConstraint     ConstraintType = "unique"
-    CheckConstraint      ConstraintType = "check"
-    NotNullConstraint    ConstraintType = "not_null"
+    PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
+    ForeignKeyConstraint ConstraintType = "foreign_key" // Foreign key references another table
+    UniqueConstraint     ConstraintType = "unique"      // Unique constraint ensures all values are different
+    CheckConstraint      ConstraintType = "check"       // Check constraint validates data against an expression
+    NotNullConstraint    ConstraintType = "not_null"    // Not null constraint requires a value
 )
 
+// Script represents a database migration or initialization script.
+// Scripts can have dependencies and rollback capabilities.
 type Script struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description" yaml:"description" xml:"description"`
@@ -256,11 +283,12 @@ type Script struct {
     Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 }
 
+// SQLName returns the script name in lowercase for SQL compatibility.
 func (d *Script) SQLName() string {
     return strings.ToLower(d.Name)
 }
 
-// Initialize functions
+// Initialization functions for creating new model instances with proper defaults.
 
 // InitDatabase initializes a new Database with empty slices
 func InitDatabase(name string) *Database {
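For readers unfamiliar with the models package documented above, here is a minimal sketch of building a model by hand with the `Init*` helpers that the new GraphQL reader also uses; the database, schema, table, and column names are illustrative:

```go
package example

import "git.warky.dev/wdevs/relspecgo/pkg/models"

// buildSampleDatabase assembles a tiny schema using the Init helpers and the
// Column fields exercised elsewhere in this changeset.
func buildSampleDatabase() *models.Database {
	db := models.InitDatabase("appdb")
	schema := models.InitSchema("public")

	users := models.InitTable("users", schema.Name)
	id := models.InitColumn("id", users.Name, schema.Name)
	id.Type = "bigint"
	id.NotNull = true
	id.IsPrimaryKey = true
	id.AutoIncrement = true
	users.Columns["id"] = id

	schema.Tables = append(schema.Tables, users)
	db.Schemas = []*models.Schema{schema}
	return db
}
```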
@@ -1,10 +1,12 @@
 package models
 
-// =============================================================================
-// Summary/Compact Views - Lightweight views with essential fields
-// =============================================================================
+// Summary/Compact Views
+//
+// This file provides lightweight summary structures with essential fields
+// and aggregated counts for quick database schema overviews without loading
+// full object graphs.
 
-// DatabaseSummary provides a compact overview of a database
+// DatabaseSummary provides a compact overview of a database with aggregated statistics.
 type DatabaseSummary struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -15,7 +17,7 @@ type DatabaseSummary struct {
     TotalColumns int `json:"total_columns" yaml:"total_columns" xml:"total_columns"`
 }
 
-// ToSummary converts a Database to a DatabaseSummary
+// ToSummary converts a Database to a DatabaseSummary with calculated counts.
 func (d *Database) ToSummary() *DatabaseSummary {
     summary := &DatabaseSummary{
         Name: d.Name,
@@ -36,7 +38,7 @@ func (d *Database) ToSummary() *DatabaseSummary {
     return summary
 }
 
-// SchemaSummary provides a compact overview of a schema
+// SchemaSummary provides a compact overview of a schema with aggregated statistics.
 type SchemaSummary struct {
     Name        string `json:"name" yaml:"name" xml:"name"`
     Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -47,7 +49,7 @@ type SchemaSummary struct {
     TotalConstraints int `json:"total_constraints" yaml:"total_constraints" xml:"total_constraints"`
 }
 
-// ToSummary converts a Schema to a SchemaSummary
+// ToSummary converts a Schema to a SchemaSummary with calculated counts.
 func (s *Schema) ToSummary() *SchemaSummary {
     summary := &SchemaSummary{
         Name: s.Name,
@@ -66,7 +68,7 @@ func (s *Schema) ToSummary() *SchemaSummary {
     return summary
 }
 
-// TableSummary provides a compact overview of a table
+// TableSummary provides a compact overview of a table with aggregated statistics.
 type TableSummary struct {
     Name   string `json:"name" yaml:"name" xml:"name"`
     Schema string `json:"schema" yaml:"schema" xml:"schema"`
@@ -79,7 +81,7 @@ type TableSummary struct {
     ForeignKeyCount int `json:"foreign_key_count" yaml:"foreign_key_count" xml:"foreign_key_count"`
 }
 
-// ToSummary converts a Table to a TableSummary
+// ToSummary converts a Table to a TableSummary with calculated counts.
 func (t *Table) ToSummary() *TableSummary {
     summary := &TableSummary{
         Name: t.Name,
@@ -12,14 +12,6 @@ import (
     "git.warky.dev/wdevs/relspecgo/pkg/readers"
 )
 
-// min returns the minimum of two integers
-func min(a, b int) int {
-    if a < b {
-        return a
-    }
-    return b
-}
-
 // Reader implements the readers.Reader interface for Drizzle schema format
 type Reader struct {
     options *readers.ReaderOptions
pkg/readers/graphql/README.md (new file, 203 lines)
# GraphQL Schema Reader

The GraphQL reader parses GraphQL Schema Definition Language (SDL) files and converts them into RelSpec's internal database model.

## Features

- **Standard GraphQL SDL** support (generic, non-framework-specific)
- **Type to Table mapping**: GraphQL types become database tables
- **Field to Column mapping**: GraphQL fields become table columns
- **Enum support**: GraphQL enums are preserved
- **Custom scalars**: DateTime, JSON, Date automatically mapped to appropriate SQL types
- **Implicit relationships**: Detects relationships from field types
- **Many-to-many support**: Creates junction tables for bidirectional array relationships
- **Configurable ID mapping**: Choose between bigint (default) or UUID for ID fields

## Supported GraphQL Features

### Built-in Scalars

- `ID` → bigint (default) or uuid (configurable)
- `String` → text
- `Int` → integer
- `Float` → double precision
- `Boolean` → boolean

### Custom Scalars

- `DateTime` → timestamp
- `JSON` → jsonb
- `Date` → date
- `Time` → time
- `Decimal` → numeric

Additional custom scalars can be mapped via metadata.

### Relationships

Relationships are inferred from field types:

```graphql
type Post {
  id: ID!
  title: String!
  author: User!   # Many-to-one (creates authorId FK column, NOT NULL)
  reviewer: User  # Many-to-one nullable (creates reviewerId FK column, NULL)
  tags: [Tag!]!   # One-to-many or many-to-many (depending on reverse)
}

type User {
  id: ID!
  posts: [Post!]! # Reverse of Post.author (no FK created)
}

type Tag {
  id: ID!
  posts: [Post!]! # Many-to-many with Post (creates PostTag junction table)
}
```

**Relationship Detection Rules:**

- Single type reference (`user: User`) → Creates FK column (e.g., `userId`)
- Array type reference (`posts: [Post!]!`) → One-to-many reverse (no FK on this table)
- Bidirectional arrays → Many-to-many (creates junction table)
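For instance, after the schema above is read, the many-to-one inferred from `Post.author` appears on the parsed model as an `authorId` column plus a foreign-key constraint. A minimal sketch of inspecting it, mirroring the checks in this changeset's tests (function and variable names are illustrative):

```go
package example

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// describeAuthorFK reports the foreign key that the reader infers for Post.author.
func describeAuthorFK(post *models.Table) {
	// The reader adds an FK column named after the field with an "Id" suffix.
	if col, ok := post.Columns["authorId"]; ok {
		fmt.Printf("column %s %s (not null: %v)\n", col.Name, col.Type, col.NotNull)
	}

	// It also records a foreign-key constraint pointing at the referenced type.
	for _, c := range post.Constraints {
		if c.Type == models.ForeignKeyConstraint {
			fmt.Printf("FK %v -> %s (on delete %s)\n", c.Columns, c.ReferencedTable, c.OnDelete)
		}
	}
}
```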
### Enums

```graphql
enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  role: Role!
}
```

Enums are preserved in the schema and can be used as column types.

## Usage

### Basic Usage

```go
import (
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With UUID ID Type

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "idType": "uuid", // Map ID scalar to uuid instead of bigint
    },
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With Per-Type ID Mapping

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "typeIdMappings": map[string]string{
            "User": "uuid",   // User.id → uuid
            "Post": "bigint", // Post.id → bigint
        },
    },
}
```

### With Custom Scalar Mappings

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "customScalarMappings": map[string]string{
            "Upload":  "bytea",
            "Decimal": "numeric(10,2)",
        },
    },
}
```

## CLI Usage

```bash
# Convert GraphQL to JSON
relspec convert --from graphql --from-path schema.graphql \
  --to json --to-path schema.json

# Convert GraphQL to GORM models
relspec convert --from graphql --from-path schema.graphql \
  --to gorm --to-path models/ --package models

# Convert GraphQL to PostgreSQL SQL
relspec convert --from graphql --from-path schema.graphql \
  --to pgsql --to-path schema.sql
```

## Metadata Options

| Option | Type | Description | Default |
|--------|------|-------------|---------|
| `idType` | string | Global ID type mapping ("bigint" or "uuid") | "bigint" |
| `typeIdMappings` | map[string]string | Per-type ID mappings | {} |
| `customScalarMappings` | map[string]string | Custom scalar to SQL type mappings | {} |
| `schemaName` | string | Schema name for all tables | "public" |

## Limitations

- Only supports GraphQL SDL (Schema Definition Language), not queries or mutations
- Directives are ignored (except for future extensibility)
- Interfaces and Unions are not supported
- GraphQL's concept of "schema" is different from database schemas; all types go into a single database schema (default: "public")

## Example

**Input** (`schema.graphql`):

```graphql
scalar DateTime

enum Role {
  ADMIN
  USER
}

type User {
  id: ID!
  email: String!
  role: Role!
  createdAt: DateTime!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  content: String
  published: Boolean!
  author: User!
}
```

**Result**: Database with:

- 2 tables: `User` and `Post`
- `Post` table has `authorId` foreign key to `User.id`
- `Role` enum with values: ADMIN, USER
- Custom scalar `DateTime` mapped to `timestamp`
pkg/readers/graphql/reader.go (new file, 279 lines)
package graphql

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

type Reader struct {
    options *readers.ReaderOptions
}

func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("file path is required for GraphQL reader")
    }

    content, err := os.ReadFile(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read file: %w", err)
    }

    return r.parseGraphQL(string(content))
}

func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }

    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schemas found")
    }

    return db.Schemas[0], nil
}

func (r *Reader) ReadTable() (*models.Table, error) {
    schema, err := r.ReadSchema()
    if err != nil {
        return nil, err
    }

    if len(schema.Tables) == 0 {
        return nil, fmt.Errorf("no tables found")
    }

    return schema.Tables[0], nil
}

type parseContext struct {
    inType        bool
    inEnum        bool
    currentType   string
    typeLines     []string
    currentEnum   string
    enumLines     []string
    customScalars map[string]bool
}

func (r *Reader) parseGraphQL(content string) (*models.Database, error) {
    dbName := "database"
    if r.options.Metadata != nil {
        if name, ok := r.options.Metadata["name"].(string); ok {
            dbName = name
        }
    }

    db := models.InitDatabase(dbName)
    schema := models.InitSchema("public")

    ctx := &parseContext{
        customScalars: make(map[string]bool),
    }

    // First pass: collect custom scalars and enums
    scanner := bufio.NewScanner(strings.NewReader(content))
    scalarRegex := regexp.MustCompile(`^\s*scalar\s+(\w+)`)
    enumRegex := regexp.MustCompile(`^\s*enum\s+(\w+)\s*\{`)
    closingBraceRegex := regexp.MustCompile(`^\s*\}`)

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := scalarRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.customScalars[matches[1]] = true
            continue
        }

        if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inEnum = true
            ctx.currentEnum = matches[1]
            ctx.enumLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inEnum {
            r.parseEnum(ctx.currentEnum, ctx.enumLines, schema)
            // Add enum name to custom scalars for type detection
            ctx.customScalars[ctx.currentEnum] = true
            ctx.inEnum = false
            ctx.currentEnum = ""
            ctx.enumLines = nil
            continue
        }

        if ctx.inEnum {
            ctx.enumLines = append(ctx.enumLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    // Second pass: parse types
    scanner = bufio.NewScanner(strings.NewReader(content))
    typeRegex := regexp.MustCompile(`^\s*type\s+(\w+)\s*\{`)
    ctx.inType = false
    ctx.inEnum = false

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := typeRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inType = true
            ctx.currentType = matches[1]
            ctx.typeLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inType {
            if err := r.parseType(ctx.currentType, ctx.typeLines, schema, ctx); err != nil {
                return nil, fmt.Errorf("failed to parse type %s: %w", ctx.currentType, err)
            }
            ctx.inType = false
            ctx.currentType = ""
            ctx.typeLines = nil
            continue
        }

        if ctx.inType {
            ctx.typeLines = append(ctx.typeLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    db.Schemas = []*models.Schema{schema}

    // Third pass: detect and create relationships
    if err := r.detectAndCreateRelationships(schema, ctx); err != nil {
        return nil, fmt.Errorf("failed to create relationships: %w", err)
    }

    return db, nil
}

type fieldInfo struct {
    name          string
    typeName      string
    isArray       bool
    isNullable    bool
    innerNullable bool
}

func (r *Reader) parseType(typeName string, lines []string, schema *models.Schema, ctx *parseContext) error {
    table := models.InitTable(typeName, schema.Name)
    table.Metadata = make(map[string]any)

    // Store field info for relationship detection
    relationFields := make(map[string]*fieldInfo)

    fieldRegex := regexp.MustCompile(`^\s*(\w+)\s*:\s*(\[)?(\w+)(!)?(\])?(!)?\s*`)

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        matches := fieldRegex.FindStringSubmatch(trimmed)
        if matches == nil {
            continue
        }

        fieldName := matches[1]
        hasOpenBracket := matches[2] == "["
        baseType := matches[3]
        innerNonNull := matches[4] == "!"
        hasCloseBracket := matches[5] == "]"
        outerNonNull := matches[6] == "!"

        isArray := hasOpenBracket && hasCloseBracket

        // Determine if this is a scalar or a relation
        if r.isScalarType(baseType, ctx) {
            // This is a scalar field
            column := models.InitColumn(fieldName, table.Name, schema.Name)
            column.Type = r.graphQLTypeToSQL(baseType, fieldName, typeName)

            if isArray {
                // Array of scalars: use array type
                column.Type += "[]"
                column.NotNull = outerNonNull
            } else {
                column.NotNull = !isArray && innerNonNull
            }

            // Check if this is a primary key (convention: field named "id")
            if fieldName == "id" {
                column.IsPrimaryKey = true
                column.AutoIncrement = true
            }

            table.Columns[fieldName] = column
        } else {
            // This is a relation field - store for later processing
            relationFields[fieldName] = &fieldInfo{
                name:          fieldName,
                typeName:      baseType,
                isArray:       isArray,
                isNullable:    !innerNonNull && !isArray,
                innerNullable: !innerNonNull && isArray,
            }
        }
    }

    // Store relation fields in table metadata for relationship detection
    if len(relationFields) > 0 {
        table.Metadata["relationFields"] = relationFields
    }

    schema.Tables = append(schema.Tables, table)
    return nil
}

func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
    enum := &models.Enum{
        Name:   enumName,
        Schema: schema.Name,
        Values: make([]string, 0),
    }

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }
        // Enum values are simple identifiers
        enum.Values = append(enum.Values, trimmed)
    }

    schema.Enums = append(schema.Enums, enum)
}
pkg/readers/graphql/reader_test.go (new file, 362 lines)
|
package graphql

import (
	"path/filepath"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	schema := db.Schemas[0]
	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Tables) != 1 {
		t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
	}

	userTable := schema.Tables[0]
	if userTable.Name != "User" {
		t.Errorf("Expected table name 'User', got '%s'", userTable.Name)
	}

	// Verify columns
	expectedColumns := map[string]struct {
		sqlType string
		notNull bool
		isPK    bool
	}{
		"id":     {"bigint", true, true},
		"email":  {"text", true, false},
		"name":   {"text", false, false},
		"age":    {"integer", false, false},
		"active": {"boolean", true, false},
	}

	if len(userTable.Columns) != len(expectedColumns) {
		t.Fatalf("Expected %d columns, got %d", len(expectedColumns), len(userTable.Columns))
	}

	for colName, expected := range expectedColumns {
		col, exists := userTable.Columns[colName]
		if !exists {
			t.Errorf("Expected column '%s' not found", colName)
			continue
		}

		if col.Type != expected.sqlType {
			t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expected.sqlType, col.Type)
		}

		if col.NotNull != expected.notNull {
			t.Errorf("Column '%s': expected NotNull=%v, got %v", colName, expected.notNull, col.NotNull)
		}

		if col.IsPrimaryKey != expected.isPK {
			t.Errorf("Column '%s': expected IsPrimaryKey=%v, got %v", colName, expected.isPK, col.IsPrimaryKey)
		}
	}
}

func TestReader_ReadDatabase_WithRelations(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "relations.graphql"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	schema := db.Schemas[0]

	if len(schema.Tables) != 2 {
		t.Fatalf("Expected 2 tables, got %d", len(schema.Tables))
	}

	// Find Post table (should have FK to User)
	var postTable *models.Table
	for _, table := range schema.Tables {
		if table.Name == "Post" {
			postTable = table
			break
		}
	}

	if postTable == nil {
		t.Fatal("Post table not found")
	}

	// Verify authorId FK column was created
	authorIdCol, exists := postTable.Columns["authorId"]
	if !exists {
		t.Fatal("Expected 'authorId' FK column not found in Post table")
	}

	if authorIdCol.Type != "bigint" {
		t.Errorf("Expected authorId type 'bigint', got '%s'", authorIdCol.Type)
	}

	if !authorIdCol.NotNull {
		t.Error("Expected authorId to be NOT NULL")
	}

	// Verify FK constraint
	fkConstraintFound := false
	for _, constraint := range postTable.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			if constraint.ReferencedTable == "User" && len(constraint.Columns) > 0 && constraint.Columns[0] == "authorId" {
				fkConstraintFound = true
				if constraint.OnDelete != "CASCADE" {
					t.Errorf("Expected OnDelete CASCADE, got %s", constraint.OnDelete)
				}
				break
			}
		}
	}

	if !fkConstraintFound {
		t.Error("Foreign key constraint from Post to User not found")
	}
}

func TestReader_ReadDatabase_WithEnums(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "enums.graphql"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	schema := db.Schemas[0]

	if len(schema.Enums) != 1 {
		t.Fatalf("Expected 1 enum, got %d", len(schema.Enums))
	}

	roleEnum := schema.Enums[0]
	if roleEnum.Name != "Role" {
		t.Errorf("Expected enum name 'Role', got '%s'", roleEnum.Name)
	}

	expectedValues := []string{"ADMIN", "USER", "GUEST"}
	if len(roleEnum.Values) != len(expectedValues) {
		t.Fatalf("Expected %d enum values, got %d", len(expectedValues), len(roleEnum.Values))
	}

	for i, expected := range expectedValues {
		if roleEnum.Values[i] != expected {
			t.Errorf("Expected enum value '%s' at index %d, got '%s'", expected, i, roleEnum.Values[i])
		}
	}

	// Verify role column in User table
	userTable := schema.Tables[0]
	roleCol, exists := userTable.Columns["role"]
	if !exists {
		t.Fatal("Expected 'role' column not found")
	}

	if roleCol.Type != "Role" {
		t.Errorf("Expected role type 'Role', got '%s'", roleCol.Type)
	}
}

func TestReader_ReadDatabase_CustomScalars(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "custom_scalars.graphql"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	schema := db.Schemas[0]
	userTable := schema.Tables[0]

	// Verify custom scalar mappings
	expectedTypes := map[string]string{
		"createdAt": "timestamp",
		"metadata":  "jsonb",
		"birthDate": "date",
	}

	for colName, expectedType := range expectedTypes {
		col, exists := userTable.Columns[colName]
		if !exists {
			t.Errorf("Expected column '%s' not found", colName)
			continue
		}

		if col.Type != expectedType {
			t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expectedType, col.Type)
		}
	}
}

func TestReader_ReadDatabase_UUIDMetadata(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
		Metadata: map[string]interface{}{
			"idType": "uuid",
		},
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	schema := db.Schemas[0]
	userTable := schema.Tables[0]

	idCol, exists := userTable.Columns["id"]
	if !exists {
		t.Fatal("Expected 'id' column not found")
	}

	if idCol.Type != "uuid" {
		t.Errorf("Expected id type 'uuid' with metadata, got '%s'", idCol.Type)
	}
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "complex.graphql"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	schema := db.Schemas[0]

	// Should have 5 tables: User, Profile, Post, Tag, and PostTag (join table)
	expectedTableCount := 5
	if len(schema.Tables) != expectedTableCount {
		t.Fatalf("Expected %d tables, got %d", expectedTableCount, len(schema.Tables))
	}

	// Verify PostTag join table exists (many-to-many between Post and Tag)
	var joinTable *models.Table
	for _, table := range schema.Tables {
		if table.Name == "PostTag" {
			joinTable = table
			break
		}
	}

	if joinTable == nil {
		t.Fatal("Expected PostTag join table not found")
	}

	// Verify join table has both FK columns
	if _, exists := joinTable.Columns["postId"]; !exists {
		t.Error("Expected 'postId' column in PostTag join table")
	}

	if _, exists := joinTable.Columns["tagId"]; !exists {
		t.Error("Expected 'tagId' column in PostTag join table")
	}

	// Verify composite primary key
	pkFound := false
	for _, constraint := range joinTable.Constraints {
		if constraint.Type == models.PrimaryKeyConstraint {
			if len(constraint.Columns) == 2 {
				pkFound = true
			}
			break
		}
	}

	if !pkFound {
		t.Error("Expected composite primary key in PostTag join table")
	}
}

func TestReader_ReadSchema(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
	}

	reader := NewReader(opts)
	schema, err := reader.ReadSchema()
	if err != nil {
		t.Fatalf("ReadSchema() error = %v", err)
	}

	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Tables) != 1 {
		t.Errorf("Expected 1 table, got %d", len(schema.Tables))
	}
}

func TestReader_ReadTable(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
	}

	reader := NewReader(opts)
	table, err := reader.ReadTable()
	if err != nil {
		t.Fatalf("ReadTable() error = %v", err)
	}

	if table.Name != "User" {
		t.Errorf("Expected table name 'User', got '%s'", table.Name)
	}
}

func TestReader_InvalidPath(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: "/nonexistent/path.graphql",
	}

	reader := NewReader(opts)
	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for invalid path, got nil")
	}
}

func TestReader_EmptyPath(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: "",
	}

	reader := NewReader(opts)
	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for empty path, got nil")
	}
}
pkg/readers/graphql/relationships.go (new file, 225 lines)
@@ -0,0 +1,225 @@
package graphql

import (
	"fmt"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (r *Reader) detectAndCreateRelationships(schema *models.Schema, ctx *parseContext) error {
	// Build table lookup map
	tableMap := make(map[string]*models.Table)
	for _, table := range schema.Tables {
		tableMap[table.Name] = table
	}

	// Process each table's relation fields
	for _, table := range schema.Tables {
		relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
		if !ok || len(relationFields) == 0 {
			continue
		}

		for fieldName, fieldInfo := range relationFields {
			targetTable, exists := tableMap[fieldInfo.typeName]
			if !exists {
				// Referenced type doesn't exist - might be an interface/union, skip
				continue
			}

			if fieldInfo.isArray {
				// This is a one-to-many or many-to-many reverse side.
				// Check if the target table has a reverse array field.
				if r.hasReverseArrayField(targetTable, table.Name) {
					// Bidirectional array = many-to-many.
					// Only create the join table once (the lexicographically first table creates it).
					if table.Name < targetTable.Name {
						if err := r.createManyToManyJoinTable(schema, table, targetTable, fieldName, tableMap); err != nil {
							return err
						}
					}
				}
				// For one-to-many, no action needed (FK is on the other table)
			} else {
				// This is a many-to-one or one-to-one.
				// Create the FK column on this table.
				if err := r.createForeignKeyColumn(table, targetTable, fieldName, fieldInfo.isNullable, schema); err != nil {
					return err
				}
			}
		}
	}

	// Clean up metadata
	for _, table := range schema.Tables {
		delete(table.Metadata, "relationFields")
	}

	return nil
}

func (r *Reader) hasReverseArrayField(table *models.Table, targetTypeName string) bool {
	relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
	if !ok {
		return false
	}

	for _, fieldInfo := range relationFields {
		if fieldInfo.typeName == targetTypeName && fieldInfo.isArray {
			return true
		}
	}

	return false
}

func (r *Reader) createForeignKeyColumn(fromTable, toTable *models.Table, fieldName string, nullable bool, schema *models.Schema) error {
	// Get primary key from target table
	pkCol := toTable.GetPrimaryKey()
	if pkCol == nil {
		return fmt.Errorf("target table %s has no primary key for relationship", toTable.Name)
	}

	// Create FK column name: {fieldName}Id
	fkColName := fieldName + "Id"

	// Check if column already exists (shouldn't happen, but be safe)
	if _, exists := fromTable.Columns[fkColName]; exists {
		return nil
	}

	// Create FK column
	fkCol := models.InitColumn(fkColName, fromTable.Name, schema.Name)
	fkCol.Type = pkCol.Type
	fkCol.NotNull = !nullable

	fromTable.Columns[fkColName] = fkCol

	// Create FK constraint
	constraint := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", fromTable.Name, fieldName),
		models.ForeignKeyConstraint,
	)
	constraint.Schema = schema.Name
	constraint.Table = fromTable.Name
	constraint.Columns = []string{fkColName}
	constraint.ReferencedSchema = schema.Name
	constraint.ReferencedTable = toTable.Name
	constraint.ReferencedColumns = []string{pkCol.Name}
	constraint.OnDelete = "CASCADE"
	constraint.OnUpdate = "RESTRICT"

	fromTable.Constraints[constraint.Name] = constraint

	// Create relationship
	relationship := models.InitRelationship(
		fmt.Sprintf("rel_%s_%s", fromTable.Name, fieldName),
		models.OneToMany,
	)
	relationship.FromTable = fromTable.Name
	relationship.FromSchema = schema.Name
	relationship.FromColumns = []string{fkColName}
	relationship.ToTable = toTable.Name
	relationship.ToSchema = schema.Name
	relationship.ToColumns = []string{pkCol.Name}
	relationship.ForeignKey = constraint.Name

	fromTable.Relationships[relationship.Name] = relationship

	return nil
}

func (r *Reader) createManyToManyJoinTable(schema *models.Schema, table1, table2 *models.Table, fieldName string, tableMap map[string]*models.Table) error {
	// Create join table name
	joinTableName := table1.Name + table2.Name

	// Check if join table already exists
	if _, exists := tableMap[joinTableName]; exists {
		return nil
	}

	// Get primary keys
	pk1 := table1.GetPrimaryKey()
	pk2 := table2.GetPrimaryKey()

	if pk1 == nil || pk2 == nil {
		return fmt.Errorf("cannot create many-to-many: tables must have primary keys")
	}

	// Create join table
	joinTable := models.InitTable(joinTableName, schema.Name)

	// Create FK column for table1
	fkCol1Name := strings.ToLower(table1.Name) + "Id"
	fkCol1 := models.InitColumn(fkCol1Name, joinTable.Name, schema.Name)
	fkCol1.Type = pk1.Type
	fkCol1.NotNull = true
	joinTable.Columns[fkCol1Name] = fkCol1

	// Create FK column for table2
	fkCol2Name := strings.ToLower(table2.Name) + "Id"
	fkCol2 := models.InitColumn(fkCol2Name, joinTable.Name, schema.Name)
	fkCol2.Type = pk2.Type
	fkCol2.NotNull = true
	joinTable.Columns[fkCol2Name] = fkCol2

	// Create composite primary key
	pkConstraint := models.InitConstraint(
		fmt.Sprintf("pk_%s", joinTableName),
		models.PrimaryKeyConstraint,
	)
	pkConstraint.Schema = schema.Name
	pkConstraint.Table = joinTable.Name
	pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
	joinTable.Constraints[pkConstraint.Name] = pkConstraint

	// Create FK constraint to table1
	fk1 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, table1.Name),
		models.ForeignKeyConstraint,
	)
	fk1.Schema = schema.Name
	fk1.Table = joinTable.Name
	fk1.Columns = []string{fkCol1Name}
	fk1.ReferencedSchema = schema.Name
	fk1.ReferencedTable = table1.Name
	fk1.ReferencedColumns = []string{pk1.Name}
	fk1.OnDelete = "CASCADE"
	fk1.OnUpdate = "RESTRICT"
	joinTable.Constraints[fk1.Name] = fk1

	// Create FK constraint to table2
	fk2 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, table2.Name),
		models.ForeignKeyConstraint,
	)
	fk2.Schema = schema.Name
	fk2.Table = joinTable.Name
	fk2.Columns = []string{fkCol2Name}
	fk2.ReferencedSchema = schema.Name
	fk2.ReferencedTable = table2.Name
	fk2.ReferencedColumns = []string{pk2.Name}
	fk2.OnDelete = "CASCADE"
	fk2.OnUpdate = "RESTRICT"
	joinTable.Constraints[fk2.Name] = fk2

	// Create relationships
	rel1 := models.InitRelationship(
		fmt.Sprintf("rel_%s_%s_%s", joinTableName, table1.Name, table2.Name),
		models.ManyToMany,
	)
	rel1.FromTable = table1.Name
	rel1.FromSchema = schema.Name
	rel1.ToTable = table2.Name
	rel1.ToSchema = schema.Name
	rel1.ThroughTable = joinTableName
	rel1.ThroughSchema = schema.Name
	joinTable.Relationships[rel1.Name] = rel1

	// Add join table to schema
	schema.Tables = append(schema.Tables, joinTable)
	tableMap[joinTableName] = joinTable

	return nil
}
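A minimal sketch of how this detection plays out end to end. This is a hypothetical standalone program, not part of the change set: the SDL mirrors the relationships exercised by the `complex.graphql` test asset, and the expected `PostTag` table name and `postId`/`tagId` columns follow `createManyToManyJoinTable` above.

```go
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

func main() {
	// Two types referencing each other through non-null list fields:
	// the reader should synthesize a PostTag join table.
	sdl := `
type Post {
  id: ID!
  title: String!
  tags: [Tag!]!
}

type Tag {
  id: ID!
  name: String!
  posts: [Post!]!
}
`
	path := filepath.Join(os.TempDir(), "m2m.graphql")
	if err := os.WriteFile(path, []byte(sdl), 0o644); err != nil {
		log.Fatal(err)
	}

	reader := graphql.NewReader(&readers.ReaderOptions{FilePath: path})
	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	for _, table := range db.Schemas[0].Tables {
		fmt.Println(table.Name) // expected: Post, Tag, PostTag
	}
}
```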
pkg/readers/graphql/type_mapping.go (new file, 97 lines)
@@ -0,0 +1,97 @@
package graphql

func (r *Reader) isScalarType(typeName string, ctx *parseContext) bool {
	// Built-in GraphQL scalars
	builtInScalars := map[string]bool{
		"ID":      true,
		"String":  true,
		"Int":     true,
		"Float":   true,
		"Boolean": true,
	}

	if builtInScalars[typeName] {
		return true
	}

	// Custom scalars declared in the schema
	if ctx.customScalars[typeName] {
		return true
	}

	// Common custom scalars (even if not declared)
	commonCustomScalars := map[string]bool{
		"DateTime": true,
		"JSON":     true,
		"Date":     true,
		"Time":     true,
		"Upload":   true,
		"Decimal":  true,
	}

	return commonCustomScalars[typeName]
}

func (r *Reader) graphQLTypeToSQL(gqlType string, fieldName string, typeName string) string {
	// Check for ID type with configurable mapping
	if gqlType == "ID" {
		// Check metadata for ID type preference
		if r.options.Metadata != nil {
			// Global idType setting
			if idType, ok := r.options.Metadata["idType"].(string); ok {
				if idType == "uuid" {
					return "uuid"
				}
			}

			// Per-type ID mapping
			if typeIdMappings, ok := r.options.Metadata["typeIdMappings"].(map[string]string); ok {
				if idType, ok := typeIdMappings[typeName]; ok {
					if idType == "uuid" {
						return "uuid"
					}
				}
			}
		}

		return "bigint" // Default
	}

	// Custom scalar mappings
	if r.options.Metadata != nil {
		if customMappings, ok := r.options.Metadata["customScalarMappings"].(map[string]string); ok {
			if sqlType, ok := customMappings[gqlType]; ok {
				return sqlType
			}
		}
	}

	// Built-in custom scalar mappings
	customScalars := map[string]string{
		"DateTime": "timestamp",
		"JSON":     "jsonb",
		"Date":     "date",
		"Time":     "time",
		"Decimal":  "numeric",
		"Upload":   "bytea",
	}
	if sqlType, ok := customScalars[gqlType]; ok {
		return sqlType
	}

	// Standard scalar mappings
	typeMap := map[string]string{
		"String":  "text",
		"Int":     "integer",
		"Float":   "double precision",
		"Boolean": "boolean",
	}

	if sqlType, ok := typeMap[gqlType]; ok {
		return sqlType
	}

	// If not a known scalar, assume it's an enum or custom type.
	// Return as-is (might be an enum).
	return gqlType
}
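For reference, the metadata keys consumed above (`idType`, `typeIdMappings`, `customScalarMappings`) are supplied through `readers.ReaderOptions.Metadata`. A short illustrative snippet follows; the key names come from the code above, while the concrete values are only examples:

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

opts := &readers.ReaderOptions{
	FilePath: "schema.graphql",
	Metadata: map[string]interface{}{
		"idType": "uuid", // map every ID field to uuid instead of bigint
		"typeIdMappings": map[string]string{ // or override per type
			"User": "uuid",
		},
		"customScalarMappings": map[string]string{ // override custom scalar SQL types
			"DateTime": "timestamptz",
		},
	},
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```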
pkg/writers/graphql/README.md (new file, 272 lines)
@@ -0,0 +1,272 @@
# GraphQL Schema Writer

The GraphQL writer converts RelSpec's internal database model into GraphQL Schema Definition Language (SDL) files.

## Features

- **Table to Type mapping**: Database tables become GraphQL types
- **Column to Field mapping**: Table columns become type fields
- **Enum support**: Database enums are preserved
- **Custom scalar declarations**: Automatically declares DateTime, JSON, Date scalars
- **Implicit relationships**: Generates relationship fields from foreign keys
- **Many-to-many support**: Handles junction tables intelligently
- **Clean output**: Proper formatting, field ordering, and comments

## Type Mappings

### SQL to GraphQL

| SQL Type | GraphQL Type | Notes |
|----------|--------------|-------|
| bigint, integer, serial (PK) | ID | Primary keys map to ID |
| bigint, integer, int | Int | |
| text, varchar, char | String | |
| uuid (PK) | ID | UUID primary keys also map to ID |
| uuid | String | Non-PK UUIDs map to String |
| double precision, numeric, float | Float | |
| boolean | Boolean | |
| timestamp, timestamptz | DateTime | Custom scalar |
| jsonb, json | JSON | Custom scalar |
| date | Date | Custom scalar |
| Enum types | Enum | Preserves enum name |
| Arrays (e.g., text[]) | [Type] | Mapped to GraphQL lists |

## Relationship Handling

The writer intelligently generates relationship fields based on foreign key constraints:

### Forward Relationships (FK on this table)

```sql
-- Post table has authorId FK to User.id
CREATE TABLE post (
  id bigint PRIMARY KEY,
  title text NOT NULL,
  author_id bigint NOT NULL REFERENCES user(id)
);
```

```graphql
type Post {
  id: ID!
  title: String!
  author: User! # Generated from authorId FK
}
```

### Reverse Relationships (FK on other table)

```graphql
type User {
  id: ID!
  email: String!
  posts: [Post!]! # Reverse relationship (Post has FK to User)
}
```

### Many-to-Many Relationships

Junction tables (tables with only PKs and FKs) are automatically detected and hidden:

```sql
CREATE TABLE post_tag (
  post_id bigint NOT NULL REFERENCES post(id),
  tag_id bigint NOT NULL REFERENCES tag(id),
  PRIMARY KEY (post_id, tag_id)
);
```

```graphql
type Post {
  id: ID!
  tags: [Tag!]! # Many-to-many through PostTag junction table
}

type Tag {
  id: ID!
  posts: [Post!]! # Reverse many-to-many
}

# Note: PostTag junction table is NOT included in output
```

## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
)

opts := &writers.WriterOptions{
	OutputPath: "schema.graphql",
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

### With Metadata Options

```go
opts := &writers.WriterOptions{
	OutputPath: "schema.graphql",
	Metadata: map[string]any{
		"includeScalarDeclarations": true, // Include scalar declarations
		"includeComments":           true, // Include field/table comments
	},
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

### Write to Stdout

```go
opts := &writers.WriterOptions{
	OutputPath: "", // Empty path writes to stdout
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

## CLI Usage

```bash
# Convert PostgreSQL database to GraphQL
relspec convert --from pgsql \
  --from-conn "postgres://user:pass@localhost:5432/mydb" \
  --to graphql --to-path schema.graphql

# Convert GORM models to GraphQL
relspec convert --from gorm --from-path ./models \
  --to graphql --to-path schema.graphql

# Convert JSON to GraphQL
relspec convert --from json --from-path schema.json \
  --to graphql --to-path schema.graphql
```

## Output Format

The generated GraphQL schema follows this structure:

1. **Header comment** (if enabled)
2. **Custom scalar declarations** (if any custom scalars are used)
3. **Enum definitions** (alphabetically sorted)
4. **Type definitions** (with fields ordered: ID first, then scalars alphabetically, then relationships)

### Example Output

```graphql
# Generated GraphQL Schema
# Database: myapp

scalar DateTime
scalar JSON
scalar Date

enum Role {
  ADMIN
  USER
  MODERATOR
}

type User {
  id: ID!
  createdAt: DateTime!
  email: String!
  name: String!
  role: Role!

  posts: [Post!]!
  profile: Profile
}

type Post {
  id: ID!
  content: String
  published: Boolean!
  publishedAt: Date
  title: String!

  author: User!
  tags: [Tag!]!
}

type Tag {
  id: ID!
  name: String!

  posts: [Post!]!
}
```

## Metadata Options

| Option | Type | Description | Default |
|--------|------|-------------|---------|
| `includeScalarDeclarations` | bool | Include `scalar DateTime`, etc. declarations | true |
| `includeComments` | bool | Include table/field descriptions as comments | true |
| `preservePKType` | bool | Use Int/String for PKs instead of ID | false |
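For example, `preservePKType` can be set alongside the other options. A small sketch, assuming `db` is an already-loaded `*models.Database` as in the Usage examples above:

```go
opts := &writers.WriterOptions{
	OutputPath: "schema.graphql",
	Metadata: map[string]any{
		"preservePKType": true, // emit Int/String for primary keys instead of ID
	},
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```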
## Field Naming Conventions

- **FK columns**: Foreign key columns like `authorId` are removed from the output; instead, a relationship field `author` is generated
- **Relationship pluralization**: Reverse one-to-many relationships are pluralized (e.g., `posts`, `tags`)
- **CamelCase**: Field names are kept in their original casing from the database

## Junction Table Detection

A table is considered a junction table if it:
1. Has exactly 2 foreign key constraints
2. Has only columns that are either primary keys or foreign keys
3. Has a composite primary key on the FK columns

Junction tables are automatically hidden from the GraphQL output, and many-to-many relationship fields are generated on the related types instead, roughly as sketched below.
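The check implemented by the writer (`isJoinTable` in `pkg/writers/graphql/writer.go`, shown later in this change set) boils down to a predicate along these lines. This is a simplified sketch with a hypothetical function name, not the exact implementation:

```go
// looksLikeJunctionTable reports whether a table has exactly two FK
// constraints and every column is covered by a primary key or a foreign key.
func looksLikeJunctionTable(table *models.Table) bool {
	fkCount := 0
	fkCols := map[string]bool{}
	for _, c := range table.Constraints {
		if c.Type == models.ForeignKeyConstraint {
			fkCount++
			for _, col := range c.Columns {
				fkCols[col] = true
			}
		}
	}
	if fkCount != 2 {
		return false
	}
	for _, col := range table.Columns {
		if !fkCols[col.Name] && !col.IsPrimaryKey {
			return false
		}
	}
	return true
}
```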
## Limitations

- All tables in all schemas are flattened into a single GraphQL schema
- No support for GraphQL-specific features like directives, interfaces, or unions
- Nullable vs non-nullable is determined solely by the `NOT NULL` constraint

## Example Conversion

**Input** (Database Schema):
```sql
CREATE TABLE user (
  id bigint PRIMARY KEY,
  email text NOT NULL,
  created_at timestamp NOT NULL
);

CREATE TABLE post (
  id bigint PRIMARY KEY,
  title text NOT NULL,
  author_id bigint NOT NULL REFERENCES user(id)
);
```

**Output** (GraphQL Schema):
```graphql
scalar DateTime

type User {
  id: ID!
  createdAt: DateTime!
  email: String!

  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!

  author: User!
}
```
pkg/writers/graphql/relationships.go (new file, 178 lines)
@@ -0,0 +1,178 @@
package graphql

import (
	"fmt"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (w *Writer) generateRelationFields(table *models.Table, db *models.Database, schema *models.Schema) []string {
	var fields []string

	// 1. Forward relationships (this table has FK)
	for _, constraint := range table.Constraints {
		if constraint.Type != models.ForeignKeyConstraint {
			continue
		}

		// Find the related table
		relatedTable := w.findTable(db, constraint.ReferencedSchema, constraint.ReferencedTable)
		if relatedTable == nil {
			continue
		}

		// Generate field name (remove "Id" suffix from FK column if present)
		fieldName := w.relationFieldName(constraint.Columns[0])

		// Determine nullability from FK column
		nullable := true
		for _, colName := range constraint.Columns {
			if col, exists := table.Columns[colName]; exists {
				if col.NotNull {
					nullable = false
					break
				}
			}
		}

		// Format: fieldName: RelatedType! or fieldName: RelatedType
		gqlType := relatedTable.Name
		if !nullable {
			gqlType += "!"
		}

		fields = append(fields, fmt.Sprintf(" %s: %s", fieldName, gqlType))
	}

	// 2. Reverse relationships (other tables reference this table)
	for _, otherSchema := range db.Schemas {
		for _, otherTable := range otherSchema.Tables {
			if otherTable.Name == table.Name && otherSchema.Name == schema.Name {
				continue
			}

			// Skip join tables for many-to-many
			if w.isJoinTable(otherTable) {
				// Check if this is a many-to-many through this join table
				if m2mField := w.getManyToManyField(table, otherTable, db); m2mField != "" {
					fields = append(fields, m2mField)
				}
				continue
			}

			for _, constraint := range otherTable.Constraints {
				if constraint.Type == models.ForeignKeyConstraint &&
					constraint.ReferencedTable == table.Name &&
					constraint.ReferencedSchema == schema.Name {
					// Add reverse relationship field (array)
					fieldName := w.pluralize(w.camelCase(otherTable.Name))
					fields = append(fields, fmt.Sprintf(" %s: [%s!]!", fieldName, otherTable.Name))
				}
			}
		}
	}

	return fields
}

func (w *Writer) getManyToManyField(table *models.Table, joinTable *models.Table, db *models.Database) string {
	// Find the two FK constraints in the join table
	var fk1, fk2 *models.Constraint
	for _, constraint := range joinTable.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			if fk1 == nil {
				fk1 = constraint
			} else {
				fk2 = constraint
			}
		}
	}

	if fk1 == nil || fk2 == nil {
		return ""
	}

	// Determine which FK points to our table and which to the other table
	var targetConstraint *models.Constraint
	if fk1.ReferencedTable == table.Name {
		targetConstraint = fk2
	} else if fk2.ReferencedTable == table.Name {
		targetConstraint = fk1
	} else {
		return "" // This join table doesn't involve our table
	}

	// Find the target table
	targetTable := w.findTable(db, targetConstraint.ReferencedSchema, targetConstraint.ReferencedTable)
	if targetTable == nil {
		return ""
	}

	// Generate many-to-many field
	fieldName := w.pluralize(w.camelCase(targetTable.Name))
	return fmt.Sprintf(" %s: [%s!]!", fieldName, targetTable.Name)
}

func (w *Writer) findTable(db *models.Database, schemaName, tableName string) *models.Table {
	for _, schema := range db.Schemas {
		if schema.Name != schemaName {
			continue
		}
		for _, table := range schema.Tables {
			if table.Name == tableName {
				return table
			}
		}
	}
	return nil
}

func (w *Writer) relationFieldName(fkColumnName string) string {
	// Remove "Id" or "_id" suffix
	name := fkColumnName
	if strings.HasSuffix(name, "Id") {
		name = name[:len(name)-2]
	} else if strings.HasSuffix(name, "_id") {
		name = name[:len(name)-3]
	}

	return w.camelCase(name)
}

func (w *Writer) camelCase(s string) string {
	// If already camelCase or PascalCase, convert to camelCase
	if s == "" {
		return s
	}

	// Convert first character to lowercase
	return strings.ToLower(string(s[0])) + s[1:]
}

func (w *Writer) pluralize(s string) string {
	// Simple pluralization rules
	if s == "" {
		return s
	}

	// Already plural
	if strings.HasSuffix(s, "s") {
		return s
	}

	// Words ending in 'y' → 'ies'
	if strings.HasSuffix(s, "y") {
		return s[:len(s)-1] + "ies"
	}

	// Words ending in 's', 'x', 'z', 'ch', 'sh' → add 'es'
	if strings.HasSuffix(s, "s") || strings.HasSuffix(s, "x") ||
		strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
		strings.HasSuffix(s, "sh") {
		return s + "es"
	}

	// Default: add 's'
	return s + "s"
}
pkg/writers/graphql/type_mapping.go (new file, 148 lines)
@@ -0,0 +1,148 @@
package graphql

import (
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (w *Writer) sqlTypeToGraphQL(sqlType string, column *models.Column, table *models.Table, schema *models.Schema) string {
	// Check if this is a primary key → ID type
	if column.IsPrimaryKey {
		// Check metadata for explicit type preference
		if w.options.Metadata != nil {
			if preserveType, ok := w.options.Metadata["preservePKType"].(bool); ok && preserveType {
				// Use Int or String based on SQL type
				if w.isIntegerType(sqlType) {
					return "Int"
				}
				return "String"
			}
		}
		return "ID"
	}

	// Map SQL types to custom scalars
	if scalar := w.sqlTypeToCustomScalar(sqlType); scalar != "" {
		return scalar
	}

	// Check if it's an enum
	if w.isEnumType(sqlType, schema) {
		return sqlType
	}

	// Standard type mappings
	baseType := strings.Split(sqlType, "(")[0] // Remove length/precision
	baseType = strings.TrimSpace(baseType)

	// Handle array types
	if strings.HasSuffix(baseType, "[]") {
		elemType := strings.TrimSuffix(baseType, "[]")
		gqlType := w.mapBaseTypeToGraphQL(elemType)
		return "[" + gqlType + "]"
	}

	return w.mapBaseTypeToGraphQL(baseType)
}

func (w *Writer) mapBaseTypeToGraphQL(baseType string) string {
	typeMap := map[string]string{
		// Text types
		"text":      "String",
		"varchar":   "String",
		"char":      "String",
		"character": "String",
		"bpchar":    "String",
		"name":      "String",

		// UUID
		"uuid": "ID",

		// Integer types
		"integer":     "Int",
		"int":         "Int",
		"int2":        "Int",
		"int4":        "Int",
		"int8":        "Int",
		"bigint":      "Int",
		"smallint":    "Int",
		"serial":      "Int",
		"bigserial":   "Int",
		"smallserial": "Int",

		// Float types
		"double precision": "Float",
		"float":            "Float",
		"float4":           "Float",
		"float8":           "Float",
		"real":             "Float",
		"numeric":          "Float",
		"decimal":          "Float",
		"money":            "Float",

		// Boolean
		"boolean": "Boolean",
		"bool":    "Boolean",
	}

	if gqlType, ok := typeMap[baseType]; ok {
		return gqlType
	}

	// Default: capitalize first letter
	if len(baseType) > 0 {
		return strings.ToUpper(string(baseType[0])) + baseType[1:]
	}

	return "String"
}

func (w *Writer) sqlTypeToCustomScalar(sqlType string) string {
	scalarMap := map[string]string{
		"timestamp":                "DateTime",
		"timestamptz":              "DateTime",
		"timestamp with time zone": "DateTime",
		"jsonb":                    "JSON",
		"json":                     "JSON",
		"date":                     "Date",
	}

	baseType := strings.Split(sqlType, "(")[0]
	baseType = strings.TrimSpace(baseType)

	if scalar, ok := scalarMap[baseType]; ok {
		return scalar
	}

	return ""
}

func (w *Writer) isIntegerType(sqlType string) bool {
	intTypes := map[string]bool{
		"integer":     true,
		"int":         true,
		"int2":        true,
		"int4":        true,
		"int8":        true,
		"bigint":      true,
		"smallint":    true,
		"serial":      true,
		"bigserial":   true,
		"smallserial": true,
	}

	baseType := strings.Split(sqlType, "(")[0]
	baseType = strings.TrimSpace(baseType)

	return intTypes[baseType]
}

func (w *Writer) isEnumType(sqlType string, schema *models.Schema) bool {
	for _, enum := range schema.Enums {
		if enum.Name == sqlType {
			return true
		}
	}
	return false
}
pkg/writers/graphql/writer.go (new file, 272 lines)
@@ -0,0 +1,272 @@
package graphql

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

type Writer struct {
	options *writers.WriterOptions
}

func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

func (w *Writer) WriteDatabase(db *models.Database) error {
	content := w.databaseToGraphQL(db)

	if w.options.OutputPath != "" {
		return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
	}

	fmt.Print(content)
	return nil
}

func (w *Writer) WriteSchema(schema *models.Schema) error {
	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}
	return w.WriteDatabase(db)
}

func (w *Writer) WriteTable(table *models.Table) error {
	schema := models.InitSchema(table.Schema)
	schema.Tables = []*models.Table{table}
	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}
	return w.WriteDatabase(db)
}

func (w *Writer) databaseToGraphQL(db *models.Database) string {
	var sb strings.Builder

	// Header comment
	if w.shouldIncludeComments() {
		sb.WriteString("# Generated GraphQL Schema\n")
		if db.Name != "" {
			sb.WriteString(fmt.Sprintf("# Database: %s\n", db.Name))
		}
		sb.WriteString("\n")
	}

	// Custom scalar declarations
	if w.shouldIncludeScalarDeclarations() {
		scalars := w.collectCustomScalars(db)
		if len(scalars) > 0 {
			for _, scalar := range scalars {
				sb.WriteString(fmt.Sprintf("scalar %s\n", scalar))
			}
			sb.WriteString("\n")
		}
	}

	// Enum definitions
	for _, schema := range db.Schemas {
		for _, enum := range schema.Enums {
			sb.WriteString(w.enumToGraphQL(enum))
			sb.WriteString("\n")
		}
	}

	// Type definitions
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Skip join tables (tables with only PK+FK columns)
			if w.isJoinTable(table) {
				continue
			}

			sb.WriteString(w.tableToGraphQL(table, db, schema))
			sb.WriteString("\n")
		}
	}

	return sb.String()
}

func (w *Writer) shouldIncludeComments() bool {
	if w.options.Metadata != nil {
		if include, ok := w.options.Metadata["includeComments"].(bool); ok {
			return include
		}
	}
	return true // Default to true
}

func (w *Writer) shouldIncludeScalarDeclarations() bool {
	if w.options.Metadata != nil {
		if include, ok := w.options.Metadata["includeScalarDeclarations"].(bool); ok {
			return include
		}
	}
	return true // Default to true
}

func (w *Writer) collectCustomScalars(db *models.Database) []string {
	scalarsNeeded := make(map[string]bool)

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if scalar := w.sqlTypeToCustomScalar(col.Type); scalar != "" {
					scalarsNeeded[scalar] = true
				}
			}
		}
	}

	// Convert to sorted slice
	scalars := make([]string, 0, len(scalarsNeeded))
	for scalar := range scalarsNeeded {
		scalars = append(scalars, scalar)
	}
	sort.Strings(scalars)

	return scalars
}

func (w *Writer) isJoinTable(table *models.Table) bool {
	// A join table typically has:
	// 1. Exactly 2 FK constraints
	// 2. Composite primary key on those FK columns
	// 3. No other columns

	fkCount := 0
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			fkCount++
		}
	}

	if fkCount != 2 {
		return false
	}

	// Check if all columns are either PKs or FKs
	for _, col := range table.Columns {
		isFKColumn := false
		for _, constraint := range table.Constraints {
			if constraint.Type == models.ForeignKeyConstraint {
				for _, fkCol := range constraint.Columns {
					if fkCol == col.Name {
						isFKColumn = true
						break
					}
				}
			}
		}

		if !isFKColumn && !col.IsPrimaryKey {
			// Found a column that's neither PK nor FK
			return false
		}
	}

	return true
}

func (w *Writer) enumToGraphQL(enum *models.Enum) string {
	var sb strings.Builder

	sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
	for _, value := range enum.Values {
		sb.WriteString(fmt.Sprintf(" %s\n", value))
	}
	sb.WriteString("}\n")

	return sb.String()
}

func (w *Writer) tableToGraphQL(table *models.Table, db *models.Database, schema *models.Schema) string {
	var sb strings.Builder

	// Type name
	typeName := table.Name

	// Description comment
	if w.shouldIncludeComments() && (table.Description != "" || table.Comment != "") {
		desc := table.Description
		if desc == "" {
			desc = table.Comment
		}
		sb.WriteString(fmt.Sprintf("# %s\n", desc))
	}

	sb.WriteString(fmt.Sprintf("type %s {\n", typeName))

	// Collect and categorize fields
	var idFields, scalarFields, relationFields []string

	for _, column := range table.Columns {
		// Skip FK columns (they become relation fields)
		if w.isForeignKeyColumn(column, table) {
			continue
		}

		gqlType := w.sqlTypeToGraphQL(column.Type, column, table, schema)
		if gqlType == "" {
			continue // Skip if type couldn't be mapped
		}

		// Determine nullability
		if column.NotNull {
			gqlType += "!"
		}

		field := fmt.Sprintf(" %s: %s", column.Name, gqlType)

		if column.IsPrimaryKey {
			idFields = append(idFields, field)
		} else {
			scalarFields = append(scalarFields, field)
		}
	}

	// Add relation fields
	relationFields = w.generateRelationFields(table, db, schema)

	// Write fields in order: ID, scalars (sorted), relations (sorted)
	for _, field := range idFields {
		sb.WriteString(field + "\n")
	}

	sort.Strings(scalarFields)
	for _, field := range scalarFields {
		sb.WriteString(field + "\n")
	}

	if len(relationFields) > 0 {
		if len(scalarFields) > 0 || len(idFields) > 0 {
			sb.WriteString("\n") // Blank line before relations
		}
		sort.Strings(relationFields)
		for _, field := range relationFields {
			sb.WriteString(field + "\n")
		}
	}

	sb.WriteString("}\n")

	return sb.String()
}

func (w *Writer) isForeignKeyColumn(column *models.Column, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			for _, fkCol := range constraint.Columns {
				if fkCol == column.Name {
					return true
				}
			}
		}
	}
	return false
}
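Besides `WriteDatabase`, the writer also exposes `WriteSchema` and `WriteTable`, which wrap `WriteDatabase` as shown above. A small sketch of emitting a single table to stdout; it assumes the imports from the README's Basic Usage example plus the standard `log` package:

```go
table := models.InitTable("Tag", "public")

idCol := models.InitColumn("id", "Tag", "public")
idCol.Type = "bigint"
idCol.IsPrimaryKey = true
idCol.NotNull = true
table.Columns["id"] = idCol

nameCol := models.InitColumn("name", "Tag", "public")
nameCol.Type = "text"
nameCol.NotNull = true
table.Columns["name"] = nameCol

// An empty OutputPath writes the generated SDL to stdout.
writer := graphql.NewWriter(&writers.WriterOptions{})
if err := writer.WriteTable(table); err != nil {
	log.Fatal(err)
}
```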
pkg/writers/graphql/writer_test.go (new file, 412 lines)
@@ -0,0 +1,412 @@
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestWriter_WriteTable_Simple(t *testing.T) {
|
||||||
|
table := models.InitTable("User", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "User", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
nameCol := models.InitColumn("name", "User", "public")
|
||||||
|
nameCol.Type = "text"
|
||||||
|
nameCol.NotNull = true
|
||||||
|
table.Columns["name"] = nameCol
|
||||||
|
|
||||||
|
emailCol := models.InitColumn("email", "User", "public")
|
||||||
|
emailCol.Type = "text"
|
||||||
|
emailCol.NotNull = false
|
||||||
|
table.Columns["email"] = emailCol
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
OutputPath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify output contains type definition
|
||||||
|
if !strings.Contains(output, "type User {") {
|
||||||
|
t.Error("Expected 'type User {' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify fields
|
||||||
|
if !strings.Contains(output, "id: ID!") {
|
||||||
|
t.Error("Expected 'id: ID!' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "name: String!") {
|
||||||
|
t.Error("Expected 'name: String!' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "email: String") {
|
||||||
|
t.Error("Expected 'email: String' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure email is not followed by ! (nullable)
|
||||||
|
if strings.Contains(output, "email: String!") {
|
||||||
|
t.Error("Did not expect 'email: String!' (should be nullable)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_WithEnum(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
// Create enum
|
||||||
|
roleEnum := &models.Enum{
|
||||||
|
Name: "Role",
|
||||||
|
Schema: "public",
|
||||||
|
Values: []string{"ADMIN", "USER", "GUEST"},
|
||||||
|
}
|
||||||
|
schema.Enums = []*models.Enum{roleEnum}
|
||||||
|
|
||||||
|
// Create table with enum field
|
||||||
|
table := models.InitTable("User", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "User", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
roleCol := models.InitColumn("role", "User", "public")
|
||||||
|
roleCol.Type = "Role"
|
||||||
|
roleCol.NotNull = true
|
||||||
|
table.Columns["role"] = roleCol
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify enum definition
|
||||||
|
if !strings.Contains(output, "enum Role {") {
|
||||||
|
t.Error("Expected 'enum Role {' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "ADMIN") {
|
||||||
|
t.Error("Expected 'ADMIN' enum value in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify enum usage in type
|
||||||
|
if !strings.Contains(output, "role: Role!") {
|
||||||
|
t.Error("Expected 'role: Role!' in output")
|
||||||
|
}
|
||||||
|
}

func TestWriter_WriteDatabase_WithRelations(t *testing.T) {
    schema := models.InitSchema("public")

    // Create User table
    userTable := models.InitTable("User", "public")
    userIdCol := models.InitColumn("id", "User", "public")
    userIdCol.Type = "bigint"
    userIdCol.IsPrimaryKey = true
    userIdCol.NotNull = true
    userTable.Columns["id"] = userIdCol

    userNameCol := models.InitColumn("name", "User", "public")
    userNameCol.Type = "text"
    userNameCol.NotNull = true
    userTable.Columns["name"] = userNameCol

    // Create Post table with FK to User
    postTable := models.InitTable("Post", "public")

    postIdCol := models.InitColumn("id", "Post", "public")
    postIdCol.Type = "bigint"
    postIdCol.IsPrimaryKey = true
    postIdCol.NotNull = true
    postTable.Columns["id"] = postIdCol

    titleCol := models.InitColumn("title", "Post", "public")
    titleCol.Type = "text"
    titleCol.NotNull = true
    postTable.Columns["title"] = titleCol

    authorIdCol := models.InitColumn("authorId", "Post", "public")
    authorIdCol.Type = "bigint"
    authorIdCol.NotNull = true
    postTable.Columns["authorId"] = authorIdCol

    // Add FK constraint
    fkConstraint := models.InitConstraint("fk_post_author", models.ForeignKeyConstraint)
    fkConstraint.Schema = "public"
    fkConstraint.Table = "Post"
    fkConstraint.Columns = []string{"authorId"}
    fkConstraint.ReferencedSchema = "public"
    fkConstraint.ReferencedTable = "User"
    fkConstraint.ReferencedColumns = []string{"id"}
    postTable.Constraints["fk_post_author"] = fkConstraint

    schema.Tables = []*models.Table{userTable, postTable}

    db := models.InitDatabase("test")
    db.Schemas = []*models.Schema{schema}

    opts := &writers.WriterOptions{}
    writer := NewWriter(opts)

    output := writer.databaseToGraphQL(db)

    // Verify Post has author field (forward relationship)
    if !strings.Contains(output, "author: User!") {
        t.Error("Expected 'author: User!' in Post type")
    }

    // Verify authorId FK column is NOT in the output
    if strings.Contains(output, "authorId:") {
        t.Error("Did not expect 'authorId:' field in output (FK columns should be hidden)")
    }

    // Verify User has posts field (reverse relationship)
    if !strings.Contains(output, "posts: [Post!]!") {
        t.Error("Expected 'posts: [Post!]!' in User type")
    }
}
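
// For reference only: the assertions in TestWriter_WriteDatabase_WithRelations
// imply SDL output shaped roughly like the constant below. The constant itself,
// the field order, and the indentation are illustrative assumptions, not the
// writer's verbatim output.
const expectedRelationsSDLSketch = `
type User {
  id: ID!
  name: String!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  author: User!
}
`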

func TestWriter_WriteDatabase_CustomScalars(t *testing.T) {
    schema := models.InitSchema("public")

    table := models.InitTable("Event", "public")

    idCol := models.InitColumn("id", "Event", "public")
    idCol.Type = "bigint"
    idCol.IsPrimaryKey = true
    idCol.NotNull = true
    table.Columns["id"] = idCol

    createdAtCol := models.InitColumn("createdAt", "Event", "public")
    createdAtCol.Type = "timestamp"
    createdAtCol.NotNull = true
    table.Columns["createdAt"] = createdAtCol

    metadataCol := models.InitColumn("metadata", "Event", "public")
    metadataCol.Type = "jsonb"
    metadataCol.NotNull = false
    table.Columns["metadata"] = metadataCol

    dateCol := models.InitColumn("eventDate", "Event", "public")
    dateCol.Type = "date"
    dateCol.NotNull = false
    table.Columns["eventDate"] = dateCol

    schema.Tables = []*models.Table{table}

    db := models.InitDatabase("test")
    db.Schemas = []*models.Schema{schema}

    opts := &writers.WriterOptions{}
    writer := NewWriter(opts)

    output := writer.databaseToGraphQL(db)

    // Verify scalar declarations
    if !strings.Contains(output, "scalar DateTime") {
        t.Error("Expected 'scalar DateTime' declaration")
    }

    if !strings.Contains(output, "scalar JSON") {
        t.Error("Expected 'scalar JSON' declaration")
    }

    if !strings.Contains(output, "scalar Date") {
        t.Error("Expected 'scalar Date' declaration")
    }

    // Verify field types
    if !strings.Contains(output, "createdAt: DateTime!") {
        t.Error("Expected 'createdAt: DateTime!' in output")
    }

    if !strings.Contains(output, "metadata: JSON") {
        t.Error("Expected 'metadata: JSON' in output")
    }

    if !strings.Contains(output, "eventDate: Date") {
        t.Error("Expected 'eventDate: Date' in output")
    }
}
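
// Taken together, the tests in this file pin down the column-type mapping the
// writer is expected to apply. The sketch below restates it as a plain lookup;
// the names impliedScalarMap and impliedGraphQLType are hypothetical and the
// writer's real implementation is not shown in this diff.
var impliedScalarMap = map[string]string{
    "text":      "String",
    "timestamp": "DateTime",
    "jsonb":     "JSON",
    "date":      "Date",
}

// impliedGraphQLType summarises the rules the assertions exercise: primary key
// columns (bigint or uuid) become ID, other types go through the map above,
// and NOT NULL columns get a trailing "!". Enum-typed columns (e.g. "Role")
// keep their enum name and are not modelled here; the "String" fallback is an
// assumption not covered by the tests.
func impliedGraphQLType(dbType string, isPrimaryKey, notNull bool) string {
    gqlType, ok := impliedScalarMap[dbType]
    if isPrimaryKey {
        gqlType = "ID"
    } else if !ok {
        gqlType = "String"
    }
    if notNull {
        gqlType += "!"
    }
    return gqlType
}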

func TestWriter_WriteDatabase_ManyToMany(t *testing.T) {
    schema := models.InitSchema("public")

    // Create Post table
    postTable := models.InitTable("Post", "public")
    postIdCol := models.InitColumn("id", "Post", "public")
    postIdCol.Type = "bigint"
    postIdCol.IsPrimaryKey = true
    postIdCol.NotNull = true
    postTable.Columns["id"] = postIdCol

    titleCol := models.InitColumn("title", "Post", "public")
    titleCol.Type = "text"
    titleCol.NotNull = true
    postTable.Columns["title"] = titleCol

    // Create Tag table
    tagTable := models.InitTable("Tag", "public")
    tagIdCol := models.InitColumn("id", "Tag", "public")
    tagIdCol.Type = "bigint"
    tagIdCol.IsPrimaryKey = true
    tagIdCol.NotNull = true
    tagTable.Columns["id"] = tagIdCol

    nameCol := models.InitColumn("name", "Tag", "public")
    nameCol.Type = "text"
    nameCol.NotNull = true
    tagTable.Columns["name"] = nameCol

    // Create PostTag join table
    joinTable := models.InitTable("PostTag", "public")

    postIdJoinCol := models.InitColumn("postId", "PostTag", "public")
    postIdJoinCol.Type = "bigint"
    postIdJoinCol.NotNull = true
    postIdJoinCol.IsPrimaryKey = true
    joinTable.Columns["postId"] = postIdJoinCol

    tagIdJoinCol := models.InitColumn("tagId", "PostTag", "public")
    tagIdJoinCol.Type = "bigint"
    tagIdJoinCol.NotNull = true
    tagIdJoinCol.IsPrimaryKey = true
    joinTable.Columns["tagId"] = tagIdJoinCol

    // Add composite PK constraint
    pkConstraint := models.InitConstraint("pk_posttag", models.PrimaryKeyConstraint)
    pkConstraint.Schema = "public"
    pkConstraint.Table = "PostTag"
    pkConstraint.Columns = []string{"postId", "tagId"}
    joinTable.Constraints["pk_posttag"] = pkConstraint

    // Add FK to Post
    fk1 := models.InitConstraint("fk_posttag_post", models.ForeignKeyConstraint)
    fk1.Schema = "public"
    fk1.Table = "PostTag"
    fk1.Columns = []string{"postId"}
    fk1.ReferencedSchema = "public"
    fk1.ReferencedTable = "Post"
    fk1.ReferencedColumns = []string{"id"}
    joinTable.Constraints["fk_posttag_post"] = fk1

    // Add FK to Tag
    fk2 := models.InitConstraint("fk_posttag_tag", models.ForeignKeyConstraint)
    fk2.Schema = "public"
    fk2.Table = "PostTag"
    fk2.Columns = []string{"tagId"}
    fk2.ReferencedSchema = "public"
    fk2.ReferencedTable = "Tag"
    fk2.ReferencedColumns = []string{"id"}
    joinTable.Constraints["fk_posttag_tag"] = fk2

    schema.Tables = []*models.Table{postTable, tagTable, joinTable}

    db := models.InitDatabase("test")
    db.Schemas = []*models.Schema{schema}

    opts := &writers.WriterOptions{}
    writer := NewWriter(opts)

    output := writer.databaseToGraphQL(db)

    // Verify join table is NOT in output
    if strings.Contains(output, "type PostTag") {
        t.Error("Did not expect 'type PostTag' (join tables should be hidden)")
    }

    // Verify Post has tags field
    if !strings.Contains(output, "tags: [Tag!]!") {
        t.Error("Expected 'tags: [Tag!]!' in Post type")
    }

    // Verify Tag has posts field
    if !strings.Contains(output, "posts: [Post!]!") {
        t.Error("Expected 'posts: [Post!]!' in Tag type")
    }
}
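
// The test above only asserts the observable behaviour (PostTag is dropped and
// each side gets a list field); it does not show how the writer decides that a
// table is a pure join table. isLikelyJoinTable is a plausible heuristic that
// is consistent with the fixture, not the writer's actual code: every column
// must belong to the composite primary key and be covered by a foreign key,
// with exactly two foreign keys pointing at two distinct tables.
func isLikelyJoinTable(columns, pkColumns []string, fkColumnToTarget map[string]string) bool {
    pk := make(map[string]bool, len(pkColumns))
    for _, c := range pkColumns {
        pk[c] = true
    }
    targets := make(map[string]bool)
    for _, col := range columns {
        target, hasFK := fkColumnToTarget[col]
        if !pk[col] || !hasFK {
            return false
        }
        targets[target] = true
    }
    return len(columns) == 2 && len(targets) == 2
}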

func TestWriter_WriteDatabase_UUIDType(t *testing.T) {
    schema := models.InitSchema("public")

    table := models.InitTable("User", "public")

    idCol := models.InitColumn("id", "User", "public")
    idCol.Type = "uuid"
    idCol.IsPrimaryKey = true
    idCol.NotNull = true
    table.Columns["id"] = idCol

    schema.Tables = []*models.Table{table}

    db := models.InitDatabase("test")
    db.Schemas = []*models.Schema{schema}

    opts := &writers.WriterOptions{}
    writer := NewWriter(opts)

    output := writer.databaseToGraphQL(db)

    // UUID primary keys should still map to ID
    if !strings.Contains(output, "id: ID!") {
        t.Error("Expected 'id: ID!' for UUID primary key")
    }
}

func TestWriter_Metadata_NoScalarDeclarations(t *testing.T) {
    schema := models.InitSchema("public")

    table := models.InitTable("Event", "public")

    idCol := models.InitColumn("id", "Event", "public")
    idCol.Type = "bigint"
    idCol.IsPrimaryKey = true
    table.Columns["id"] = idCol

    createdAtCol := models.InitColumn("createdAt", "Event", "public")
    createdAtCol.Type = "timestamp"
    createdAtCol.NotNull = true
    table.Columns["createdAt"] = createdAtCol

    schema.Tables = []*models.Table{table}

    db := models.InitDatabase("test")
    db.Schemas = []*models.Schema{schema}

    opts := &writers.WriterOptions{
        Metadata: map[string]any{
            "includeScalarDeclarations": false,
        },
    }
    writer := NewWriter(opts)

    output := writer.databaseToGraphQL(db)

    // Verify no scalar declarations
    if strings.Contains(output, "scalar DateTime") {
        t.Error("Did not expect 'scalar DateTime' with includeScalarDeclarations=false")
    }

    // But field should still use DateTime
    if !strings.Contains(output, "createdAt: DateTime!") {
        t.Error("Expected 'createdAt: DateTime!' in output")
    }
}
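The "includeScalarDeclarations" metadata key is the only writer option exercised by these tests. For quick reference, an in-package helper that renders a schema without scalar declarations might look like the sketch below; databaseToGraphQL is unexported, so external callers would go through the writer's public entry point, which is not shown in this diff, and the helper name is hypothetical.

// buildSDL is a hypothetical in-package helper mirroring the tests above: it
// renders a models.Database to SDL with scalar declarations suppressed.
func buildSDL(db *models.Database) string {
    opts := &writers.WriterOptions{
        Metadata: map[string]any{
            "includeScalarDeclarations": false, // omit `scalar DateTime` etc.
        },
    }
    writer := NewWriter(opts)
    return writer.databaseToGraphQL(db)
}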
tests/assets/graphql/complex.graphql (new file, 46 lines)
@@ -0,0 +1,46 @@
# Complex GraphQL schema with multiple features

scalar DateTime
scalar JSON
scalar Date

enum Role {
  USER
  ADMIN
  MODERATOR
}

type User {
  id: ID!
  email: String!
  name: String!
  role: Role!
  createdAt: DateTime!
  posts: [Post!]!
  profile: Profile
}

type Profile {
  id: ID!
  bio: String
  avatar: String
  metadata: JSON
  user: User!
}

type Post {
  id: ID!
  title: String!
  slug: String!
  content: String
  published: Boolean!
  publishedAt: Date
  author: User!
  tags: [Tag!]!
}

type Tag {
  id: ID!
  name: String!
  posts: [Post!]!
}
tests/assets/graphql/custom_scalars.graphql (new file, 13 lines)
@@ -0,0 +1,13 @@
# GraphQL schema with custom scalars

scalar DateTime
scalar JSON
scalar Date

type User {
  id: ID!
  email: String!
  createdAt: DateTime!
  metadata: JSON
  birthDate: Date
}
tests/assets/graphql/enums.graphql (new file, 13 lines)
@@ -0,0 +1,13 @@
# GraphQL schema with enums

enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  id: ID!
  email: String!
  role: Role!
}
tests/assets/graphql/relations.graphql (new file, 16 lines)
@@ -0,0 +1,16 @@
# GraphQL schema with relationships

type User {
  id: ID!
  email: String!
  name: String!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  content: String
  published: Boolean!
  author: User!
}
tests/assets/graphql/simple.graphql (new file, 9 lines)
@@ -0,0 +1,9 @@
# Simple GraphQL schema for testing basic type parsing

type User {
  id: ID!
  email: String!
  name: String
  age: Int
  active: Boolean!
}