Added GraphQL

pkg/readers/graphql/README.md (new file, 203 lines)
@@ -0,0 +1,203 @@
# GraphQL Schema Reader

The GraphQL reader parses GraphQL Schema Definition Language (SDL) files and converts them into RelSpec's internal database model.

## Features

- **Standard GraphQL SDL** support (generic, non-framework-specific)
- **Type to Table mapping**: GraphQL types become database tables
- **Field to Column mapping**: GraphQL fields become table columns
- **Enum support**: GraphQL enums are preserved
- **Custom scalars**: DateTime, JSON, Date automatically mapped to appropriate SQL types
- **Implicit relationships**: Detects relationships from field types
- **Many-to-many support**: Creates junction tables for bidirectional array relationships
- **Configurable ID mapping**: Choose between bigint (default) or UUID for ID fields

## Supported GraphQL Features

### Built-in Scalars
- `ID` → bigint (default) or uuid (configurable)
- `String` → text
- `Int` → integer
- `Float` → double precision
- `Boolean` → boolean

### Custom Scalars
- `DateTime` → timestamp
- `JSON` → jsonb
- `Date` → date
- `Time` → time
- `Decimal` → numeric

Additional custom scalars can be mapped via metadata.

### Relationships

Relationships are inferred from field types:

```graphql
type Post {
  id: ID!
  title: String!
  author: User!    # Many-to-one (creates authorId FK column, NOT NULL)
  reviewer: User   # Many-to-one nullable (creates reviewerId FK column, NULL)
  tags: [Tag!]!    # One-to-many or many-to-many (depending on reverse)
}

type User {
  id: ID!
  posts: [Post!]!  # Reverse of Post.author (no FK created)
}

type Tag {
  id: ID!
  posts: [Post!]!  # Many-to-many with Post (creates PostTag junction table)
}
```

**Relationship Detection Rules:**
- Single type reference (`user: User`) → Creates FK column (e.g., `userId`)
- Array type reference (`posts: [Post!]!`) → One-to-many reverse (no FK on this table)
- Bidirectional arrays → Many-to-many (creates junction table)
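
To see these rules in action, here is a minimal sketch (illustrative only, not part of the package) that parses a schema and prints each foreign key the reader inferred. It relies only on the `pkg/models` fields this reader populates (`Constraint.Columns`, `Constraint.ReferencedTable`, `Constraint.ReferencedColumns`):

```go
package main

import (
    "fmt"
    "log"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

func main() {
    reader := graphql.NewReader(&readers.ReaderOptions{FilePath: "schema.graphql"})
    db, err := reader.ReadDatabase()
    if err != nil {
        log.Fatal(err)
    }

    for _, table := range db.Schemas[0].Tables {
        for _, c := range table.Constraints {
            if c.Type != models.ForeignKeyConstraint {
                continue
            }
            // e.g. "Post.authorId -> User.id"
            fmt.Printf("%s.%s -> %s.%s\n",
                table.Name, c.Columns[0], c.ReferencedTable, c.ReferencedColumns[0])
        }
    }
}
```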

### Enums

```graphql
enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  role: Role!
}
```

Enums are preserved in the schema and can be used as column types.
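
For instance, a short sketch of how the parsed enum and the `role` column surface in the model (field names follow this reader's tests; error handling omitted, and the first parsed table is assumed to be `User`):

```go
opts := &readers.ReaderOptions{FilePath: "schema.graphql"}
schema, _ := graphql.NewReader(opts).ReadSchema()

role := schema.Enums[0]
fmt.Println(role.Name, role.Values) // Role [ADMIN USER GUEST]

// The enum name is kept as the column's type:
user := schema.Tables[0] // assumes User is the first parsed type
fmt.Println(user.Columns["role"].Type) // Role
```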

## Usage

### Basic Usage

```go
import (
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With UUID ID Type

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "idType": "uuid", // Map ID scalar to uuid instead of bigint
    },
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With Per-Type ID Mapping

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "typeIdMappings": map[string]string{
            "User": "uuid",   // User.id → uuid
            "Post": "bigint", // Post.id → bigint
        },
    },
}
```

### With Custom Scalar Mappings

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "customScalarMappings": map[string]string{
            "Upload":  "bytea",
            "Decimal": "numeric(10,2)",
        },
    },
}
```

## CLI Usage

```bash
# Convert GraphQL to JSON
relspec convert --from graphql --from-path schema.graphql \
  --to json --to-path schema.json

# Convert GraphQL to GORM models
relspec convert --from graphql --from-path schema.graphql \
  --to gorm --to-path models/ --package models

# Convert GraphQL to PostgreSQL SQL
relspec convert --from graphql --from-path schema.graphql \
  --to pgsql --to-path schema.sql
```

## Metadata Options

| Option | Type | Description | Default |
|--------|------|-------------|---------|
| `idType` | string | Global ID type mapping ("bigint" or "uuid") | "bigint" |
| `typeIdMappings` | map[string]string | Per-type ID mappings | {} |
| `customScalarMappings` | map[string]string | Custom scalar to SQL type mappings | {} |
| `schemaName` | string | Schema name for all tables | "public" |
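
The options can be combined in a single `Metadata` map. A sketch (note that `schemaName` appears in the table above but not in the earlier examples; this assumes it is honored as documented):

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "idType":     "uuid",
        "schemaName": "app",
        "typeIdMappings": map[string]string{
            "Post": "bigint",
        },
        "customScalarMappings": map[string]string{
            "Upload": "bytea",
        },
    },
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```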

## Limitations

- Only supports GraphQL SDL (Schema Definition Language), not queries or mutations
- Directives are ignored (support may be added in the future)
- Interfaces and Unions are not supported
- GraphQL's concept of "schema" is different from database schemas; all types go into a single database schema (default: "public")

## Example

**Input** (`schema.graphql`):
```graphql
scalar DateTime

enum Role {
  ADMIN
  USER
}

type User {
  id: ID!
  email: String!
  role: Role!
  createdAt: DateTime!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  content: String
  published: Boolean!
  author: User!
}
```

**Result**: Database with:
- 2 tables: `User` and `Post`
- `Post` table has an `authorId` foreign key to `User.id`
- `Role` enum with values ADMIN and USER
- Custom scalar `DateTime` mapped to `timestamp`
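
To check this result programmatically, a short sketch in the style of the Usage examples above (imports and error handling as in Basic Usage):

```go
db, err := graphql.NewReader(&readers.ReaderOptions{FilePath: "schema.graphql"}).ReadDatabase()
if err != nil {
    // handle error
}

for _, table := range db.Schemas[0].Tables {
    fmt.Println(table.Name) // User, Post
    for name, col := range table.Columns {
        fmt.Printf("  %s %s (not null: %v)\n", name, col.Type, col.NotNull)
    }
}
```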

pkg/readers/graphql/reader.go (new file, 279 lines)
@@ -0,0 +1,279 @@
package graphql

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

type Reader struct {
    options *readers.ReaderOptions
}

func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("file path is required for GraphQL reader")
    }

    content, err := os.ReadFile(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read file: %w", err)
    }

    return r.parseGraphQL(string(content))
}

func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }

    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schemas found")
    }

    return db.Schemas[0], nil
}

func (r *Reader) ReadTable() (*models.Table, error) {
    schema, err := r.ReadSchema()
    if err != nil {
        return nil, err
    }

    if len(schema.Tables) == 0 {
        return nil, fmt.Errorf("no tables found")
    }

    return schema.Tables[0], nil
}

type parseContext struct {
    inType        bool
    inEnum        bool
    currentType   string
    typeLines     []string
    currentEnum   string
    enumLines     []string
    customScalars map[string]bool
}

func (r *Reader) parseGraphQL(content string) (*models.Database, error) {
    dbName := "database"
    if r.options.Metadata != nil {
        if name, ok := r.options.Metadata["name"].(string); ok {
            dbName = name
        }
    }

    db := models.InitDatabase(dbName)
    schema := models.InitSchema("public")

    ctx := &parseContext{
        customScalars: make(map[string]bool),
    }

    // First pass: collect custom scalars and enums
    scanner := bufio.NewScanner(strings.NewReader(content))
    scalarRegex := regexp.MustCompile(`^\s*scalar\s+(\w+)`)
    enumRegex := regexp.MustCompile(`^\s*enum\s+(\w+)\s*\{`)
    closingBraceRegex := regexp.MustCompile(`^\s*\}`)

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := scalarRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.customScalars[matches[1]] = true
            continue
        }

        if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inEnum = true
            ctx.currentEnum = matches[1]
            ctx.enumLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inEnum {
            r.parseEnum(ctx.currentEnum, ctx.enumLines, schema)
            // Add enum name to custom scalars for type detection
            ctx.customScalars[ctx.currentEnum] = true
            ctx.inEnum = false
            ctx.currentEnum = ""
            ctx.enumLines = nil
            continue
        }

        if ctx.inEnum {
            ctx.enumLines = append(ctx.enumLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    // Second pass: parse types
    scanner = bufio.NewScanner(strings.NewReader(content))
    typeRegex := regexp.MustCompile(`^\s*type\s+(\w+)\s*\{`)
    ctx.inType = false
    ctx.inEnum = false

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := typeRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inType = true
            ctx.currentType = matches[1]
            ctx.typeLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inType {
            if err := r.parseType(ctx.currentType, ctx.typeLines, schema, ctx); err != nil {
                return nil, fmt.Errorf("failed to parse type %s: %w", ctx.currentType, err)
            }
            ctx.inType = false
            ctx.currentType = ""
            ctx.typeLines = nil
            continue
        }

        if ctx.inType {
            ctx.typeLines = append(ctx.typeLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    db.Schemas = []*models.Schema{schema}

    // Third pass: detect and create relationships
    if err := r.detectAndCreateRelationships(schema, ctx); err != nil {
        return nil, fmt.Errorf("failed to create relationships: %w", err)
    }

    return db, nil
}

type fieldInfo struct {
    name          string
    typeName      string
    isArray       bool
    isNullable    bool
    innerNullable bool
}

func (r *Reader) parseType(typeName string, lines []string, schema *models.Schema, ctx *parseContext) error {
    table := models.InitTable(typeName, schema.Name)
    table.Metadata = make(map[string]any)

    // Store field info for relationship detection
    relationFields := make(map[string]*fieldInfo)

    fieldRegex := regexp.MustCompile(`^\s*(\w+)\s*:\s*(\[)?(\w+)(!)?(\])?(!)?\s*`)

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        matches := fieldRegex.FindStringSubmatch(trimmed)
        if matches == nil {
            continue
        }

        fieldName := matches[1]
        hasOpenBracket := matches[2] == "["
        baseType := matches[3]
        innerNonNull := matches[4] == "!"
        hasCloseBracket := matches[5] == "]"
        outerNonNull := matches[6] == "!"

        isArray := hasOpenBracket && hasCloseBracket

        // Determine if this is a scalar or a relation
        if r.isScalarType(baseType, ctx) {
            // This is a scalar field
            column := models.InitColumn(fieldName, table.Name, schema.Name)
            column.Type = r.graphQLTypeToSQL(baseType, fieldName, typeName)

            if isArray {
                // Array of scalars: use array type
                column.Type += "[]"
                column.NotNull = outerNonNull
            } else {
                column.NotNull = !isArray && innerNonNull
            }

            // Check if this is a primary key (convention: field named "id")
            if fieldName == "id" {
                column.IsPrimaryKey = true
                column.AutoIncrement = true
            }

            table.Columns[fieldName] = column
        } else {
            // This is a relation field - store for later processing
            relationFields[fieldName] = &fieldInfo{
                name:          fieldName,
                typeName:      baseType,
                isArray:       isArray,
                isNullable:    !innerNonNull && !isArray,
                innerNullable: !innerNonNull && isArray,
            }
        }
    }

    // Store relation fields in table metadata for relationship detection
    if len(relationFields) > 0 {
        table.Metadata["relationFields"] = relationFields
    }

    schema.Tables = append(schema.Tables, table)
    return nil
}

func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
    enum := &models.Enum{
        Name:   enumName,
        Schema: schema.Name,
        Values: make([]string, 0),
    }

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }
        // Enum values are simple identifiers
        enum.Values = append(enum.Values, trimmed)
    }

    schema.Enums = append(schema.Enums, enum)
}

pkg/readers/graphql/reader_test.go (new file, 362 lines)
@@ -0,0 +1,362 @@
package graphql

import (
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
    }

    userTable := schema.Tables[0]
    if userTable.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", userTable.Name)
    }

    // Verify columns
    expectedColumns := map[string]struct {
        sqlType string
        notNull bool
        isPK    bool
    }{
        "id":     {"bigint", true, true},
        "email":  {"text", true, false},
        "name":   {"text", false, false},
        "age":    {"integer", false, false},
        "active": {"boolean", true, false},
    }

    if len(userTable.Columns) != len(expectedColumns) {
        t.Fatalf("Expected %d columns, got %d", len(expectedColumns), len(userTable.Columns))
    }

    for colName, expected := range expectedColumns {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expected.sqlType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expected.sqlType, col.Type)
        }

        if col.NotNull != expected.notNull {
            t.Errorf("Column '%s': expected NotNull=%v, got %v", colName, expected.notNull, col.NotNull)
        }

        if col.IsPrimaryKey != expected.isPK {
            t.Errorf("Column '%s': expected IsPrimaryKey=%v, got %v", colName, expected.isPK, col.IsPrimaryKey)
        }
    }
}

func TestReader_ReadDatabase_WithRelations(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "relations.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Tables) != 2 {
        t.Fatalf("Expected 2 tables, got %d", len(schema.Tables))
    }

    // Find Post table (should have FK to User)
    var postTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "Post" {
            postTable = table
            break
        }
    }

    if postTable == nil {
        t.Fatal("Post table not found")
    }

    // Verify authorId FK column was created
    authorIdCol, exists := postTable.Columns["authorId"]
    if !exists {
        t.Fatal("Expected 'authorId' FK column not found in Post table")
    }

    if authorIdCol.Type != "bigint" {
        t.Errorf("Expected authorId type 'bigint', got '%s'", authorIdCol.Type)
    }

    if !authorIdCol.NotNull {
        t.Error("Expected authorId to be NOT NULL")
    }

    // Verify FK constraint
    fkConstraintFound := false
    for _, constraint := range postTable.Constraints {
        if constraint.Type == models.ForeignKeyConstraint {
            if constraint.ReferencedTable == "User" && len(constraint.Columns) > 0 && constraint.Columns[0] == "authorId" {
                fkConstraintFound = true
                if constraint.OnDelete != "CASCADE" {
                    t.Errorf("Expected OnDelete CASCADE, got %s", constraint.OnDelete)
                }
                break
            }
        }
    }

    if !fkConstraintFound {
        t.Error("Foreign key constraint from Post to User not found")
    }
}

func TestReader_ReadDatabase_WithEnums(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "enums.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Enums) != 1 {
        t.Fatalf("Expected 1 enum, got %d", len(schema.Enums))
    }

    roleEnum := schema.Enums[0]
    if roleEnum.Name != "Role" {
        t.Errorf("Expected enum name 'Role', got '%s'", roleEnum.Name)
    }

    expectedValues := []string{"ADMIN", "USER", "GUEST"}
    if len(roleEnum.Values) != len(expectedValues) {
        t.Fatalf("Expected %d enum values, got %d", len(expectedValues), len(roleEnum.Values))
    }

    for i, expected := range expectedValues {
        if roleEnum.Values[i] != expected {
            t.Errorf("Expected enum value '%s' at index %d, got '%s'", expected, i, roleEnum.Values[i])
        }
    }

    // Verify role column in User table
    userTable := schema.Tables[0]
    roleCol, exists := userTable.Columns["role"]
    if !exists {
        t.Fatal("Expected 'role' column not found")
    }

    if roleCol.Type != "Role" {
        t.Errorf("Expected role type 'Role', got '%s'", roleCol.Type)
    }
}

func TestReader_ReadDatabase_CustomScalars(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "custom_scalars.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    // Verify custom scalar mappings
    expectedTypes := map[string]string{
        "createdAt": "timestamp",
        "metadata":  "jsonb",
        "birthDate": "date",
    }

    for colName, expectedType := range expectedTypes {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expectedType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expectedType, col.Type)
        }
    }
}

func TestReader_ReadDatabase_UUIDMetadata(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
        Metadata: map[string]interface{}{
            "idType": "uuid",
        },
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    idCol, exists := userTable.Columns["id"]
    if !exists {
        t.Fatal("Expected 'id' column not found")
    }

    if idCol.Type != "uuid" {
        t.Errorf("Expected id type 'uuid' with metadata, got '%s'", idCol.Type)
    }
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "complex.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    // Should have 5 tables: User, Profile, Post, Tag, and PostTag (join table)
    expectedTableCount := 5
    if len(schema.Tables) != expectedTableCount {
        t.Fatalf("Expected %d tables, got %d", expectedTableCount, len(schema.Tables))
    }

    // Verify PostTag join table exists (many-to-many between Post and Tag)
    var joinTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "PostTag" {
            joinTable = table
            break
        }
    }

    if joinTable == nil {
        t.Fatal("Expected PostTag join table not found")
    }

    // Verify join table has both FK columns
    if _, exists := joinTable.Columns["postId"]; !exists {
        t.Error("Expected 'postId' column in PostTag join table")
    }

    if _, exists := joinTable.Columns["tagId"]; !exists {
        t.Error("Expected 'tagId' column in PostTag join table")
    }

    // Verify composite primary key
    pkFound := false
    for _, constraint := range joinTable.Constraints {
        if constraint.Type == models.PrimaryKeyConstraint {
            if len(constraint.Columns) == 2 {
                pkFound = true
            }
            break
        }
    }

    if !pkFound {
        t.Error("Expected composite primary key in PostTag join table")
    }
}

func TestReader_ReadSchema(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    schema, err := reader.ReadSchema()
    if err != nil {
        t.Fatalf("ReadSchema() error = %v", err)
    }

    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Errorf("Expected 1 table, got %d", len(schema.Tables))
    }
}

func TestReader_ReadTable(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    table, err := reader.ReadTable()
    if err != nil {
        t.Fatalf("ReadTable() error = %v", err)
    }

    if table.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", table.Name)
    }
}

func TestReader_InvalidPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "/nonexistent/path.graphql",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for invalid path, got nil")
    }
}

func TestReader_EmptyPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for empty path, got nil")
    }
}

pkg/readers/graphql/relationships.go (new file, 225 lines)
@@ -0,0 +1,225 @@
package graphql

import (
    "fmt"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (r *Reader) detectAndCreateRelationships(schema *models.Schema, ctx *parseContext) error {
    // Build table lookup map
    tableMap := make(map[string]*models.Table)
    for _, table := range schema.Tables {
        tableMap[table.Name] = table
    }

    // Process each table's relation fields
    for _, table := range schema.Tables {
        relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
        if !ok || len(relationFields) == 0 {
            continue
        }

        for fieldName, fieldInfo := range relationFields {
            targetTable, exists := tableMap[fieldInfo.typeName]
            if !exists {
                // Referenced type doesn't exist - might be an interface/union, skip
                continue
            }

            if fieldInfo.isArray {
                // This is a one-to-many or many-to-many reverse side
                // Check if target table has a reverse array field
                if r.hasReverseArrayField(targetTable, table.Name) {
                    // Bidirectional array = many-to-many
                    // Only create join table once (lexicographically first table creates it)
                    if table.Name < targetTable.Name {
                        if err := r.createManyToManyJoinTable(schema, table, targetTable, fieldName, tableMap); err != nil {
                            return err
                        }
                    }
                }
                // For one-to-many, no action needed (FK is on the other table)
            } else {
                // This is a many-to-one or one-to-one
                // Create FK column on this table
                if err := r.createForeignKeyColumn(table, targetTable, fieldName, fieldInfo.isNullable, schema); err != nil {
                    return err
                }
            }
        }
    }

    // Clean up metadata
    for _, table := range schema.Tables {
        delete(table.Metadata, "relationFields")
    }

    return nil
}

func (r *Reader) hasReverseArrayField(table *models.Table, targetTypeName string) bool {
    relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
    if !ok {
        return false
    }

    for _, fieldInfo := range relationFields {
        if fieldInfo.typeName == targetTypeName && fieldInfo.isArray {
            return true
        }
    }

    return false
}

func (r *Reader) createForeignKeyColumn(fromTable, toTable *models.Table, fieldName string, nullable bool, schema *models.Schema) error {
    // Get primary key from target table
    pkCol := toTable.GetPrimaryKey()
    if pkCol == nil {
        return fmt.Errorf("target table %s has no primary key for relationship", toTable.Name)
    }

    // Create FK column name: {fieldName}Id
    fkColName := fieldName + "Id"

    // Check if column already exists (shouldn't happen but be safe)
    if _, exists := fromTable.Columns[fkColName]; exists {
        return nil
    }

    // Create FK column
    fkCol := models.InitColumn(fkColName, fromTable.Name, schema.Name)
    fkCol.Type = pkCol.Type
    fkCol.NotNull = !nullable

    fromTable.Columns[fkColName] = fkCol

    // Create FK constraint
    constraint := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", fromTable.Name, fieldName),
        models.ForeignKeyConstraint,
    )
    constraint.Schema = schema.Name
    constraint.Table = fromTable.Name
    constraint.Columns = []string{fkColName}
    constraint.ReferencedSchema = schema.Name
    constraint.ReferencedTable = toTable.Name
    constraint.ReferencedColumns = []string{pkCol.Name}
    constraint.OnDelete = "CASCADE"
    constraint.OnUpdate = "RESTRICT"

    fromTable.Constraints[constraint.Name] = constraint

    // Create relationship
    relationship := models.InitRelationship(
        fmt.Sprintf("rel_%s_%s", fromTable.Name, fieldName),
        models.OneToMany,
    )
    relationship.FromTable = fromTable.Name
    relationship.FromSchema = schema.Name
    relationship.FromColumns = []string{fkColName}
    relationship.ToTable = toTable.Name
    relationship.ToSchema = schema.Name
    relationship.ToColumns = []string{pkCol.Name}
    relationship.ForeignKey = constraint.Name

    fromTable.Relationships[relationship.Name] = relationship

    return nil
}

func (r *Reader) createManyToManyJoinTable(schema *models.Schema, table1, table2 *models.Table, fieldName string, tableMap map[string]*models.Table) error {
    // Create join table name
    joinTableName := table1.Name + table2.Name

    // Check if join table already exists
    if _, exists := tableMap[joinTableName]; exists {
        return nil
    }

    // Get primary keys
    pk1 := table1.GetPrimaryKey()
    pk2 := table2.GetPrimaryKey()

    if pk1 == nil || pk2 == nil {
        return fmt.Errorf("cannot create many-to-many: tables must have primary keys")
    }

    // Create join table
    joinTable := models.InitTable(joinTableName, schema.Name)

    // Create FK column for table1
    fkCol1Name := strings.ToLower(table1.Name) + "Id"
    fkCol1 := models.InitColumn(fkCol1Name, joinTable.Name, schema.Name)
    fkCol1.Type = pk1.Type
    fkCol1.NotNull = true
    joinTable.Columns[fkCol1Name] = fkCol1

    // Create FK column for table2
    fkCol2Name := strings.ToLower(table2.Name) + "Id"
    fkCol2 := models.InitColumn(fkCol2Name, joinTable.Name, schema.Name)
    fkCol2.Type = pk2.Type
    fkCol2.NotNull = true
    joinTable.Columns[fkCol2Name] = fkCol2

    // Create composite primary key
    pkConstraint := models.InitConstraint(
        fmt.Sprintf("pk_%s", joinTableName),
        models.PrimaryKeyConstraint,
    )
    pkConstraint.Schema = schema.Name
    pkConstraint.Table = joinTable.Name
    pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
    joinTable.Constraints[pkConstraint.Name] = pkConstraint

    // Create FK constraint to table1
    fk1 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, table1.Name),
        models.ForeignKeyConstraint,
    )
    fk1.Schema = schema.Name
    fk1.Table = joinTable.Name
    fk1.Columns = []string{fkCol1Name}
    fk1.ReferencedSchema = schema.Name
    fk1.ReferencedTable = table1.Name
    fk1.ReferencedColumns = []string{pk1.Name}
    fk1.OnDelete = "CASCADE"
    fk1.OnUpdate = "RESTRICT"
    joinTable.Constraints[fk1.Name] = fk1

    // Create FK constraint to table2
    fk2 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, table2.Name),
        models.ForeignKeyConstraint,
    )
    fk2.Schema = schema.Name
    fk2.Table = joinTable.Name
    fk2.Columns = []string{fkCol2Name}
    fk2.ReferencedSchema = schema.Name
    fk2.ReferencedTable = table2.Name
    fk2.ReferencedColumns = []string{pk2.Name}
    fk2.OnDelete = "CASCADE"
    fk2.OnUpdate = "RESTRICT"
    joinTable.Constraints[fk2.Name] = fk2

    // Create relationships
    rel1 := models.InitRelationship(
        fmt.Sprintf("rel_%s_%s_%s", joinTableName, table1.Name, table2.Name),
        models.ManyToMany,
    )
    rel1.FromTable = table1.Name
    rel1.FromSchema = schema.Name
    rel1.ToTable = table2.Name
    rel1.ToSchema = schema.Name
    rel1.ThroughTable = joinTableName
    rel1.ThroughSchema = schema.Name
    joinTable.Relationships[rel1.Name] = rel1

    // Add join table to schema
    schema.Tables = append(schema.Tables, joinTable)
    tableMap[joinTableName] = joinTable

    return nil
}

pkg/readers/graphql/type_mapping.go (new file, 97 lines)
@@ -0,0 +1,97 @@
package graphql

func (r *Reader) isScalarType(typeName string, ctx *parseContext) bool {
    // Built-in GraphQL scalars
    builtInScalars := map[string]bool{
        "ID":      true,
        "String":  true,
        "Int":     true,
        "Float":   true,
        "Boolean": true,
    }

    if builtInScalars[typeName] {
        return true
    }

    // Custom scalars declared in the schema
    if ctx.customScalars[typeName] {
        return true
    }

    // Common custom scalars (even if not declared)
    commonCustomScalars := map[string]bool{
        "DateTime": true,
        "JSON":     true,
        "Date":     true,
        "Time":     true,
        "Upload":   true,
        "Decimal":  true,
    }

    return commonCustomScalars[typeName]
}

func (r *Reader) graphQLTypeToSQL(gqlType string, fieldName string, typeName string) string {
    // Check for ID type with configurable mapping
    if gqlType == "ID" {
        // Check metadata for ID type preference
        if r.options.Metadata != nil {
            // Global idType setting
            if idType, ok := r.options.Metadata["idType"].(string); ok {
                if idType == "uuid" {
                    return "uuid"
                }
            }

            // Per-type ID mapping
            if typeIdMappings, ok := r.options.Metadata["typeIdMappings"].(map[string]string); ok {
                if idType, ok := typeIdMappings[typeName]; ok {
                    if idType == "uuid" {
                        return "uuid"
                    }
                }
            }
        }

        return "bigint" // Default
    }

    // Custom scalar mappings
    if r.options.Metadata != nil {
        if customMappings, ok := r.options.Metadata["customScalarMappings"].(map[string]string); ok {
            if sqlType, ok := customMappings[gqlType]; ok {
                return sqlType
            }
        }
    }

    // Built-in custom scalar mappings
    customScalars := map[string]string{
        "DateTime": "timestamp",
        "JSON":     "jsonb",
        "Date":     "date",
        "Time":     "time",
        "Decimal":  "numeric",
        "Upload":   "bytea",
    }
    if sqlType, ok := customScalars[gqlType]; ok {
        return sqlType
    }

    // Standard scalar mappings
    typeMap := map[string]string{
        "String":  "text",
        "Int":     "integer",
        "Float":   "double precision",
        "Boolean": "boolean",
    }

    if sqlType, ok := typeMap[gqlType]; ok {
        return sqlType
    }

    // If not a known scalar, assume it's an enum or custom type
    // Return as-is (might be an enum)
    return gqlType
}