better tests
This commit is contained in:
544
pkg/readers/bun/reader.go
Normal file
544
pkg/readers/bun/reader.go
Normal file
@@ -0,0 +1,544 @@
|
||||
package bun
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
)
|
||||
|
||||
// Reader implements the readers.Reader interface for Bun Go model files.
// It parses Go source with go/parser and extracts table metadata from
// struct fields carrying `bun:"..."` tags and from TableName() methods.
type Reader struct {
	// options carries caller-supplied configuration; options.FilePath may
	// point at a single .go file or at a directory of model files.
	options *readers.ReaderOptions
}
|
||||
|
||||
// NewReader creates a new Bun reader with the given options
|
||||
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||
return &Reader{
|
||||
options: options,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadDatabase reads Bun Go model files and returns a Database model
|
||||
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||
if r.options.FilePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for Bun reader")
|
||||
}
|
||||
|
||||
// Check if path is a directory or file
|
||||
info, err := os.Stat(r.options.FilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to stat path: %w", err)
|
||||
}
|
||||
|
||||
var files []string
|
||||
if info.IsDir() {
|
||||
// Read all .go files in directory
|
||||
entries, err := os.ReadDir(r.options.FilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read directory: %w", err)
|
||||
}
|
||||
for _, entry := range entries {
|
||||
if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".go") && !strings.HasSuffix(entry.Name(), "_test.go") {
|
||||
files = append(files, filepath.Join(r.options.FilePath, entry.Name()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
files = append(files, r.options.FilePath)
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
return nil, fmt.Errorf("no Go files found")
|
||||
}
|
||||
|
||||
// Parse all files and collect tables
|
||||
db := models.InitDatabase("database")
|
||||
schemaMap := make(map[string]*models.Schema)
|
||||
|
||||
for _, file := range files {
|
||||
tables, err := r.parseFile(file)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse file %s: %w", file, err)
|
||||
}
|
||||
|
||||
for _, table := range tables {
|
||||
// Get or create schema
|
||||
schema, ok := schemaMap[table.Schema]
|
||||
if !ok {
|
||||
schema = models.InitSchema(table.Schema)
|
||||
schemaMap[table.Schema] = schema
|
||||
}
|
||||
schema.Tables = append(schema.Tables, table)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert schema map to slice
|
||||
for _, schema := range schemaMap {
|
||||
db.Schemas = append(db.Schemas, schema)
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
// ReadSchema reads Bun Go model files and returns a Schema model
|
||||
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||
db, err := r.ReadDatabase()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(db.Schemas) == 0 {
|
||||
return nil, fmt.Errorf("no schemas found")
|
||||
}
|
||||
|
||||
return db.Schemas[0], nil
|
||||
}
|
||||
|
||||
// ReadTable reads a Bun Go model file and returns a Table model
|
||||
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||
schema, err := r.ReadSchema()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(schema.Tables) == 0 {
|
||||
return nil, fmt.Errorf("no tables found")
|
||||
}
|
||||
|
||||
return schema.Tables[0], nil
|
||||
}
|
||||
|
||||
// parseFile parses a single Go file and extracts table models.
// It makes two passes over the top-level declarations: the first collects
// every struct whose fields carry bun tags and converts it to a Table; the
// second finds TableName() methods and lets their return value override the
// table/schema derived from tags. Returns one Table per model struct.
func (r *Reader) parseFile(filename string) ([]*models.Table, error) {
	fset := token.NewFileSet()
	node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return nil, fmt.Errorf("failed to parse Go file: %w", err)
	}

	var tables []*models.Table
	// structMap indexes tables by struct name so the second pass can match
	// a TableName() receiver back to its table.
	structMap := make(map[string]*models.Table)

	// First pass: collect struct definitions.
	for _, decl := range node.Decls {
		genDecl, ok := decl.(*ast.GenDecl)
		if !ok || genDecl.Tok != token.TYPE {
			continue
		}

		for _, spec := range genDecl.Specs {
			typeSpec, ok := spec.(*ast.TypeSpec)
			if !ok {
				continue
			}

			structType, ok := typeSpec.Type.(*ast.StructType)
			if !ok {
				continue
			}

			// Check if this struct has bun tags (indicates it's a model).
			if r.hasModelFields(structType) {
				table := r.parseStruct(typeSpec.Name.Name, structType)
				if table != nil {
					structMap[typeSpec.Name.Name] = table
					tables = append(tables, table)
				}
			}
		}
	}

	// Second pass: find TableName() methods (for redundancy/verification).
	for _, decl := range node.Decls {
		funcDecl, ok := decl.(*ast.FuncDecl)
		if !ok || funcDecl.Name.Name != "TableName" {
			continue
		}

		// Get receiver type; skip plain functions named TableName.
		if funcDecl.Recv == nil || len(funcDecl.Recv.List) == 0 {
			continue
		}

		receiverType := r.getReceiverType(funcDecl.Recv.List[0].Type)
		if receiverType == "" {
			continue
		}

		// Find the table for this struct; ignore methods on non-model types.
		table, ok := structMap[receiverType]
		if !ok {
			continue
		}

		// Parse the return value (this is redundant with the bun tag, but
		// provides verification); a non-empty result wins over the tag.
		tableName, schemaName := r.parseTableNameMethod(funcDecl)
		if tableName != "" {
			table.Name = tableName
			if schemaName != "" {
				table.Schema = schemaName
			}

			// Update columns so their Table/Schema stay consistent with the
			// overridden names.
			for _, col := range table.Columns {
				col.Table = tableName
				col.Schema = table.Schema
			}
		}
	}

	return tables, nil
}
|
||||
|
||||
// getReceiverType extracts the type name from a receiver
|
||||
func (r *Reader) getReceiverType(expr ast.Expr) string {
|
||||
switch t := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return t.Name
|
||||
case *ast.StarExpr:
|
||||
if ident, ok := t.X.(*ast.Ident); ok {
|
||||
return ident.Name
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// parseTableNameMethod parses a TableName() method and extracts the table and schema name
|
||||
func (r *Reader) parseTableNameMethod(funcDecl *ast.FuncDecl) (string, string) {
|
||||
if funcDecl.Body == nil {
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// Look for return statement
|
||||
for _, stmt := range funcDecl.Body.List {
|
||||
retStmt, ok := stmt.(*ast.ReturnStmt)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(retStmt.Results) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Get the return value (should be a string literal)
|
||||
if basicLit, ok := retStmt.Results[0].(*ast.BasicLit); ok {
|
||||
if basicLit.Kind == token.STRING {
|
||||
// Remove quotes
|
||||
fullName := strings.Trim(basicLit.Value, "\"")
|
||||
|
||||
// Split schema.table
|
||||
if strings.Contains(fullName, ".") {
|
||||
parts := strings.SplitN(fullName, ".", 2)
|
||||
return parts[1], parts[0]
|
||||
}
|
||||
|
||||
return fullName, "public"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// hasModelFields checks if the struct has fields with bun tags
|
||||
func (r *Reader) hasModelFields(structType *ast.StructType) bool {
|
||||
for _, field := range structType.Fields.List {
|
||||
if field.Tag != nil {
|
||||
tag := field.Tag.Value
|
||||
if strings.Contains(tag, "bun:") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// parseStruct converts an AST struct to a Table model.
// The table name comes from a `bun:"table:..."` tag when present, otherwise
// it is derived from the struct name; the schema defaults to "public".
// Embedded bun.BaseModel and relationship fields are skipped; every other
// bun-tagged field becomes a column with a 1-based sequence number.
func (r *Reader) parseStruct(structName string, structType *ast.StructType) *models.Table {
	// Extract table name from the first field's bun tag if present.
	tableName := ""
	schemaName := "public"

	// Look for a table name in struct tags (conventionally on BaseModel).
	for _, field := range structType.Fields.List {
		if field.Tag != nil {
			tag := field.Tag.Value
			if strings.Contains(tag, "bun:\"table:") {
				tableName, schemaName = r.extractTableNameFromTag(tag)
				break
			}
		}
	}

	// If no table name found, derive from struct name.
	if tableName == "" {
		tableName = r.deriveTableName(structName)
	}

	table := models.InitTable(tableName, schemaName)
	// sequence is the 1-based column position within the table.
	sequence := uint(1)

	// Parse fields into columns.
	for _, field := range structType.Fields.List {
		if field.Tag == nil {
			continue
		}

		tag := field.Tag.Value
		if !strings.Contains(tag, "bun:") {
			continue
		}

		// Skip BaseModel and relationship fields — neither is a column.
		if r.isBaseModel(field) || r.isRelationship(tag) {
			continue
		}

		// Get field name (empty for embedded fields).
		fieldName := ""
		if len(field.Names) > 0 {
			fieldName = field.Names[0].Name
		}

		// Parse column from tag; nil means the field had no usable bun tag.
		column := r.parseColumn(fieldName, field.Type, tag, sequence)
		if column != nil {
			column.Table = tableName
			column.Schema = schemaName
			table.Columns[column.Name] = column
			sequence++
		}
	}

	return table
}
|
||||
|
||||
// isBaseModel checks if a field is bun.BaseModel
|
||||
func (r *Reader) isBaseModel(field *ast.Field) bool {
|
||||
if len(field.Names) > 0 {
|
||||
return false // BaseModel is embedded, so it has no name
|
||||
}
|
||||
|
||||
// Check if the type is bun.BaseModel
|
||||
selExpr, ok := field.Type.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
ident, ok := selExpr.X.(*ast.Ident)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return ident.Name == "bun" && selExpr.Sel.Name == "BaseModel"
|
||||
}
|
||||
|
||||
// isRelationship checks if a field is a relationship based on bun tag
|
||||
func (r *Reader) isRelationship(tag string) bool {
|
||||
return strings.Contains(tag, "bun:\"rel:") || strings.Contains(tag, ",rel:")
|
||||
}
|
||||
|
||||
// extractTableNameFromTag extracts table and schema from bun tag
|
||||
func (r *Reader) extractTableNameFromTag(tag string) (string, string) {
|
||||
// Extract bun tag value
|
||||
re := regexp.MustCompile(`bun:"table:([^"]+)"`)
|
||||
matches := re.FindStringSubmatch(tag)
|
||||
if len(matches) < 2 {
|
||||
return "", "public"
|
||||
}
|
||||
|
||||
tablePart := matches[1]
|
||||
parts := strings.Split(tablePart, ",")
|
||||
fullName := parts[0]
|
||||
|
||||
// Split schema.table
|
||||
if strings.Contains(fullName, ".") {
|
||||
schemaParts := strings.SplitN(fullName, ".", 2)
|
||||
return schemaParts[1], schemaParts[0]
|
||||
}
|
||||
|
||||
return fullName, "public"
|
||||
}
|
||||
|
||||
// deriveTableName derives a table name from struct name
|
||||
func (r *Reader) deriveTableName(structName string) string {
|
||||
// Remove "Model" prefix if present
|
||||
name := strings.TrimPrefix(structName, "Model")
|
||||
|
||||
// Convert PascalCase to snake_case
|
||||
var result strings.Builder
|
||||
for i, r := range name {
|
||||
if i > 0 && r >= 'A' && r <= 'Z' {
|
||||
result.WriteRune('_')
|
||||
}
|
||||
result.WriteRune(r)
|
||||
}
|
||||
|
||||
return strings.ToLower(result.String())
|
||||
}
|
||||
|
||||
// parseColumn parses a struct field into a Column model
|
||||
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) *models.Column {
|
||||
// Extract bun tag
|
||||
bunTag := r.extractBunTag(tag)
|
||||
if bunTag == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
column := models.InitColumn("", "", "")
|
||||
column.Sequence = sequence
|
||||
|
||||
// Parse bun tag
|
||||
parts := strings.Split(bunTag, ",")
|
||||
if len(parts) > 0 {
|
||||
column.Name = parts[0]
|
||||
}
|
||||
|
||||
// Parse tag attributes
|
||||
for _, part := range parts[1:] {
|
||||
kv := strings.SplitN(part, ":", 2)
|
||||
key := kv[0]
|
||||
value := ""
|
||||
if len(kv) > 1 {
|
||||
value = kv[1]
|
||||
}
|
||||
|
||||
switch key {
|
||||
case "type":
|
||||
// Parse type and extract length if present (e.g., varchar(255))
|
||||
column.Type, column.Length = r.parseTypeWithLength(value)
|
||||
case "pk":
|
||||
column.IsPrimaryKey = true
|
||||
case "notnull":
|
||||
column.NotNull = true
|
||||
case "autoincrement":
|
||||
column.AutoIncrement = true
|
||||
case "default":
|
||||
column.Default = value
|
||||
}
|
||||
}
|
||||
|
||||
// If no type specified in tag, derive from Go type
|
||||
if column.Type == "" {
|
||||
column.Type = r.goTypeToSQL(fieldType)
|
||||
}
|
||||
|
||||
// Determine if nullable based on Go type
|
||||
if r.isNullableType(fieldType) {
|
||||
column.NotNull = false
|
||||
} else if !column.IsPrimaryKey && column.Type != "" {
|
||||
// If it's not a nullable type and not a primary key, check the tag
|
||||
if !strings.Contains(bunTag, "notnull") {
|
||||
// If notnull is not explicitly set, it might still be nullable
|
||||
// This is a heuristic - we default to nullable unless specified
|
||||
}
|
||||
}
|
||||
|
||||
return column
|
||||
}
|
||||
|
||||
// extractBunTag extracts the bun tag value from a struct tag
|
||||
func (r *Reader) extractBunTag(tag string) string {
|
||||
// Remove backticks
|
||||
tag = strings.Trim(tag, "`")
|
||||
|
||||
// Use reflect.StructTag to properly parse
|
||||
st := reflect.StructTag(tag)
|
||||
return st.Get("bun")
|
||||
}
|
||||
|
||||
// parseTypeWithLength parses a type string and extracts length if present
// e.g., "varchar(255)" returns ("varchar", 255).
// Strings without a "(digits)" suffix are returned unchanged with length 0.
// NOTE(review): the Sscanf error is deliberately ignored — the regex has
// already guaranteed matches[2] is all digits.
func (r *Reader) parseTypeWithLength(typeStr string) (string, int) {
	// Check for type with length: varchar(255), char(10), etc.
	re := regexp.MustCompile(`^([a-zA-Z\s]+)\((\d+)\)$`)
	matches := re.FindStringSubmatch(typeStr)
	if len(matches) == 3 {
		length := 0
		fmt.Sscanf(matches[2], "%d", &length)
		return strings.TrimSpace(matches[1]), length
	}
	return typeStr, 0
}
|
||||
|
||||
// goTypeToSQL maps a Go AST type expression to a SQL column type.
// Pointers are unwrapped and mapped by their element type (nullability is
// handled separately by isNullableType); unrecognized types fall back to
// "text".
func (r *Reader) goTypeToSQL(expr ast.Expr) string {
	switch t := expr.(type) {
	case *ast.Ident:
		switch t.Name {
		case "int", "int32":
			return "integer"
		case "int64":
			return "bigint"
		case "string":
			return "text"
		case "bool":
			return "boolean"
		case "float32":
			return "real"
		case "float64":
			return "double precision"
		}
	case *ast.SelectorExpr:
		// Handle qualified types like time.Time, sql_types.SqlString, etc.
		if ident, ok := t.X.(*ast.Ident); ok {
			switch ident.Name {
			case "time":
				if t.Sel.Name == "Time" {
					return "timestamp"
				}
			case "resolvespec_common", "sql_types":
				// Nullable wrapper types map via their own table.
				return r.sqlTypeToSQL(t.Sel.Name)
			}
		}
	case *ast.StarExpr:
		// Pointer type — map the pointee; the pointer itself only affects
		// nullability, not the SQL type.
		return r.goTypeToSQL(t.X)
	}
	return "text"
}
|
||||
|
||||
// sqlTypeToSQL maps sql_types wrapper type names (e.g. "SqlString") to
// their underlying SQL column types; unrecognized names fall back to
// "text".
func (r *Reader) sqlTypeToSQL(typeName string) string {
	switch typeName {
	case "SqlString":
		return "text"
	case "SqlInt":
		return "integer"
	case "SqlInt64":
		return "bigint"
	case "SqlFloat":
		return "double precision"
	case "SqlBool":
		return "boolean"
	case "SqlTime":
		return "timestamp"
	default:
		return "text"
	}
}
|
||||
|
||||
// isNullableType checks if a Go type represents a nullable field
|
||||
func (r *Reader) isNullableType(expr ast.Expr) bool {
|
||||
switch t := expr.(type) {
|
||||
case *ast.StarExpr:
|
||||
// Pointer type is nullable
|
||||
return true
|
||||
case *ast.SelectorExpr:
|
||||
// Check for sql_types nullable types
|
||||
if ident, ok := t.X.(*ast.Ident); ok {
|
||||
if ident.Name == "resolvespec_common" || ident.Name == "sql_types" {
|
||||
return strings.HasPrefix(t.Sel.Name, "Sql")
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
574
pkg/readers/gorm/reader.go
Normal file
574
pkg/readers/gorm/reader.go
Normal file
@@ -0,0 +1,574 @@
|
||||
package gorm
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
)
|
||||
|
||||
// Reader implements the readers.Reader interface for GORM Go model files.
// It parses Go source with go/parser and extracts table metadata from
// struct fields carrying `gorm:"..."` tags and from TableName() methods.
type Reader struct {
	// options carries caller-supplied configuration; options.FilePath may
	// point at a single .go file or at a directory of model files.
	options *readers.ReaderOptions
}
|
||||
|
||||
// NewReader creates a new GORM reader with the given options
|
||||
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||
return &Reader{
|
||||
options: options,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadDatabase reads GORM Go model files and returns a Database model
|
||||
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||
if r.options.FilePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for GORM reader")
|
||||
}
|
||||
|
||||
// Check if path is a directory or file
|
||||
info, err := os.Stat(r.options.FilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to stat path: %w", err)
|
||||
}
|
||||
|
||||
var files []string
|
||||
if info.IsDir() {
|
||||
// Read all .go files in directory
|
||||
entries, err := os.ReadDir(r.options.FilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read directory: %w", err)
|
||||
}
|
||||
for _, entry := range entries {
|
||||
if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".go") && !strings.HasSuffix(entry.Name(), "_test.go") {
|
||||
files = append(files, filepath.Join(r.options.FilePath, entry.Name()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
files = append(files, r.options.FilePath)
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
return nil, fmt.Errorf("no Go files found")
|
||||
}
|
||||
|
||||
// Parse all files and collect tables
|
||||
db := models.InitDatabase("database")
|
||||
schemaMap := make(map[string]*models.Schema)
|
||||
|
||||
for _, file := range files {
|
||||
tables, err := r.parseFile(file)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse file %s: %w", file, err)
|
||||
}
|
||||
|
||||
for _, table := range tables {
|
||||
// Get or create schema
|
||||
schema, ok := schemaMap[table.Schema]
|
||||
if !ok {
|
||||
schema = models.InitSchema(table.Schema)
|
||||
schemaMap[table.Schema] = schema
|
||||
}
|
||||
schema.Tables = append(schema.Tables, table)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert schema map to slice
|
||||
for _, schema := range schemaMap {
|
||||
db.Schemas = append(db.Schemas, schema)
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
// ReadSchema reads GORM Go model files and returns a Schema model
|
||||
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||
db, err := r.ReadDatabase()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(db.Schemas) == 0 {
|
||||
return nil, fmt.Errorf("no schemas found")
|
||||
}
|
||||
|
||||
return db.Schemas[0], nil
|
||||
}
|
||||
|
||||
// ReadTable reads a GORM Go model file and returns a Table model
|
||||
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||
schema, err := r.ReadSchema()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(schema.Tables) == 0 {
|
||||
return nil, fmt.Errorf("no tables found")
|
||||
}
|
||||
|
||||
return schema.Tables[0], nil
|
||||
}
|
||||
|
||||
// parseFile parses a single Go file and extracts table models.
// It makes two passes over the top-level declarations: the first collects
// every struct whose fields carry gorm tags and converts it to a Table;
// the second finds TableName() methods and lets their return value
// override the derived table/schema names. Returns one Table per model
// struct.
func (r *Reader) parseFile(filename string) ([]*models.Table, error) {
	fset := token.NewFileSet()
	node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return nil, fmt.Errorf("failed to parse Go file: %w", err)
	}

	var tables []*models.Table
	// structMap indexes tables by struct name so the second pass can match
	// a TableName() receiver back to its table.
	structMap := make(map[string]*models.Table)

	// First pass: collect struct definitions.
	for _, decl := range node.Decls {
		genDecl, ok := decl.(*ast.GenDecl)
		if !ok || genDecl.Tok != token.TYPE {
			continue
		}

		for _, spec := range genDecl.Specs {
			typeSpec, ok := spec.(*ast.TypeSpec)
			if !ok {
				continue
			}

			structType, ok := typeSpec.Type.(*ast.StructType)
			if !ok {
				continue
			}

			// Check if this struct has gorm tags (indicates it's a model).
			if r.hasModelFields(structType) {
				table := r.parseStruct(typeSpec.Name.Name, structType)
				if table != nil {
					structMap[typeSpec.Name.Name] = table
					tables = append(tables, table)
				}
			}
		}
	}

	// Second pass: find TableName() methods.
	for _, decl := range node.Decls {
		funcDecl, ok := decl.(*ast.FuncDecl)
		if !ok || funcDecl.Name.Name != "TableName" {
			continue
		}

		// Get receiver type; skip plain functions named TableName.
		if funcDecl.Recv == nil || len(funcDecl.Recv.List) == 0 {
			continue
		}

		receiverType := r.getReceiverType(funcDecl.Recv.List[0].Type)
		if receiverType == "" {
			continue
		}

		// Find the table for this struct; ignore methods on non-model types.
		table, ok := structMap[receiverType]
		if !ok {
			continue
		}

		// Parse the return value; a non-empty result wins over the derived
		// name.
		tableName, schemaName := r.parseTableNameMethod(funcDecl)
		if tableName != "" {
			table.Name = tableName
			if schemaName != "" {
				table.Schema = schemaName
			}

			// Update columns so their Table/Schema stay consistent with the
			// overridden names.
			for _, col := range table.Columns {
				col.Table = tableName
				col.Schema = table.Schema
			}
		}
	}

	return tables, nil
}
|
||||
|
||||
// getReceiverType extracts the type name from a receiver
|
||||
func (r *Reader) getReceiverType(expr ast.Expr) string {
|
||||
switch t := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return t.Name
|
||||
case *ast.StarExpr:
|
||||
if ident, ok := t.X.(*ast.Ident); ok {
|
||||
return ident.Name
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// parseTableNameMethod parses a TableName() method and extracts the table and schema name
|
||||
func (r *Reader) parseTableNameMethod(funcDecl *ast.FuncDecl) (string, string) {
|
||||
if funcDecl.Body == nil {
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// Look for return statement
|
||||
for _, stmt := range funcDecl.Body.List {
|
||||
retStmt, ok := stmt.(*ast.ReturnStmt)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(retStmt.Results) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Get the return value (should be a string literal)
|
||||
if basicLit, ok := retStmt.Results[0].(*ast.BasicLit); ok {
|
||||
if basicLit.Kind == token.STRING {
|
||||
// Remove quotes
|
||||
fullName := strings.Trim(basicLit.Value, "\"")
|
||||
|
||||
// Split schema.table
|
||||
if strings.Contains(fullName, ".") {
|
||||
parts := strings.SplitN(fullName, ".", 2)
|
||||
return parts[1], parts[0]
|
||||
}
|
||||
|
||||
return fullName, "public"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// hasModelFields checks if the struct has fields with gorm tags
|
||||
func (r *Reader) hasModelFields(structType *ast.StructType) bool {
|
||||
for _, field := range structType.Fields.List {
|
||||
if field.Tag != nil {
|
||||
tag := field.Tag.Value
|
||||
if strings.Contains(tag, "gorm:") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// parseStruct converts an AST struct to a Table model
|
||||
func (r *Reader) parseStruct(structName string, structType *ast.StructType) *models.Table {
|
||||
tableName := r.deriveTableName(structName)
|
||||
schemaName := "public"
|
||||
|
||||
table := models.InitTable(tableName, schemaName)
|
||||
sequence := uint(1)
|
||||
|
||||
// Parse fields
|
||||
for _, field := range structType.Fields.List {
|
||||
if field.Tag == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
tag := field.Tag.Value
|
||||
if !strings.Contains(tag, "gorm:") {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip embedded GORM model and relationship fields
|
||||
if r.isGORMModel(field) || r.isRelationship(tag) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Get field name
|
||||
fieldName := ""
|
||||
if len(field.Names) > 0 {
|
||||
fieldName = field.Names[0].Name
|
||||
}
|
||||
|
||||
// Parse column from tag
|
||||
column := r.parseColumn(fieldName, field.Type, tag, sequence)
|
||||
if column != nil {
|
||||
// Extract schema and table name from TableName() method if present
|
||||
if strings.Contains(tag, "gorm:") {
|
||||
tablePart, schemaPart := r.extractTableFromGormTag(tag)
|
||||
if tablePart != "" {
|
||||
tableName = tablePart
|
||||
}
|
||||
if schemaPart != "" {
|
||||
schemaName = schemaPart
|
||||
}
|
||||
}
|
||||
|
||||
column.Table = tableName
|
||||
column.Schema = schemaName
|
||||
table.Name = tableName
|
||||
table.Schema = schemaName
|
||||
table.Columns[column.Name] = column
|
||||
sequence++
|
||||
}
|
||||
}
|
||||
|
||||
return table
|
||||
}
|
||||
|
||||
// isGORMModel checks if a field is gorm.Model
|
||||
func (r *Reader) isGORMModel(field *ast.Field) bool {
|
||||
if len(field.Names) > 0 {
|
||||
return false // gorm.Model is embedded, so it has no name
|
||||
}
|
||||
|
||||
// Check if the type is gorm.Model
|
||||
selExpr, ok := field.Type.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
ident, ok := selExpr.X.(*ast.Ident)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return ident.Name == "gorm" && selExpr.Sel.Name == "Model"
|
||||
}
|
||||
|
||||
// isRelationship checks if a field is a relationship based on gorm tag
|
||||
func (r *Reader) isRelationship(tag string) bool {
|
||||
gormTag := r.extractGormTag(tag)
|
||||
return strings.Contains(gormTag, "foreignKey:") ||
|
||||
strings.Contains(gormTag, "references:") ||
|
||||
strings.Contains(gormTag, "many2many:")
|
||||
}
|
||||
|
||||
// extractTableFromGormTag extracts table and schema from a gorm tag.
// GORM sets the table name via the TableName() method rather than a struct
// tag, so this is currently a stub: it always returns ("", "") and callers
// fall back to deriveTableName / the TableName() override in parseFile.
func (r *Reader) extractTableFromGormTag(tag string) (string, string) {
	// This is typically set via TableName() method, not in tags.
	// We'll return empty strings and rely on deriveTableName.
	return "", ""
}
|
||||
|
||||
// deriveTableName derives a table name from struct name
|
||||
func (r *Reader) deriveTableName(structName string) string {
|
||||
// Remove "Model" prefix if present
|
||||
name := strings.TrimPrefix(structName, "Model")
|
||||
|
||||
// Convert PascalCase to snake_case
|
||||
var result strings.Builder
|
||||
for i, r := range name {
|
||||
if i > 0 && r >= 'A' && r <= 'Z' {
|
||||
result.WriteRune('_')
|
||||
}
|
||||
result.WriteRune(r)
|
||||
}
|
||||
|
||||
return strings.ToLower(result.String())
|
||||
}
|
||||
|
||||
// parseColumn parses a struct field into a Column model.
// The gorm tag supplies the column name (or it is derived from fieldName),
// plus attributes like type, primaryKey, not null, autoIncrement, default
// and size; fieldType supplies the SQL type and nullability when the tag
// does not. Returns nil when the field has no gorm tag value.
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) *models.Column {
	// Extract gorm tag; absence means this field is not a column.
	gormTag := r.extractGormTag(tag)
	if gormTag == "" {
		return nil
	}

	column := models.InitColumn("", "", "")
	column.Sequence = sequence

	// Parse gorm tag into a key -> value map (flags map to "").
	parts := r.parseGormTag(gormTag)

	// Get column name: explicit "column:" wins, otherwise snake_case the
	// Go field name.
	if colName, ok := parts["column"]; ok {
		column.Name = colName
	} else if fieldName != "" {
		// Derive column name from field name
		column.Name = r.fieldNameToColumnName(fieldName)
	}

	// Parse tag attributes.
	if typ, ok := parts["type"]; ok {
		// Parse type and extract length if present (e.g., varchar(255)).
		column.Type, column.Length = r.parseTypeWithLength(typ)
	}
	if _, ok := parts["primaryKey"]; ok {
		column.IsPrimaryKey = true
	}
	if _, ok := parts["not null"]; ok {
		column.NotNull = true
	}
	if _, ok := parts["autoIncrement"]; ok {
		column.AutoIncrement = true
	}
	if def, ok := parts["default"]; ok {
		column.Default = def
	}
	if size, ok := parts["size"]; ok {
		// "size" overrides any length taken from the type string.
		if s, err := strconv.Atoi(size); err == nil {
			column.Length = s
		}
	}

	// If no type specified in tag, derive from Go type.
	if column.Type == "" {
		column.Type = r.goTypeToSQL(fieldType)
	}

	// Pointers and sql_types wrappers are nullable regardless of the tag.
	if r.isNullableType(fieldType) {
		column.NotNull = false
	}

	return column
}
|
||||
|
||||
// extractGormTag extracts the gorm tag value from a struct tag
|
||||
func (r *Reader) extractGormTag(tag string) string {
|
||||
// Remove backticks
|
||||
tag = strings.Trim(tag, "`")
|
||||
|
||||
// Use reflect.StructTag to properly parse
|
||||
st := reflect.StructTag(tag)
|
||||
return st.Get("gorm")
|
||||
}
|
||||
|
||||
// parseTypeWithLength parses a type string and extracts length if present
|
||||
// e.g., "varchar(255)" returns ("varchar", 255)
|
||||
func (r *Reader) parseTypeWithLength(typeStr string) (string, int) {
|
||||
// Check for type with length: varchar(255), char(10), etc.
|
||||
// Also handle precision/scale: numeric(10,2)
|
||||
if strings.Contains(typeStr, "(") {
|
||||
idx := strings.Index(typeStr, "(")
|
||||
baseType := strings.TrimSpace(typeStr[:idx])
|
||||
|
||||
// Extract numbers from parentheses
|
||||
parens := typeStr[idx+1:]
|
||||
if endIdx := strings.Index(parens, ")"); endIdx > 0 {
|
||||
parens = parens[:endIdx]
|
||||
}
|
||||
|
||||
// For now, just handle single number (length)
|
||||
if !strings.Contains(parens, ",") {
|
||||
length := 0
|
||||
fmt.Sscanf(parens, "%d", &length)
|
||||
return baseType, length
|
||||
}
|
||||
}
|
||||
return typeStr, 0
|
||||
}
|
||||
|
||||
// parseGormTag parses a gorm tag string into a map
|
||||
func (r *Reader) parseGormTag(gormTag string) map[string]string {
|
||||
result := make(map[string]string)
|
||||
|
||||
// Split by semicolon
|
||||
parts := strings.Split(gormTag, ";")
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check for key:value pairs
|
||||
if strings.Contains(part, ":") {
|
||||
kv := strings.SplitN(part, ":", 2)
|
||||
result[kv[0]] = kv[1]
|
||||
} else {
|
||||
// Flags like "primaryKey", "not null", etc.
|
||||
result[part] = ""
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// fieldNameToColumnName converts a field name to a column name
|
||||
func (r *Reader) fieldNameToColumnName(fieldName string) string {
|
||||
var result strings.Builder
|
||||
for i, r := range fieldName {
|
||||
if i > 0 && r >= 'A' && r <= 'Z' {
|
||||
result.WriteRune('_')
|
||||
}
|
||||
result.WriteRune(r)
|
||||
}
|
||||
return strings.ToLower(result.String())
|
||||
}
|
||||
|
||||
// goTypeToSQL maps a Go AST type expression to a SQL column type.
// Pointers are unwrapped and mapped by their element type (nullability is
// handled separately by isNullableType); unrecognized types fall back to
// "text".
func (r *Reader) goTypeToSQL(expr ast.Expr) string {
	switch t := expr.(type) {
	case *ast.Ident:
		switch t.Name {
		case "int", "int32":
			return "integer"
		case "int64":
			return "bigint"
		case "string":
			return "text"
		case "bool":
			return "boolean"
		case "float32":
			return "real"
		case "float64":
			return "double precision"
		}
	case *ast.SelectorExpr:
		// Handle qualified types like time.Time, sql_types.SqlString, etc.
		if ident, ok := t.X.(*ast.Ident); ok {
			switch ident.Name {
			case "time":
				if t.Sel.Name == "Time" {
					return "timestamp"
				}
			case "sql_types":
				// Nullable wrapper types map via their own table.
				return r.sqlTypeToSQL(t.Sel.Name)
			}
		}
	case *ast.StarExpr:
		// Pointer type — map the pointee; the pointer itself only affects
		// nullability, not the SQL type.
		return r.goTypeToSQL(t.X)
	}
	return "text"
}
|
||||
|
||||
// sqlTypeToSQL maps sql_types types to SQL types
|
||||
func (r *Reader) sqlTypeToSQL(typeName string) string {
|
||||
switch typeName {
|
||||
case "SqlString":
|
||||
return "text"
|
||||
case "SqlInt":
|
||||
return "integer"
|
||||
case "SqlInt64":
|
||||
return "bigint"
|
||||
case "SqlFloat":
|
||||
return "double precision"
|
||||
case "SqlBool":
|
||||
return "boolean"
|
||||
case "SqlTime":
|
||||
return "timestamp"
|
||||
default:
|
||||
return "text"
|
||||
}
|
||||
}
|
||||
|
||||
// isNullableType checks if a Go type represents a nullable field
|
||||
func (r *Reader) isNullableType(expr ast.Expr) bool {
|
||||
switch t := expr.(type) {
|
||||
case *ast.StarExpr:
|
||||
// Pointer type is nullable
|
||||
return true
|
||||
case *ast.SelectorExpr:
|
||||
// Check for sql_types nullable types
|
||||
if ident, ok := t.X.(*ast.Ident); ok {
|
||||
if ident.Name == "sql_types" {
|
||||
return strings.HasPrefix(t.Sel.Name, "Sql")
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
612
tests/assets/yaml/complex_database.yaml
Normal file
612
tests/assets/yaml/complex_database.yaml
Normal file
@@ -0,0 +1,612 @@
|
||||
name: complex_test_db
|
||||
description: Complex test database with relationships and indexes
|
||||
database_type: pgsql
|
||||
schemas:
|
||||
- name: public
|
||||
tables:
|
||||
- name: users
|
||||
schema: public
|
||||
description: User accounts table
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: users
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
email:
|
||||
name: email
|
||||
table: users
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 255
|
||||
not_null: true
|
||||
comment: User email address
|
||||
sequence: 2
|
||||
username:
|
||||
name: username
|
||||
table: users
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 100
|
||||
not_null: true
|
||||
sequence: 3
|
||||
name:
|
||||
name: name
|
||||
table: users
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 100
|
||||
not_null: false
|
||||
sequence: 4
|
||||
bio:
|
||||
name: bio
|
||||
table: users
|
||||
schema: public
|
||||
type: text
|
||||
not_null: false
|
||||
sequence: 5
|
||||
avatar_url:
|
||||
name: avatar_url
|
||||
table: users
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 500
|
||||
not_null: false
|
||||
sequence: 6
|
||||
is_active:
|
||||
name: is_active
|
||||
table: users
|
||||
schema: public
|
||||
type: boolean
|
||||
not_null: true
|
||||
default: true
|
||||
sequence: 7
|
||||
created_at:
|
||||
name: created_at
|
||||
table: users
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: true
|
||||
default: CURRENT_TIMESTAMP
|
||||
sequence: 8
|
||||
updated_at:
|
||||
name: updated_at
|
||||
table: users
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: false
|
||||
sequence: 9
|
||||
indexes:
|
||||
idx_users_email:
|
||||
name: idx_users_email
|
||||
table: users
|
||||
schema: public
|
||||
columns:
|
||||
- email
|
||||
unique: true
|
||||
type: btree
|
||||
idx_users_username:
|
||||
name: idx_users_username
|
||||
table: users
|
||||
schema: public
|
||||
columns:
|
||||
- username
|
||||
unique: true
|
||||
type: btree
|
||||
idx_users_created_at:
|
||||
name: idx_users_created_at
|
||||
table: users
|
||||
schema: public
|
||||
columns:
|
||||
- created_at
|
||||
unique: false
|
||||
type: btree
|
||||
constraints: {}
|
||||
relationships: {}
|
||||
|
||||
- name: categories
|
||||
schema: public
|
||||
description: Post categories
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: categories
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
name:
|
||||
name: name
|
||||
table: categories
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 100
|
||||
not_null: true
|
||||
sequence: 2
|
||||
slug:
|
||||
name: slug
|
||||
table: categories
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 100
|
||||
not_null: true
|
||||
sequence: 3
|
||||
description:
|
||||
name: description
|
||||
table: categories
|
||||
schema: public
|
||||
type: text
|
||||
not_null: false
|
||||
sequence: 4
|
||||
indexes:
|
||||
idx_categories_slug:
|
||||
name: idx_categories_slug
|
||||
table: categories
|
||||
schema: public
|
||||
columns:
|
||||
- slug
|
||||
unique: true
|
||||
type: btree
|
||||
constraints: {}
|
||||
relationships: {}
|
||||
|
||||
- name: posts
|
||||
schema: public
|
||||
description: Blog posts
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: posts
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
user_id:
|
||||
name: user_id
|
||||
table: posts
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
comment: Foreign key to users table
|
||||
sequence: 2
|
||||
title:
|
||||
name: title
|
||||
table: posts
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 200
|
||||
not_null: true
|
||||
sequence: 3
|
||||
slug:
|
||||
name: slug
|
||||
table: posts
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 200
|
||||
not_null: true
|
||||
sequence: 4
|
||||
content:
|
||||
name: content
|
||||
table: posts
|
||||
schema: public
|
||||
type: text
|
||||
not_null: false
|
||||
sequence: 5
|
||||
excerpt:
|
||||
name: excerpt
|
||||
table: posts
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 500
|
||||
not_null: false
|
||||
sequence: 6
|
||||
status:
|
||||
name: status
|
||||
table: posts
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 20
|
||||
not_null: true
|
||||
default: draft
|
||||
sequence: 7
|
||||
view_count:
|
||||
name: view_count
|
||||
table: posts
|
||||
schema: public
|
||||
type: integer
|
||||
not_null: true
|
||||
default: 0
|
||||
sequence: 8
|
||||
published_at:
|
||||
name: published_at
|
||||
table: posts
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: false
|
||||
sequence: 9
|
||||
created_at:
|
||||
name: created_at
|
||||
table: posts
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: true
|
||||
default: CURRENT_TIMESTAMP
|
||||
sequence: 10
|
||||
updated_at:
|
||||
name: updated_at
|
||||
table: posts
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: false
|
||||
sequence: 11
|
||||
indexes:
|
||||
idx_posts_user_id:
|
||||
name: idx_posts_user_id
|
||||
table: posts
|
||||
schema: public
|
||||
columns:
|
||||
- user_id
|
||||
unique: false
|
||||
type: btree
|
||||
idx_posts_slug:
|
||||
name: idx_posts_slug
|
||||
table: posts
|
||||
schema: public
|
||||
columns:
|
||||
- slug
|
||||
unique: true
|
||||
type: btree
|
||||
idx_posts_status:
|
||||
name: idx_posts_status
|
||||
table: posts
|
||||
schema: public
|
||||
columns:
|
||||
- status
|
||||
unique: false
|
||||
type: btree
|
||||
idx_posts_published_at:
|
||||
name: idx_posts_published_at
|
||||
table: posts
|
||||
schema: public
|
||||
columns:
|
||||
- published_at
|
||||
unique: false
|
||||
type: btree
|
||||
constraints:
|
||||
fk_posts_user:
|
||||
name: fk_posts_user
|
||||
type: foreign_key
|
||||
table: posts
|
||||
schema: public
|
||||
columns:
|
||||
- user_id
|
||||
referenced_table: users
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
relationships: {}
|
||||
|
||||
- name: comments
|
||||
schema: public
|
||||
description: Post comments
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: comments
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
post_id:
|
||||
name: post_id
|
||||
table: comments
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
comment: Foreign key to posts table
|
||||
sequence: 2
|
||||
user_id:
|
||||
name: user_id
|
||||
table: comments
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
comment: Foreign key to users table
|
||||
sequence: 3
|
||||
parent_id:
|
||||
name: parent_id
|
||||
table: comments
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: false
|
||||
comment: Self-referencing foreign key for nested comments
|
||||
sequence: 4
|
||||
content:
|
||||
name: content
|
||||
table: comments
|
||||
schema: public
|
||||
type: text
|
||||
not_null: true
|
||||
sequence: 5
|
||||
is_approved:
|
||||
name: is_approved
|
||||
table: comments
|
||||
schema: public
|
||||
type: boolean
|
||||
not_null: true
|
||||
default: false
|
||||
sequence: 6
|
||||
created_at:
|
||||
name: created_at
|
||||
table: comments
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: true
|
||||
default: CURRENT_TIMESTAMP
|
||||
sequence: 7
|
||||
updated_at:
|
||||
name: updated_at
|
||||
table: comments
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: false
|
||||
sequence: 8
|
||||
indexes:
|
||||
idx_comments_post_id:
|
||||
name: idx_comments_post_id
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- post_id
|
||||
unique: false
|
||||
type: btree
|
||||
idx_comments_user_id:
|
||||
name: idx_comments_user_id
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- user_id
|
||||
unique: false
|
||||
type: btree
|
||||
idx_comments_parent_id:
|
||||
name: idx_comments_parent_id
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- parent_id
|
||||
unique: false
|
||||
type: btree
|
||||
constraints:
|
||||
fk_comments_post:
|
||||
name: fk_comments_post
|
||||
type: foreign_key
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- post_id
|
||||
referenced_table: posts
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
fk_comments_user:
|
||||
name: fk_comments_user
|
||||
type: foreign_key
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- user_id
|
||||
referenced_table: users
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
fk_comments_parent:
|
||||
name: fk_comments_parent
|
||||
type: foreign_key
|
||||
table: comments
|
||||
schema: public
|
||||
columns:
|
||||
- parent_id
|
||||
referenced_table: comments
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
relationships: {}
|
||||
|
||||
- name: post_categories
|
||||
schema: public
|
||||
description: Many-to-many relationship between posts and categories
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: post_categories
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
post_id:
|
||||
name: post_id
|
||||
table: post_categories
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
sequence: 2
|
||||
category_id:
|
||||
name: category_id
|
||||
table: post_categories
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
sequence: 3
|
||||
created_at:
|
||||
name: created_at
|
||||
table: post_categories
|
||||
schema: public
|
||||
type: timestamp
|
||||
not_null: true
|
||||
default: CURRENT_TIMESTAMP
|
||||
sequence: 4
|
||||
indexes:
|
||||
idx_post_categories_unique:
|
||||
name: idx_post_categories_unique
|
||||
table: post_categories
|
||||
schema: public
|
||||
columns:
|
||||
- post_id
|
||||
- category_id
|
||||
unique: true
|
||||
type: btree
|
||||
idx_post_categories_category:
|
||||
name: idx_post_categories_category
|
||||
table: post_categories
|
||||
schema: public
|
||||
columns:
|
||||
- category_id
|
||||
unique: false
|
||||
type: btree
|
||||
constraints:
|
||||
fk_post_categories_post:
|
||||
name: fk_post_categories_post
|
||||
type: foreign_key
|
||||
table: post_categories
|
||||
schema: public
|
||||
columns:
|
||||
- post_id
|
||||
referenced_table: posts
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
fk_post_categories_category:
|
||||
name: fk_post_categories_category
|
||||
type: foreign_key
|
||||
table: post_categories
|
||||
schema: public
|
||||
columns:
|
||||
- category_id
|
||||
referenced_table: categories
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
relationships: {}
|
||||
|
||||
- name: tags
|
||||
schema: public
|
||||
description: Tags for posts
|
||||
columns:
|
||||
id:
|
||||
name: id
|
||||
table: tags
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
auto_increment: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
name:
|
||||
name: name
|
||||
table: tags
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 50
|
||||
not_null: true
|
||||
sequence: 2
|
||||
slug:
|
||||
name: slug
|
||||
table: tags
|
||||
schema: public
|
||||
type: varchar
|
||||
length: 50
|
||||
not_null: true
|
||||
sequence: 3
|
||||
indexes:
|
||||
idx_tags_slug:
|
||||
name: idx_tags_slug
|
||||
table: tags
|
||||
schema: public
|
||||
columns:
|
||||
- slug
|
||||
unique: true
|
||||
type: btree
|
||||
constraints: {}
|
||||
relationships: {}
|
||||
|
||||
- name: post_tags
|
||||
schema: public
|
||||
description: Many-to-many relationship between posts and tags
|
||||
columns:
|
||||
post_id:
|
||||
name: post_id
|
||||
table: post_tags
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
is_primary_key: true
|
||||
sequence: 1
|
||||
tag_id:
|
||||
name: tag_id
|
||||
table: post_tags
|
||||
schema: public
|
||||
type: bigint
|
||||
not_null: true
|
||||
is_primary_key: true
|
||||
sequence: 2
|
||||
indexes:
|
||||
idx_post_tags_tag:
|
||||
name: idx_post_tags_tag
|
||||
table: post_tags
|
||||
schema: public
|
||||
columns:
|
||||
- tag_id
|
||||
unique: false
|
||||
type: btree
|
||||
constraints:
|
||||
fk_post_tags_post:
|
||||
name: fk_post_tags_post
|
||||
type: foreign_key
|
||||
table: post_tags
|
||||
schema: public
|
||||
columns:
|
||||
- post_id
|
||||
referenced_table: posts
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
fk_post_tags_tag:
|
||||
name: fk_post_tags_tag
|
||||
type: foreign_key
|
||||
table: post_tags
|
||||
schema: public
|
||||
columns:
|
||||
- tag_id
|
||||
referenced_table: tags
|
||||
referenced_schema: public
|
||||
referenced_columns:
|
||||
- id
|
||||
on_delete: CASCADE
|
||||
on_update: CASCADE
|
||||
relationships: {}
|
||||
314
tests/integration/orm_roundtrip_test.go
Normal file
314
tests/integration/orm_roundtrip_test.go
Normal file
@@ -0,0 +1,314 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||
bunreader "git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||
gormreader "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||
yamlreader "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||
bunwriter "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
||||
gormwriter "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||
yamlwriter "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// TestYAMLToBunRoundTrip tests YAML → Bun Go → YAML roundtrip
|
||||
func TestYAMLToBunRoundTrip(t *testing.T) {
|
||||
testDir := t.TempDir()
|
||||
|
||||
// Step 1: Read YAML file
|
||||
t.Log("Step 1: Reading YAML file...")
|
||||
yamlPath := filepath.Join("..", "assets", "yaml", "database.yaml")
|
||||
yamlReaderOpts := &readers.ReaderOptions{
|
||||
FilePath: yamlPath,
|
||||
}
|
||||
yamlReader := yamlreader.NewReader(yamlReaderOpts)
|
||||
|
||||
dbFromYAML, err := yamlReader.ReadDatabase()
|
||||
require.NoError(t, err, "Failed to read YAML file")
|
||||
require.NotNil(t, dbFromYAML, "Database from YAML should not be nil")
|
||||
t.Logf(" ✓ Read database '%s' with %d schemas", dbFromYAML.Name, len(dbFromYAML.Schemas))
|
||||
|
||||
// Step 2: Write to Bun Go code
|
||||
t.Log("Step 2: Writing to Bun Go code...")
|
||||
bunGoPath := filepath.Join(testDir, "models_bun.go")
|
||||
bunWriterOpts := &writers.WriterOptions{
|
||||
OutputPath: bunGoPath,
|
||||
PackageName: "models",
|
||||
Metadata: map[string]interface{}{
|
||||
"generate_table_name": true,
|
||||
"generate_get_id": false,
|
||||
},
|
||||
}
|
||||
bunWriter := bunwriter.NewWriter(bunWriterOpts)
|
||||
|
||||
err = bunWriter.WriteDatabase(dbFromYAML)
|
||||
require.NoError(t, err, "Failed to write Bun Go code")
|
||||
|
||||
bunStat, err := os.Stat(bunGoPath)
|
||||
require.NoError(t, err, "Bun Go file should exist")
|
||||
require.Greater(t, bunStat.Size(), int64(0), "Bun Go file should not be empty")
|
||||
t.Logf(" ✓ Wrote Bun Go file (%d bytes)", bunStat.Size())
|
||||
|
||||
// Step 3: Read Bun Go code back
|
||||
t.Log("Step 3: Reading Bun Go code back...")
|
||||
bunReaderOpts := &readers.ReaderOptions{
|
||||
FilePath: bunGoPath,
|
||||
}
|
||||
bunReader := bunreader.NewReader(bunReaderOpts)
|
||||
|
||||
dbFromBun, err := bunReader.ReadDatabase()
|
||||
require.NoError(t, err, "Failed to read Bun Go code")
|
||||
require.NotNil(t, dbFromBun, "Database from Bun should not be nil")
|
||||
t.Logf(" ✓ Read database from Bun with %d schemas", len(dbFromBun.Schemas))
|
||||
|
||||
// Step 4: Write back to YAML
|
||||
t.Log("Step 4: Writing back to YAML...")
|
||||
yaml2Path := filepath.Join(testDir, "roundtrip.yaml")
|
||||
yamlWriter2Opts := &writers.WriterOptions{
|
||||
OutputPath: yaml2Path,
|
||||
}
|
||||
yamlWriter2 := yamlwriter.NewWriter(yamlWriter2Opts)
|
||||
|
||||
err = yamlWriter2.WriteDatabase(dbFromBun)
|
||||
require.NoError(t, err, "Failed to write YAML")
|
||||
|
||||
yaml2Stat, err := os.Stat(yaml2Path)
|
||||
require.NoError(t, err, "Second YAML file should exist")
|
||||
require.Greater(t, yaml2Stat.Size(), int64(0), "Second YAML file should not be empty")
|
||||
t.Logf(" ✓ Wrote second YAML file (%d bytes)", yaml2Stat.Size())
|
||||
|
||||
// Step 5: Compare YAML files
|
||||
t.Log("Step 5: Comparing YAML outputs...")
|
||||
|
||||
// Read both YAML files
|
||||
yaml1Data, err := os.ReadFile(yamlPath)
|
||||
require.NoError(t, err, "Failed to read first YAML")
|
||||
|
||||
yaml2Data, err := os.ReadFile(yaml2Path)
|
||||
require.NoError(t, err, "Failed to read second YAML")
|
||||
|
||||
// Parse into Database models for comparison
|
||||
var db1, db2 models.Database
|
||||
err = yaml.Unmarshal(yaml1Data, &db1)
|
||||
require.NoError(t, err, "Failed to parse first YAML")
|
||||
|
||||
err = yaml.Unmarshal(yaml2Data, &db2)
|
||||
require.NoError(t, err, "Failed to parse second YAML")
|
||||
|
||||
// Compare high-level structure
|
||||
t.Log(" Comparing high-level structure...")
|
||||
assert.Equal(t, len(db1.Schemas), len(db2.Schemas), "Schema count should match")
|
||||
|
||||
// Compare schemas and tables
|
||||
for i, schema1 := range db1.Schemas {
|
||||
if i >= len(db2.Schemas) {
|
||||
t.Errorf("Schema index %d out of bounds in second database", i)
|
||||
continue
|
||||
}
|
||||
schema2 := db2.Schemas[i]
|
||||
|
||||
assert.Equal(t, schema1.Name, schema2.Name, "Schema names should match")
|
||||
assert.Equal(t, len(schema1.Tables), len(schema2.Tables),
|
||||
"Table count in schema '%s' should match", schema1.Name)
|
||||
|
||||
// Compare tables
|
||||
for j, table1 := range schema1.Tables {
|
||||
if j >= len(schema2.Tables) {
|
||||
t.Errorf("Table index %d out of bounds in schema '%s'", j, schema1.Name)
|
||||
continue
|
||||
}
|
||||
table2 := schema2.Tables[j]
|
||||
|
||||
assert.Equal(t, table1.Name, table2.Name,
|
||||
"Table names should match in schema '%s'", schema1.Name)
|
||||
|
||||
// Compare column count
|
||||
assert.Equal(t, len(table1.Columns), len(table2.Columns),
|
||||
"Column count in table '%s.%s' should match", schema1.Name, table1.Name)
|
||||
|
||||
// Compare each column
|
||||
for colName, col1 := range table1.Columns {
|
||||
col2, ok := table2.Columns[colName]
|
||||
if !ok {
|
||||
t.Errorf("Column '%s' missing from roundtrip table '%s.%s'",
|
||||
colName, schema1.Name, table1.Name)
|
||||
continue
|
||||
}
|
||||
|
||||
// Compare key column properties
|
||||
assert.Equal(t, col1.Name, col2.Name,
|
||||
"Column name mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
assert.Equal(t, col1.Type, col2.Type,
|
||||
"Column type mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
assert.Equal(t, col1.IsPrimaryKey, col2.IsPrimaryKey,
|
||||
"Primary key mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Summary
|
||||
t.Log("Summary:")
|
||||
t.Logf(" ✓ Round-trip completed: YAML → Bun → YAML")
|
||||
t.Logf(" ✓ Schemas match: %d", len(db1.Schemas))
|
||||
|
||||
totalTables := 0
|
||||
for _, schema := range db1.Schemas {
|
||||
totalTables += len(schema.Tables)
|
||||
}
|
||||
t.Logf(" ✓ Total tables: %d", totalTables)
|
||||
}
|
||||
|
||||
// TestYAMLToGORMRoundTrip tests YAML → GORM Go → YAML roundtrip
|
||||
func TestYAMLToGORMRoundTrip(t *testing.T) {
|
||||
testDir := t.TempDir()
|
||||
|
||||
// Step 1: Read YAML file
|
||||
t.Log("Step 1: Reading YAML file...")
|
||||
yamlPath := filepath.Join("..", "assets", "yaml", "database.yaml")
|
||||
yamlReaderOpts := &readers.ReaderOptions{
|
||||
FilePath: yamlPath,
|
||||
}
|
||||
yamlReader := yamlreader.NewReader(yamlReaderOpts)
|
||||
|
||||
dbFromYAML, err := yamlReader.ReadDatabase()
|
||||
require.NoError(t, err, "Failed to read YAML file")
|
||||
require.NotNil(t, dbFromYAML, "Database from YAML should not be nil")
|
||||
t.Logf(" ✓ Read database '%s' with %d schemas", dbFromYAML.Name, len(dbFromYAML.Schemas))
|
||||
|
||||
// Step 2: Write to GORM Go code
|
||||
t.Log("Step 2: Writing to GORM Go code...")
|
||||
gormGoPath := filepath.Join(testDir, "models_gorm.go")
|
||||
gormWriterOpts := &writers.WriterOptions{
|
||||
OutputPath: gormGoPath,
|
||||
PackageName: "models",
|
||||
Metadata: map[string]interface{}{
|
||||
"generate_table_name": true,
|
||||
"generate_get_id": false,
|
||||
},
|
||||
}
|
||||
gormWriter := gormwriter.NewWriter(gormWriterOpts)
|
||||
|
||||
err = gormWriter.WriteDatabase(dbFromYAML)
|
||||
require.NoError(t, err, "Failed to write GORM Go code")
|
||||
|
||||
gormStat, err := os.Stat(gormGoPath)
|
||||
require.NoError(t, err, "GORM Go file should exist")
|
||||
require.Greater(t, gormStat.Size(), int64(0), "GORM Go file should not be empty")
|
||||
t.Logf(" ✓ Wrote GORM Go file (%d bytes)", gormStat.Size())
|
||||
|
||||
// Step 3: Read GORM Go code back
|
||||
t.Log("Step 3: Reading GORM Go code back...")
|
||||
gormReaderOpts := &readers.ReaderOptions{
|
||||
FilePath: gormGoPath,
|
||||
}
|
||||
gormReader := gormreader.NewReader(gormReaderOpts)
|
||||
|
||||
dbFromGORM, err := gormReader.ReadDatabase()
|
||||
require.NoError(t, err, "Failed to read GORM Go code")
|
||||
require.NotNil(t, dbFromGORM, "Database from GORM should not be nil")
|
||||
t.Logf(" ✓ Read database from GORM with %d schemas", len(dbFromGORM.Schemas))
|
||||
|
||||
// Step 4: Write back to YAML
|
||||
t.Log("Step 4: Writing back to YAML...")
|
||||
yaml2Path := filepath.Join(testDir, "roundtrip.yaml")
|
||||
yamlWriter2Opts := &writers.WriterOptions{
|
||||
OutputPath: yaml2Path,
|
||||
}
|
||||
yamlWriter2 := yamlwriter.NewWriter(yamlWriter2Opts)
|
||||
|
||||
err = yamlWriter2.WriteDatabase(dbFromGORM)
|
||||
require.NoError(t, err, "Failed to write YAML")
|
||||
|
||||
yaml2Stat, err := os.Stat(yaml2Path)
|
||||
require.NoError(t, err, "Second YAML file should exist")
|
||||
require.Greater(t, yaml2Stat.Size(), int64(0), "Second YAML file should not be empty")
|
||||
t.Logf(" ✓ Wrote second YAML file (%d bytes)", yaml2Stat.Size())
|
||||
|
||||
// Step 5: Compare YAML files
|
||||
t.Log("Step 5: Comparing YAML outputs...")
|
||||
|
||||
// Read both YAML files
|
||||
yaml1Data, err := os.ReadFile(yamlPath)
|
||||
require.NoError(t, err, "Failed to read first YAML")
|
||||
|
||||
yaml2Data, err := os.ReadFile(yaml2Path)
|
||||
require.NoError(t, err, "Failed to read second YAML")
|
||||
|
||||
// Parse into Database models for comparison
|
||||
var db1, db2 models.Database
|
||||
err = yaml.Unmarshal(yaml1Data, &db1)
|
||||
require.NoError(t, err, "Failed to parse first YAML")
|
||||
|
||||
err = yaml.Unmarshal(yaml2Data, &db2)
|
||||
require.NoError(t, err, "Failed to parse second YAML")
|
||||
|
||||
// Compare high-level structure
|
||||
t.Log(" Comparing high-level structure...")
|
||||
assert.Equal(t, len(db1.Schemas), len(db2.Schemas), "Schema count should match")
|
||||
|
||||
// Compare schemas and tables
|
||||
for i, schema1 := range db1.Schemas {
|
||||
if i >= len(db2.Schemas) {
|
||||
t.Errorf("Schema index %d out of bounds in second database", i)
|
||||
continue
|
||||
}
|
||||
schema2 := db2.Schemas[i]
|
||||
|
||||
assert.Equal(t, schema1.Name, schema2.Name, "Schema names should match")
|
||||
assert.Equal(t, len(schema1.Tables), len(schema2.Tables),
|
||||
"Table count in schema '%s' should match", schema1.Name)
|
||||
|
||||
// Compare tables
|
||||
for j, table1 := range schema1.Tables {
|
||||
if j >= len(schema2.Tables) {
|
||||
t.Errorf("Table index %d out of bounds in schema '%s'", j, schema1.Name)
|
||||
continue
|
||||
}
|
||||
table2 := schema2.Tables[j]
|
||||
|
||||
assert.Equal(t, table1.Name, table2.Name,
|
||||
"Table names should match in schema '%s'", schema1.Name)
|
||||
|
||||
// Compare column count
|
||||
assert.Equal(t, len(table1.Columns), len(table2.Columns),
|
||||
"Column count in table '%s.%s' should match", schema1.Name, table1.Name)
|
||||
|
||||
// Compare each column
|
||||
for colName, col1 := range table1.Columns {
|
||||
col2, ok := table2.Columns[colName]
|
||||
if !ok {
|
||||
t.Errorf("Column '%s' missing from roundtrip table '%s.%s'",
|
||||
colName, schema1.Name, table1.Name)
|
||||
continue
|
||||
}
|
||||
|
||||
// Compare key column properties
|
||||
assert.Equal(t, col1.Name, col2.Name,
|
||||
"Column name mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
assert.Equal(t, col1.Type, col2.Type,
|
||||
"Column type mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
assert.Equal(t, col1.IsPrimaryKey, col2.IsPrimaryKey,
|
||||
"Primary key mismatch in '%s.%s.%s'", schema1.Name, table1.Name, colName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Summary
|
||||
t.Log("Summary:")
|
||||
t.Logf(" ✓ Round-trip completed: YAML → GORM → YAML")
|
||||
t.Logf(" ✓ Schemas match: %d", len(db1.Schemas))
|
||||
|
||||
totalTables := 0
|
||||
for _, schema := range db1.Schemas {
|
||||
totalTables += len(schema.Tables)
|
||||
}
|
||||
t.Logf(" ✓ Total tables: %d", totalTables)
|
||||
}
|
||||
Reference in New Issue
Block a user