So far so good
Some checks are pending
CI / Test (1.23) (push) Waiting to run
CI / Test (1.24) (push) Waiting to run
CI / Test (1.25) (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Build (push) Waiting to run

This commit is contained in:
2025-12-16 18:10:40 +02:00
parent b9650739bf
commit 7c7054d2e2
44 changed files with 27029 additions and 48 deletions

400
pkg/readers/dbml/reader.go Normal file
View File

@@ -0,0 +1,400 @@
package dbml
import (
"bufio"
"fmt"
"os"
"regexp"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// Reader implements the readers.Reader interface for DBML format
type Reader struct {
	options *readers.ReaderOptions // reader configuration; FilePath must be set
}
// NewReader constructs a DBML reader bound to the supplied options.
func NewReader(options *readers.ReaderOptions) *Reader {
	r := &Reader{options: options}
	return r
}
// ReadDatabase reads the DBML file named in the options and parses it into
// a Database model. A non-empty FilePath is mandatory.
func (r *Reader) ReadDatabase() (*models.Database, error) {
	path := r.options.FilePath
	if path == "" {
		return nil, fmt.Errorf("file path is required for DBML reader")
	}
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	return r.parseDBML(string(raw))
}
// ReadSchema parses the DBML input and returns its first schema.
func (r *Reader) ReadSchema() (*models.Schema, error) {
	database, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(database.Schemas) > 0 {
		// DBML has no notion of a "current" schema; the first one wins.
		return database.Schemas[0], nil
	}
	return nil, fmt.Errorf("no schemas found in DBML")
}
// ReadTable parses the DBML input and returns the first table of the first
// schema.
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}
	if len(schema.Tables) > 0 {
		return schema.Tables[0], nil
	}
	return nil, fmt.Errorf("no tables found in DBML")
}
// Precompiled DBML line patterns; compiled once rather than on every parse.
var (
	dbmlTableRe = regexp.MustCompile(`^Table\s+([a-zA-Z0-9_.]+)\s*{`)
	dbmlRefRe   = regexp.MustCompile(`^Ref:\s+(.+)`)
)

// parseDBML parses DBML content and returns a Database model.
//
// It recognizes Table blocks (optionally schema-qualified; unqualified
// tables go to "public"), column lines, "Note:" lines, "indexes { ... }"
// sub-blocks, and top-level "Ref:" lines, which are attached as foreign-key
// constraints to the referencing table.
func (r *Reader) parseDBML(content string) (*models.Database, error) {
	db := models.InitDatabase("database")
	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}
	scanner := bufio.NewScanner(strings.NewReader(content))
	schemaMap := make(map[string]*models.Schema)
	var currentTable *models.Table
	var currentSchema string
	var inIndexes bool
	var inTable bool
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		// Skip empty lines and comments
		if line == "" || strings.HasPrefix(line, "//") {
			continue
		}
		// Parse Table definition
		if matches := dbmlTableRe.FindStringSubmatch(line); matches != nil {
			tableName := matches[1]
			parts := strings.Split(tableName, ".")
			if len(parts) == 2 {
				currentSchema = parts[0]
				tableName = parts[1]
			} else {
				currentSchema = "public"
			}
			// Ensure schema exists
			if _, exists := schemaMap[currentSchema]; !exists {
				schemaMap[currentSchema] = models.InitSchema(currentSchema)
			}
			currentTable = models.InitTable(tableName, currentSchema)
			inTable = true
			inIndexes = false
			continue
		}
		// End of indexes section. This check MUST precede the table-closing
		// check below: previously the "}" terminating an indexes block was
		// mistaken for the end of the table, committing the table early and
		// dropping any columns or notes that followed the indexes section.
		if inIndexes && line == "}" {
			inIndexes = false
			continue
		}
		// End of table definition: commit the table to its schema.
		if inTable && line == "}" {
			if currentTable != nil && currentSchema != "" {
				schemaMap[currentSchema].Tables = append(schemaMap[currentSchema].Tables, currentTable)
				currentTable = nil
			}
			inTable = false
			inIndexes = false
			continue
		}
		// Start of the indexes sub-block.
		if inTable && strings.HasPrefix(line, "indexes") {
			inIndexes = true
			continue
		}
		// Index definition inside the indexes sub-block.
		if inIndexes && currentTable != nil {
			if index := r.parseIndex(line, currentTable.Name, currentSchema); index != nil {
				currentTable.Indexes[index.Name] = index
			}
			continue
		}
		// Table-level note becomes the table description.
		if inTable && currentTable != nil && strings.HasPrefix(line, "Note:") {
			note := strings.TrimPrefix(line, "Note:")
			currentTable.Description = strings.Trim(note, " '\"")
			continue
		}
		// Anything else inside a table block is a column definition.
		if inTable && !inIndexes && currentTable != nil {
			if column := r.parseColumn(line, currentTable.Name, currentSchema); column != nil {
				currentTable.Columns[column.Name] = column
			}
			continue
		}
		// Top-level Ref: foreign-key relationship.
		if matches := dbmlRefRe.FindStringSubmatch(line); matches != nil {
			constraint := r.parseRef(matches[1])
			if constraint == nil {
				continue
			}
			// Attach the constraint to the table it originates from.
			for _, schema := range schemaMap {
				for _, table := range schema.Tables {
					if table.Schema == constraint.Schema && table.Name == constraint.Table {
						table.Constraints[constraint.Name] = constraint
						break
					}
				}
			}
			continue
		}
	}
	// Surface any scanner error (previously silently ignored).
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("failed to scan DBML content: %w", err)
	}
	// Add schemas to database
	for _, schema := range schemaMap {
		db.Schemas = append(db.Schemas, schema)
	}
	return db, nil
}
// parseColumn parses one DBML column line of the form:
//
//	name type [attr, attr, ...] // comment
//
// It returns nil when the line does not contain at least a name and a type.
func (r *Reader) parseColumn(line, tableName, schemaName string) *models.Column {
	tokens := strings.Fields(line)
	if len(tokens) < 2 {
		return nil
	}
	col := models.InitColumn(tokens[0], tableName, schemaName)
	col.Type = tokens[1]
	// Attributes are a comma-separated list between the first "[" and "]".
	open, closing := strings.Index(line, "["), strings.Index(line, "]")
	if open != -1 && closing != -1 && open < closing {
		for _, raw := range strings.Split(line[open+1:closing], ",") {
			attr := strings.TrimSpace(raw)
			switch {
			case attr == "pk" || strings.Contains(attr, "primary key"):
				// Primary keys are implicitly NOT NULL.
				col.IsPrimaryKey = true
				col.NotNull = true
			case strings.Contains(attr, "not null"):
				col.NotNull = true
			case attr == "increment":
				col.AutoIncrement = true
			case strings.HasPrefix(attr, "default:"):
				val := strings.TrimSpace(strings.TrimPrefix(attr, "default:"))
				col.Default = strings.Trim(val, "'\"")
			case attr == "unique":
				// Unique column attributes are currently ignored; a unique
				// constraint could be synthesized here in the future.
			}
		}
	}
	// Everything after "//" is treated as an inline column comment.
	if slash := strings.Index(line, "//"); slash != -1 {
		col.Comment = strings.TrimSpace(line[slash+2:])
	}
	return col
}
// parseIndex parses one line of a DBML indexes block, e.g.
//
//	(col_a, col_b) [name: 'idx_ab', unique]
//
// Returns nil when the line has no valid parenthesized column list.
func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
	open := strings.Index(line, "(")
	closing := strings.Index(line, ")")
	if open == -1 || closing == -1 || open >= closing {
		return nil
	}
	var cols []string
	for _, c := range strings.Split(line[open+1:closing], ",") {
		cols = append(cols, strings.TrimSpace(c))
	}
	idx := models.InitIndex("")
	idx.Table = tableName
	idx.Schema = schemaName
	idx.Columns = cols
	// Optional attributes appear between "[" and "]".
	aStart, aEnd := strings.Index(line, "["), strings.Index(line, "]")
	if aStart != -1 && aEnd != -1 && aStart < aEnd {
		for _, raw := range strings.Split(line[aStart+1:aEnd], ",") {
			attr := strings.TrimSpace(raw)
			switch {
			case attr == "unique":
				idx.Unique = true
			case strings.HasPrefix(attr, "name:"):
				name := strings.TrimSpace(strings.TrimPrefix(attr, "name:"))
				idx.Name = strings.Trim(name, "'\"")
			case strings.HasPrefix(attr, "type:"):
				kind := strings.TrimSpace(strings.TrimPrefix(attr, "type:"))
				idx.Type = strings.Trim(kind, "'\"")
			}
		}
	}
	// Fall back to a generated name when none was supplied.
	if idx.Name == "" {
		idx.Name = fmt.Sprintf("idx_%s_%s", tableName, strings.Join(cols, "_"))
	}
	return idx
}
// parseRef parses the body of a DBML "Ref:" line into a foreign-key
// constraint, or nil when the string cannot be split into two table
// references.
//
// Expected shape: "<from> <op> <to> [actions]" where <op> is ">", "<" or
// "-" and each side is a table reference (see parseTableRef).
//
// NOTE(review): the operator is only used to split the string — the left
// side is always treated as the referencing (FK) table. DBML's "<" reverses
// the direction, so "<" refs may attach the FK to the wrong side; confirm
// against the inputs this reader must accept.
//
// NOTE(review): actions are matched with "ondelete:"/"onupdate:" prefixes,
// while standard DBML emits "delete:"/"update:" — verify which form the
// expected inputs use.
func (r *Reader) parseRef(refStr string) *models.Constraint {
	// Split on the first operator that yields exactly two halves.
	var fromPart, toPart string
	for _, op := range []string{">", "<", "-"} {
		if strings.Contains(refStr, op) {
			parts := strings.Split(refStr, op)
			if len(parts) == 2 {
				fromPart = strings.TrimSpace(parts[0])
				toPart = strings.TrimSpace(parts[1])
				break
			}
		}
	}
	if fromPart == "" || toPart == "" {
		return nil
	}
	// Strip a trailing "[actions]" block from the right-hand side before
	// resolving it as a table reference.
	if strings.Contains(toPart, "[") {
		toPart = strings.TrimSpace(toPart[:strings.Index(toPart, "[")])
	}
	// Resolve both sides into schema/table/columns.
	fromSchema, fromTable, fromColumns := r.parseTableRef(fromPart)
	toSchema, toTable, toColumns := r.parseTableRef(toPart)
	if fromTable == "" || toTable == "" {
		return nil
	}
	constraint := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", fromTable, toTable),
		models.ForeignKeyConstraint,
	)
	constraint.Schema = fromSchema
	constraint.Table = fromTable
	constraint.Columns = fromColumns
	constraint.ReferencedSchema = toSchema
	constraint.ReferencedTable = toTable
	constraint.ReferencedColumns = toColumns
	// Parse referential actions from the original (unstripped) string.
	if strings.Contains(refStr, "[") && strings.Contains(refStr, "]") {
		actStart := strings.Index(refStr, "[")
		actEnd := strings.Index(refStr, "]")
		if actStart < actEnd {
			actions := refStr[actStart+1 : actEnd]
			actionList := strings.Split(actions, ",")
			for _, action := range actionList {
				action = strings.TrimSpace(action)
				if strings.HasPrefix(action, "ondelete:") {
					constraint.OnDelete = strings.TrimSpace(strings.TrimPrefix(action, "ondelete:"))
				} else if strings.HasPrefix(action, "onupdate:") {
					constraint.OnUpdate = strings.TrimSpace(strings.TrimPrefix(action, "onupdate:"))
				}
			}
		}
	}
	return constraint
}
// parseTableRef parses a table reference used on either side of a Ref.
//
// Supported forms:
//   - "schema.table.(col1, col2)" — explicit column list in parentheses
//   - "schema.table.column"       — single dotted column
//   - "table.column"              — schema defaults to "public"
//   - "table"                     — no column information
func (r *Reader) parseTableRef(ref string) (schema, table string, columns []string) {
	ref = strings.TrimSpace(ref)
	// Extract a parenthesized column list if present.
	if open := strings.Index(ref, "("); open != -1 {
		if closing := strings.Index(ref, ")"); open < closing {
			for _, col := range strings.Split(ref[open+1:closing], ",") {
				columns = append(columns, strings.TrimSpace(col))
			}
		}
		ref = ref[:open]
	}
	// Drop the trailing dot left over from "schema.table.(cols)". The
	// original code kept it, producing an empty final segment and making
	// every parenthesized reference resolve to an empty table name.
	ref = strings.TrimSuffix(strings.TrimSpace(ref), ".")
	parts := strings.Split(ref, ".")
	if len(columns) > 0 {
		// Columns came from parentheses: remaining parts are schema/table.
		switch len(parts) {
		case 2:
			schema, table = parts[0], parts[1]
		case 1:
			schema, table = "public", parts[0]
		}
		return
	}
	// No parentheses: the last dotted segment is the column name, which is
	// the standard DBML form ("posts.user_id > users.id").
	switch len(parts) {
	case 3:
		schema, table = parts[0], parts[1]
		columns = []string{parts[2]}
	case 2:
		schema, table = "public", parts[0]
		columns = []string{parts[1]}
	case 1:
		schema, table = "public", parts[0]
	}
	return
}

486
pkg/readers/dctx/reader.go Normal file
View File

@@ -0,0 +1,486 @@
package dctx
import (
"encoding/xml"
"fmt"
"os"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// Reader implements the readers.Reader interface for DCTX format
type Reader struct {
	options *readers.ReaderOptions // reader configuration; FilePath must be set
}
// NewReader constructs a DCTX reader bound to the supplied options.
func NewReader(options *readers.ReaderOptions) *Reader {
	r := &Reader{options: options}
	return r
}
// ReadDatabase reads the DCTX XML file named in the options and converts it
// into a Database model. A non-empty FilePath is mandatory.
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for DCTX reader")
	}
	raw, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	dictionary := DCTXDictionary{}
	if err := xml.Unmarshal(raw, &dictionary); err != nil {
		return nil, fmt.Errorf("failed to parse DCTX XML: %w", err)
	}
	return r.convertToDatabase(&dictionary)
}
// ReadSchema parses the DCTX input and returns its first (and only) schema.
func (r *Reader) ReadSchema() (*models.Schema, error) {
	database, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(database.Schemas) > 0 {
		return database.Schemas[0], nil
	}
	return nil, fmt.Errorf("no schemas found in DCTX")
}
// ReadTable parses the DCTX input and returns the first table of the first
// schema.
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}
	if len(schema.Tables) > 0 {
		return schema.Tables[0], nil
	}
	return nil, fmt.Errorf("no tables found in DCTX")
}
// convertToDatabase converts a parsed DCTX dictionary into a Database model.
//
// Only tables whose "SQL" option is enabled are included; everything lands
// in a single "public" schema. GUID lookup tables are built in a first pass
// so that keys and relations can be resolved by reference afterwards.
func (r *Reader) convertToDatabase(dctx *DCTXDictionary) (*models.Database, error) {
	dbName := dctx.Name
	if dbName == "" {
		dbName = "database"
	}
	db := models.InitDatabase(dbName)
	schema := models.InitSchema("public")
	// Create GUID mappings for tables and keys
	tableGuidMap := make(map[string]string)             // GUID -> table name
	keyGuidMap := make(map[string]*DCTXKey)             // GUID -> key definition
	keyTableMap := make(map[string]string)              // key GUID -> table name
	fieldGuidMaps := make(map[string]map[string]string) // table name -> field GUID -> field name
	// First pass: build GUID mappings
	for _, dctxTable := range dctx.Tables {
		if !r.hasSQLOption(&dctxTable) {
			continue
		}
		tableName := r.sanitizeName(dctxTable.Name)
		tableGuidMap[dctxTable.Guid] = tableName
		// Map keys to their table
		for _, dctxKey := range dctxTable.Keys {
			// Copy before taking the address: with pre-1.22 loop semantics
			// &dctxKey would alias every iteration, leaving all map entries
			// pointing at the last key of the table.
			key := dctxKey
			keyGuidMap[key.Guid] = &key
			keyTableMap[key.Guid] = tableName
		}
	}
	// Second pass: convert tables (columns, then keys/indexes).
	for _, dctxTable := range dctx.Tables {
		if !r.hasSQLOption(&dctxTable) {
			continue
		}
		table, fieldGuidMap, err := r.convertTable(&dctxTable)
		if err != nil {
			return nil, fmt.Errorf("failed to convert table %s: %w", dctxTable.Name, err)
		}
		fieldGuidMaps[table.Name] = fieldGuidMap
		schema.Tables = append(schema.Tables, table)
		// Process keys (indexes, primary keys)
		err = r.processKeys(&dctxTable, table, fieldGuidMap)
		if err != nil {
			return nil, fmt.Errorf("failed to process keys for table %s: %w", dctxTable.Name, err)
		}
	}
	// Finally, resolve relations into foreign keys and relationships.
	err := r.processRelations(dctx, schema, tableGuidMap, keyGuidMap, fieldGuidMaps)
	if err != nil {
		return nil, fmt.Errorf("failed to process relations: %w", err)
	}
	db.Schemas = append(db.Schemas, schema)
	return db, nil
}
// hasSQLOption reports whether the table carries an Option with Property
// "SQL" whose value is "1", i.e. the table is exported to SQL.
func (r *Reader) hasSQLOption(dctxTable *DCTXTable) bool {
	for _, opt := range dctxTable.Options {
		if opt.Property != "SQL" {
			continue
		}
		if opt.PropertyValue == "1" {
			return true
		}
	}
	return false
}
// convertTable converts a DCTX table to a Table model. It also returns a
// map from field GUIDs to sanitized column names for later key/relation
// resolution.
func (r *Reader) convertTable(dctxTable *DCTXTable) (*models.Table, map[string]string, error) {
	name := r.sanitizeName(dctxTable.Name)
	table := models.InitTable(name, "public")
	table.Description = dctxTable.Description
	guidToField := make(map[string]string)
	for _, field := range dctxTable.Fields {
		// Record the GUID -> name mapping for fields that have both.
		if field.Guid != "" && field.Name != "" {
			guidToField[field.Guid] = r.sanitizeName(field.Name)
		}
		cols, err := r.convertField(&field, table.Name)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to convert field %s: %w", field.Name, err)
		}
		// GROUP fields may expand into multiple columns.
		for _, c := range cols {
			table.Columns[c.Name] = c
		}
	}
	return table, guidToField, nil
}
// convertField converts a DCTX field into one or more Columns. GROUP fields
// (nested structures) are flattened recursively into their leaf columns.
func (r *Reader) convertField(dctxField *DCTXField, tableName string) ([]*models.Column, error) {
	if dctxField.DataType == "GROUP" {
		var flattened []*models.Column
		for i := range dctxField.Fields {
			sub, err := r.convertField(&dctxField.Fields[i], tableName)
			if err != nil {
				return nil, err
			}
			flattened = append(flattened, sub...)
		}
		return flattened, nil
	}
	// Leaf field: map the Clarion type to a SQL type and length.
	col := models.InitColumn(r.sanitizeName(dctxField.Name), tableName, "public")
	col.Type, col.Length = r.mapDataType(dctxField.DataType, dctxField.Size)
	for _, opt := range dctxField.Options {
		// IsIdentity marks an auto-increment column; identities are
		// implicitly NOT NULL.
		if opt.Property == "IsIdentity" && opt.PropertyValue == "1" {
			col.AutoIncrement = true
			col.NotNull = true
		}
	}
	return []*models.Column{col}, nil
}
// mapDataType maps a Clarion data type (plus its byte size) to a SQL type
// name and character length. Unknown types fall back to "text".
func (r *Reader) mapDataType(clarionType string, size int) (string, int) {
	switch strings.ToUpper(clarionType) {
	case "LONG", "ULONG":
		// An 8-byte (U)LONG is promoted to bigint.
		if size == 8 {
			return "bigint", 0
		}
		return "integer", 0
	case "SHORT", "USHORT", "BYTE":
		return "smallint", 0
	case "STRING", "PSTRING":
		if size > 0 {
			return "varchar", size
		}
		return "text", 0
	case "CSTRING":
		if size > 0 {
			// CSTRING sizes include the null terminator; drop it but keep
			// at least one character.
			if size > 1 {
				return "varchar", size - 1
			}
			return "varchar", 1
		}
		return "text", 0
	case "DECIMAL":
		return "decimal", 0
	case "REAL":
		return "real", 0
	case "SREAL":
		// NOTE(review): SREAL -> double precision while REAL -> real looks
		// reversed relative to Clarion's 4-byte SREAL / 8-byte REAL.
		// Preserved as-is; verify the intended mapping.
		return "double precision", 0
	case "DATE":
		return "date", 0
	case "TIME":
		return "time", 0
	case "BLOB":
		return "bytea", 0
	case "MEMO":
		return "text", 0
	case "BOOL", "BOOLEAN":
		return "boolean", 0
	default:
		return "text", 0
	}
}
// processKeys converts every key on a DCTX table into either a primary-key
// constraint or a regular index on the target table.
func (r *Reader) processKeys(dctxTable *DCTXTable, table *models.Table, fieldGuidMap map[string]string) error {
	for i := range dctxTable.Keys {
		key := &dctxTable.Keys[i]
		if err := r.convertKey(key, table, fieldGuidMap); err != nil {
			return fmt.Errorf("failed to convert key %s: %w", key.Name, err)
		}
	}
	return nil
}
// convertKey converts one DCTX key into either a primary-key constraint or
// a regular b-tree index on the table.
//
// Key columns are resolved from component field GUIDs. When a primary key
// has no resolvable components, a column is inferred from common naming
// patterns ("rid_*" prefix or "*id" suffix). Keys with no columns at all
// are skipped silently.
func (r *Reader) convertKey(dctxKey *DCTXKey, table *models.Table, fieldGuidMap map[string]string) error {
	var columns []string
	// Extract column names from key components.
	for _, component := range dctxKey.Components {
		if fieldName, exists := fieldGuidMap[component.FieldId]; exists {
			columns = append(columns, fieldName)
		}
	}
	if len(columns) == 0 {
		if dctxKey.Primary {
			// Infer a primary-key column. Map iteration order is random in
			// Go, so the original first-match approach was nondeterministic;
			// pick deterministically, preferring "rid_"-prefixed columns and
			// breaking ties by the lexicographically smallest name.
			var best string
			var bestIsRid bool
			for colName := range table.Columns {
				lower := strings.ToLower(colName)
				isRid := strings.HasPrefix(lower, "rid_")
				if !isRid && !strings.HasSuffix(lower, "id") {
					continue
				}
				switch {
				case best == "":
					best, bestIsRid = colName, isRid
				case isRid && !bestIsRid:
					best, bestIsRid = colName, isRid
				case isRid == bestIsRid && colName < best:
					best = colName
				}
			}
			if best != "" {
				columns = append(columns, best)
			}
		}
		// If still no columns, skip this key.
		if len(columns) == 0 {
			return nil
		}
	}
	if dctxKey.Primary {
		// Create primary key constraint.
		constraint := models.InitConstraint(r.sanitizeName(dctxKey.Name), models.PrimaryKeyConstraint)
		constraint.Table = table.Name
		constraint.Schema = table.Schema
		constraint.Columns = columns
		table.Constraints[constraint.Name] = constraint
		// Primary-key columns are implicitly NOT NULL.
		for _, colName := range columns {
			if col, exists := table.Columns[colName]; exists {
				col.NotNull = true
				col.IsPrimaryKey = true
			}
		}
		return nil
	}
	// Handle regular index.
	index := models.InitIndex(r.sanitizeName(dctxKey.Name))
	index.Table = table.Name
	index.Schema = table.Schema
	index.Columns = columns
	index.Unique = dctxKey.Unique
	index.Type = "btree"
	table.Indexes[index.Name] = index
	return nil
}
// processRelations converts DCTX relations into foreign-key constraints and
// relationship entries on the referencing (foreign) table.
//
// Resolution is GUID-based: table GUIDs via tableGuidMap, field GUIDs via
// fieldGuidMaps. Relations whose tables or mapped columns cannot be fully
// resolved are skipped silently.
//
// NOTE(review): keyGuidMap is accepted but never consulted here — column
// resolution relies solely on the explicit field mappings. Confirm whether
// a key-based fallback (via relation.PrimaryKey/ForeignKey) was intended.
func (r *Reader) processRelations(dctx *DCTXDictionary, schema *models.Schema, tableGuidMap map[string]string, keyGuidMap map[string]*DCTXKey, fieldGuidMaps map[string]map[string]string) error {
	for _, relation := range dctx.Relations {
		// Get table names from GUIDs; empty means the table was excluded
		// (no SQL option) or the GUID is unknown.
		primaryTableName := tableGuidMap[relation.PrimaryTable]
		foreignTableName := tableGuidMap[relation.ForeignTable]
		if primaryTableName == "" || foreignTableName == "" {
			continue
		}
		// Locate the converted table models in the schema.
		var primaryTable, foreignTable *models.Table
		for _, table := range schema.Tables {
			if table.Name == primaryTableName {
				primaryTable = table
			}
			if table.Name == foreignTableName {
				foreignTable = table
			}
		}
		if primaryTable == nil || foreignTable == nil {
			continue
		}
		var fkColumns, pkColumns []string
		// Resolve the explicit field mappings (field GUID -> column name).
		if len(relation.ForeignMappings) > 0 && len(relation.PrimaryMappings) > 0 {
			foreignFieldMap := fieldGuidMaps[foreignTableName]
			primaryFieldMap := fieldGuidMaps[primaryTableName]
			for _, mapping := range relation.ForeignMappings {
				if fieldName, exists := foreignFieldMap[mapping.Field]; exists {
					fkColumns = append(fkColumns, fieldName)
				}
			}
			for _, mapping := range relation.PrimaryMappings {
				if fieldName, exists := primaryFieldMap[mapping.Field]; exists {
					pkColumns = append(pkColumns, fieldName)
				}
			}
		}
		// Both sides must have resolved at least one column.
		if len(fkColumns) == 0 || len(pkColumns) == 0 {
			continue
		}
		// Verify every resolved column actually exists on its table.
		allFkColumnsExist := true
		for _, colName := range fkColumns {
			if _, exists := foreignTable.Columns[colName]; !exists {
				allFkColumnsExist = false
				break
			}
		}
		if !allFkColumnsExist {
			continue
		}
		allPkColumnsExist := true
		for _, colName := range pkColumns {
			if _, exists := primaryTable.Columns[colName]; !exists {
				allPkColumnsExist = false
				break
			}
		}
		if !allPkColumnsExist {
			continue
		}
		// Build the foreign-key constraint on the referencing table.
		fkName := r.sanitizeName(fmt.Sprintf("fk_%s_%s", foreignTableName, primaryTableName))
		constraint := models.InitConstraint(fkName, models.ForeignKeyConstraint)
		constraint.Table = foreignTableName
		constraint.Schema = "public"
		constraint.Columns = fkColumns
		constraint.ReferencedTable = primaryTableName
		constraint.ReferencedSchema = "public"
		constraint.ReferencedColumns = pkColumns
		constraint.OnDelete = r.mapReferentialAction(relation.Delete)
		constraint.OnUpdate = r.mapReferentialAction(relation.Update)
		foreignTable.Constraints[fkName] = constraint
		// Record the corresponding one-to-many relationship.
		relationshipName := fmt.Sprintf("%s_to_%s", foreignTableName, primaryTableName)
		relationship := models.InitRelationship(relationshipName, models.OneToMany)
		relationship.FromTable = primaryTableName
		relationship.FromSchema = "public"
		relationship.ToTable = foreignTableName
		relationship.ToSchema = "public"
		relationship.ForeignKey = fkName
		relationship.Properties["on_delete"] = constraint.OnDelete
		relationship.Properties["on_update"] = constraint.OnUpdate
		foreignTable.Relationships[relationshipName] = relationship
	}
	return nil
}
// mapReferentialAction translates a DCTX referential-action token (with or
// without the "_SERVER" suffix) into SQL syntax; unrecognized values
// default to RESTRICT.
func (r *Reader) mapReferentialAction(action string) string {
	normalized := strings.TrimSuffix(strings.ToUpper(action), "_SERVER")
	switch normalized {
	case "CASCADE":
		return "CASCADE"
	case "SET_NULL":
		return "SET NULL"
	case "SET_DEFAULT":
		return "SET DEFAULT"
	case "NO_ACTION":
		return "NO ACTION"
	default:
		// Covers RESTRICT, RESTRICT_SERVER and anything unknown.
		return "RESTRICT"
	}
}
// sanitizeName normalizes an identifier to lowercase.
func (r *Reader) sanitizeName(name string) string {
	lowered := strings.ToLower(name)
	return lowered
}

84
pkg/readers/dctx/types.go Normal file
View File

@@ -0,0 +1,84 @@
package dctx
import "encoding/xml"
// DCTXDictionary represents the root <Dictionary> element of a DCTX file.
type DCTXDictionary struct {
	XMLName xml.Name `xml:"Dictionary"`
	Name string `xml:"Name,attr"`
	Version string `xml:"Version,attr"`
	Tables []DCTXTable `xml:"Table"`
	Relations []DCTXRelation `xml:"Relation"`
}

// DCTXTable represents a table definition in DCTX. The Options list carries
// flags such as Property="SQL" which mark a table for SQL export.
type DCTXTable struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
	Prefix string `xml:"Prefix,attr"`
	Driver string `xml:"Driver,attr"`
	Owner string `xml:"Owner,attr"`
	Path string `xml:"Path,attr"`
	Description string `xml:"Description,attr"`
	Fields []DCTXField `xml:"Field"`
	Keys []DCTXKey `xml:"Key"`
	Options []DCTXOption `xml:"Option"`
}

// DCTXField represents a field/column definition in DCTX. A field with
// DataType "GROUP" is a container whose nested Fields hold the real columns.
type DCTXField struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
	DataType string `xml:"DataType,attr"`
	Size int `xml:"Size,attr"`
	NoPopulate bool `xml:"NoPopulate,attr"`
	Thread bool `xml:"Thread,attr"`
	Fields []DCTXField `xml:"Field"` // For GROUP fields (nested structures)
	Options []DCTXOption `xml:"Option"`
}

// DCTXKey represents an index or key definition in DCTX. Primary marks a
// primary key; Components reference the participating fields by GUID.
type DCTXKey struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
	KeyType string `xml:"KeyType,attr"`
	Primary bool `xml:"Primary,attr"`
	Unique bool `xml:"Unique,attr"`
	Order int `xml:"Order,attr"`
	Description string `xml:"Description,attr"`
	Components []DCTXComponent `xml:"Component"`
}

// DCTXComponent represents one component of a key: a reference to a field
// (by FieldId GUID) plus its position and sort direction.
type DCTXComponent struct {
	Guid string `xml:"Guid,attr"`
	FieldId string `xml:"FieldId,attr"`
	Order int `xml:"Order,attr"`
	Ascend bool `xml:"Ascend,attr"`
}

// DCTXOption represents a generic property option (name/type/value triple)
// attached to a table or field.
type DCTXOption struct {
	Property string `xml:"Property,attr"`
	PropertyType string `xml:"PropertyType,attr"`
	PropertyValue string `xml:"PropertyValue,attr"`
}

// DCTXRelation represents a relationship/foreign key in DCTX. Tables and
// keys are referenced by GUID; Delete/Update carry referential actions.
type DCTXRelation struct {
	Guid string `xml:"Guid,attr"`
	PrimaryTable string `xml:"PrimaryTable,attr"`
	ForeignTable string `xml:"ForeignTable,attr"`
	PrimaryKey string `xml:"PrimaryKey,attr"`
	ForeignKey string `xml:"ForeignKey,attr"`
	Delete string `xml:"Delete,attr"`
	Update string `xml:"Update,attr"`
	ForeignMappings []DCTXFieldMapping `xml:"ForeignMapping"`
	PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping"`
}

// DCTXFieldMapping represents a field mapping in a relation; Field is the
// GUID of the mapped field.
type DCTXFieldMapping struct {
	Guid string `xml:"Guid,attr"`
	Field string `xml:"Field,attr"`
}

View File

@@ -0,0 +1,304 @@
package drawdb
import (
"encoding/json"
"fmt"
"os"
"strconv"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
)
// Reader implements the readers.Reader interface for DrawDB JSON format
type Reader struct {
	options *readers.ReaderOptions // reader configuration; FilePath must be set
}
// NewReader constructs a DrawDB reader bound to the supplied options.
func NewReader(options *readers.ReaderOptions) *Reader {
	r := &Reader{options: options}
	return r
}
// ReadDatabase reads the DrawDB JSON file named in the options and converts
// it into a Database model. A non-empty FilePath is mandatory.
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for DrawDB reader")
	}
	raw, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	parsed := drawdb.DrawDBSchema{}
	if err := json.Unmarshal(raw, &parsed); err != nil {
		return nil, fmt.Errorf("failed to parse DrawDB JSON: %w", err)
	}
	return r.convertToDatabase(&parsed)
}
// ReadSchema reads the DrawDB JSON file and converts it into a single
// Schema named "default".
func (r *Reader) ReadSchema() (*models.Schema, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for DrawDB reader")
	}
	raw, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	var parsed drawdb.DrawDBSchema
	if err := json.Unmarshal(raw, &parsed); err != nil {
		return nil, fmt.Errorf("failed to parse DrawDB JSON: %w", err)
	}
	return r.convertToSchema(&parsed, "default")
}
// ReadTable reads the DrawDB JSON file and converts its first table into a
// Table model.
func (r *Reader) ReadTable() (*models.Table, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for DrawDB reader")
	}
	raw, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	var parsed drawdb.DrawDBSchema
	if err := json.Unmarshal(raw, &parsed); err != nil {
		return nil, fmt.Errorf("failed to parse DrawDB JSON: %w", err)
	}
	if len(parsed.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in DrawDB JSON")
	}
	return r.convertToTable(parsed.Tables[0], &parsed)
}
// convertToDatabase groups all DrawDB tables by schema and assembles a
// Database model. A "name" entry in reader metadata overrides the default
// database name; a note starting with "Database:" contributes a description.
func (r *Reader) convertToDatabase(drawSchema *drawdb.DrawDBSchema) (*models.Database, error) {
	db := models.InitDatabase("database")
	if meta := r.options.Metadata; meta != nil {
		if name, ok := meta["name"].(string); ok {
			db.Name = name
		}
	}
	// A "Database:" note carries the description after a blank line.
	for _, note := range drawSchema.Notes {
		if !strings.HasPrefix(note.Content, "Database:") {
			continue
		}
		if pieces := strings.SplitN(note.Content, "\n\n", 2); len(pieces) == 2 {
			db.Description = pieces[1]
		}
	}
	// Group tables by schema name; unassigned tables go to "public".
	bySchema := make(map[string]*models.Schema)
	for _, drawTable := range drawSchema.Tables {
		name := drawTable.Schema
		if name == "" {
			name = "public"
		}
		target, ok := bySchema[name]
		if !ok {
			target = models.InitSchema(name)
			bySchema[name] = target
		}
		converted, err := r.convertToTable(drawTable, drawSchema)
		if err != nil {
			return nil, fmt.Errorf("failed to convert table %s: %w", drawTable.Name, err)
		}
		target.Tables = append(target.Tables, converted)
	}
	for _, s := range bySchema {
		db.Schemas = append(db.Schemas, s)
	}
	return db, nil
}
// convertToSchema collects the DrawDB tables belonging to schemaName
// (tables with no schema are included) into a Schema model.
func (r *Reader) convertToSchema(drawSchema *drawdb.DrawDBSchema, schemaName string) (*models.Schema, error) {
	out := models.InitSchema(schemaName)
	for _, drawTable := range drawSchema.Tables {
		// Skip tables explicitly assigned to a different schema.
		if drawTable.Schema != "" && drawTable.Schema != schemaName {
			continue
		}
		converted, err := r.convertToTable(drawTable, drawSchema)
		if err != nil {
			return nil, fmt.Errorf("failed to convert table %s: %w", drawTable.Name, err)
		}
		out.Tables = append(out.Tables, converted)
	}
	return out, nil
}
// convertToTable converts a DrawDB table (fields, indexes, and any
// relationships starting at it) into a Table model.
func (r *Reader) convertToTable(drawTable *drawdb.DrawDBTable, drawSchema *drawdb.DrawDBSchema) (*models.Table, error) {
	schemaName := drawTable.Schema
	if schemaName == "" {
		schemaName = "public"
	}
	out := models.InitTable(drawTable.Name, schemaName)
	out.Description = drawTable.Comment
	for _, f := range drawTable.Fields {
		c := r.convertToColumn(f, drawTable.Name, schemaName)
		out.Columns[c.Name] = c
	}
	for _, di := range drawTable.Indexes {
		converted := r.convertToIndex(di, drawTable, schemaName)
		out.Indexes[converted.Name] = converted
	}
	// Relationships that start at this table become its FK constraints.
	for _, rel := range drawSchema.Relationships {
		if rel.StartTableID != drawTable.ID {
			continue
		}
		if c := r.convertToConstraint(rel, drawSchema); c != nil {
			out.Constraints[c.Name] = c
		}
	}
	return out, nil
}
// convertToColumn converts a DrawDB field into a Column, parsing any
// length/precision suffix embedded in the type string (e.g. "varchar(255)",
// "decimal(10,2)").
func (r *Reader) convertToColumn(field *drawdb.DrawDBField, tableName, schemaName string) *models.Column {
	col := models.InitColumn(field.Name, tableName, schemaName)
	col.Type = field.Type
	if strings.Contains(field.Type, "(") {
		pieces := strings.Split(field.Type, "(")
		col.Type = pieces[0]
		if len(pieces) > 1 {
			dims := strings.TrimSuffix(pieces[1], ")")
			if strings.Contains(dims, ",") {
				// "decimal(10,2)" style: precision, then scale.
				parts := strings.Split(dims, ",")
				if p, err := strconv.Atoi(strings.TrimSpace(parts[0])); err == nil {
					col.Precision = p
				}
				if len(parts) > 1 {
					if s, err := strconv.Atoi(strings.TrimSpace(parts[1])); err == nil {
						col.Scale = s
					}
				}
			} else if n, err := strconv.Atoi(dims); err == nil {
				// "varchar(255)" style: a single length.
				col.Length = n
			}
		}
	}
	col.IsPrimaryKey = field.Primary
	// Primary keys are implicitly NOT NULL.
	col.NotNull = field.NotNull || field.Primary
	col.AutoIncrement = field.Increment
	col.Comment = field.Comment
	if field.Default != "" {
		col.Default = field.Default
	}
	return col
}
// convertToIndex converts a DrawDB index into an Index model.
//
// NOTE(review): index field IDs are treated as positional indexes into the
// table's Fields slice — confirm DrawDB emits positions here rather than
// arbitrary field IDs.
func (r *Reader) convertToIndex(drawIndex *drawdb.DrawDBIndex, drawTable *drawdb.DrawDBTable, schemaName string) *models.Index {
	out := models.InitIndex(drawIndex.Name)
	out.Table = drawTable.Name
	out.Schema = schemaName
	out.Unique = drawIndex.Unique
	for _, id := range drawIndex.Fields {
		// Ignore out-of-range references rather than panicking.
		if id < 0 || id >= len(drawTable.Fields) {
			continue
		}
		out.Columns = append(out.Columns, drawTable.Fields[id].Name)
	}
	return out
}
// convertToConstraint converts a DrawDB relationship into a foreign-key
// constraint. Returns nil when either endpoint table cannot be resolved.
//
// NOTE(review): StartFieldID/EndFieldID are used as positional indexes into
// the field slices — confirm they are positions, not arbitrary IDs.
func (r *Reader) convertToConstraint(rel *drawdb.DrawDBRelationship, drawSchema *drawdb.DrawDBSchema) *models.Constraint {
	// Resolve both endpoints. Two independent checks (not else-if) so a
	// self-referencing table resolves both sides.
	var from, to *drawdb.DrawDBTable
	for _, t := range drawSchema.Tables {
		if t.ID == rel.StartTableID {
			from = t
		}
		if t.ID == rel.EndTableID {
			to = t
		}
	}
	if from == nil || to == nil {
		return nil
	}
	out := models.InitConstraint(rel.Name, models.ForeignKeyConstraint)
	if idx := rel.StartFieldID; idx >= 0 && idx < len(from.Fields) {
		out.Columns = append(out.Columns, from.Fields[idx].Name)
	}
	if idx := rel.EndFieldID; idx >= 0 && idx < len(to.Fields) {
		out.ReferencedColumns = append(out.ReferencedColumns, to.Fields[idx].Name)
	}
	out.Table = from.Name
	out.Schema = from.Schema
	if out.Schema == "" {
		out.Schema = "public"
	}
	out.ReferencedTable = to.Name
	out.ReferencedSchema = to.Schema
	if out.ReferencedSchema == "" {
		out.ReferencedSchema = "public"
	}
	out.OnUpdate = rel.UpdateConstraint
	out.OnDelete = rel.DeleteConstraint
	return out
}

View File

@@ -5,10 +5,16 @@ import (
)
// Reader defines the interface for reading database specifications
// from various input formats
// from various input formats at different granularity levels
type Reader interface {
// Read reads and parses the input, returning a Database model
Read() (*models.Database, error)
// ReadDatabase reads and parses the input, returning a Database model
ReadDatabase() (*models.Database, error)
// ReadSchema reads and parses the input, returning a Schema model
ReadSchema() (*models.Schema, error)
// ReadTable reads and parses the input, returning a Table model
ReadTable() (*models.Table, error)
}
// ReaderOptions contains common options for readers