// relspecgo/pkg/readers/dctx/reader.go

package dctx

import (
	"encoding/xml"
	"fmt"
	"os"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for DCTX format
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new DCTX reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}
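
// Typical usage, as a minimal sketch (it assumes a DCTX file on disk and that
// readers.ReaderOptions needs only the FilePath field used below; the path
// "app.dctx" is illustrative, not part of this package):
//
//	r := dctx.NewReader(&readers.ReaderOptions{FilePath: "app.dctx"})
//	db, err := r.ReadDatabase()
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, tbl := range db.Schemas[0].Tables {
//		fmt.Println(tbl.Name)
//	}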

// ReadDatabase reads and parses DCTX input, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for DCTX reader")
	}
	data, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}
	var dctx DCTXDictionary
	if err := xml.Unmarshal(data, &dctx); err != nil {
		return nil, fmt.Errorf("failed to parse DCTX XML: %w", err)
	}
	return r.convertToDatabase(&dctx)
}

// ReadSchema reads and parses DCTX input, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in DCTX")
	}
	return db.Schemas[0], nil
}

// ReadTable reads and parses DCTX input, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}
	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in DCTX")
	}
	return schema.Tables[0], nil
}

// convertToDatabase converts a DCTX dictionary to a Database model
func (r *Reader) convertToDatabase(dctx *DCTXDictionary) (*models.Database, error) {
	dbName := dctx.Name
	if dbName == "" {
		dbName = "database"
	}
	db := models.InitDatabase(dbName)
	schema := models.InitSchema("public")
	// Create GUID mappings for tables and keys
	tableGuidMap := make(map[string]string)             // GUID -> table name
	keyGuidMap := make(map[string]*DCTXKey)             // GUID -> key definition
	keyTableMap := make(map[string]string)              // key GUID -> table name
	fieldGuidMaps := make(map[string]map[string]string) // table name -> field GUID -> field name
	// First pass: build GUID mappings
	for _, dctxTable := range dctx.Tables {
		if !r.hasSQLOption(&dctxTable) {
			continue
		}
		tableName := r.sanitizeName(dctxTable.Name)
		tableGuidMap[dctxTable.Guid] = tableName
		// Map keys to their table
		for _, dctxKey := range dctxTable.Keys {
			// Taking the address of the range variable is only safe on Go 1.22+,
			// where each iteration gets its own dctxKey.
			keyGuidMap[dctxKey.Guid] = &dctxKey
			keyTableMap[dctxKey.Guid] = tableName
		}
	}
	// Process tables - only include tables with SQL option enabled
	for _, dctxTable := range dctx.Tables {
		if !r.hasSQLOption(&dctxTable) {
			continue
		}
		table, fieldGuidMap, err := r.convertTable(&dctxTable)
		if err != nil {
			return nil, fmt.Errorf("failed to convert table %s: %w", dctxTable.Name, err)
		}
		fieldGuidMaps[table.Name] = fieldGuidMap
		schema.Tables = append(schema.Tables, table)
		// Process keys (indexes, primary keys)
		err = r.processKeys(&dctxTable, table, fieldGuidMap)
		if err != nil {
			return nil, fmt.Errorf("failed to process keys for table %s: %w", dctxTable.Name, err)
		}
	}
	// Process relations
	err := r.processRelations(dctx, schema, tableGuidMap, keyGuidMap, fieldGuidMaps)
	if err != nil {
		return nil, fmt.Errorf("failed to process relations: %w", err)
	}
	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// hasSQLOption checks if a DCTX table has the SQL option set to "1"
func (r *Reader) hasSQLOption(dctxTable *DCTXTable) bool {
	for _, option := range dctxTable.Options {
		if option.Property == "SQL" && option.PropertyValue == "1" {
			return true
		}
	}
	return false
}

// convertTable converts a DCTX table to a Table model
func (r *Reader) convertTable(dctxTable *DCTXTable) (*models.Table, map[string]string, error) {
	tableName := r.sanitizeName(dctxTable.Name)
	table := models.InitTable(tableName, "public")
	table.Description = dctxTable.Description
	fieldGuidMap := make(map[string]string)
	// Process fields
	for _, dctxField := range dctxTable.Fields {
		// Store GUID to name mapping
		if dctxField.Guid != "" && dctxField.Name != "" {
			fieldGuidMap[dctxField.Guid] = r.sanitizeName(dctxField.Name)
		}
		columns, err := r.convertField(&dctxField, table.Name)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to convert field %s: %w", dctxField.Name, err)
		}
		// Add all columns
		for _, column := range columns {
			table.Columns[column.Name] = column
		}
	}
	return table, fieldGuidMap, nil
}

// convertField converts a DCTX field to Column(s)
func (r *Reader) convertField(dctxField *DCTXField, tableName string) ([]*models.Column, error) {
	var columns []*models.Column
	// Handle GROUP fields (nested structures)
	if dctxField.DataType == "GROUP" {
		for _, subField := range dctxField.Fields {
			subColumns, err := r.convertField(&subField, tableName)
			if err != nil {
				return nil, err
			}
			columns = append(columns, subColumns...)
		}
		return columns, nil
	}
	// Convert single field
	column := models.InitColumn(r.sanitizeName(dctxField.Name), tableName, "public")
	// Map Clarion data types
	dataType, length := r.mapDataType(dctxField.DataType, dctxField.Size)
	column.Type = dataType
	column.Length = length
	// Check for auto-increment (identity)
	for _, option := range dctxField.Options {
		if option.Property == "IsIdentity" && option.PropertyValue == "1" {
			column.AutoIncrement = true
			column.NotNull = true
		}
	}
	columns = append(columns, column)
	return columns, nil
}
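
// For instance (illustrative field names only), a GROUP field "Address" whose
// sub-fields are a STRING(30) "Street" and a STRING(20) "City" flattens into
// two columns:
//
//	street varchar(30)
//	city   varchar(20)
//
// The GROUP itself contributes no column of its own.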

// mapDataType maps Clarion data types to SQL types
func (r *Reader) mapDataType(clarionType string, size int) (string, int) {
	switch strings.ToUpper(clarionType) {
	case "LONG":
		if size == 8 {
			return "bigint", 0
		}
		return "integer", 0
	case "ULONG":
		if size == 8 {
			return "bigint", 0
		}
		return "integer", 0
	case "SHORT":
		return "smallint", 0
	case "USHORT":
		return "smallint", 0
	case "BYTE":
		return "smallint", 0
	case "STRING":
		if size > 0 {
			return "varchar", size
		}
		return "text", 0
	case "CSTRING":
		if size > 0 {
			// CSTRING includes null terminator, so subtract 1
			length := size - 1
			if length <= 0 {
				length = 1
			}
			return "varchar", length
		}
		return "text", 0
	case "PSTRING":
		if size > 0 {
			return "varchar", size
		}
		return "text", 0
	case "DECIMAL":
		return "decimal", 0
	case "REAL":
		// Clarion REAL is an 8-byte float
		return "double precision", 0
	case "SREAL":
		// Clarion SREAL is a 4-byte float
		return "real", 0
	case "DATE":
		return "date", 0
	case "TIME":
		return "time", 0
	case "BLOB":
		return "bytea", 0
	case "MEMO":
		return "text", 0
	case "BOOL", "BOOLEAN":
		return "boolean", 0
	default:
		return "text", 0
	}
}
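
// A few resulting mappings, for illustration (these follow directly from the
// switch above):
//
//	mapDataType("CSTRING", 21) // -> ("varchar", 20): null terminator dropped
//	mapDataType("STRING", 0)   // -> ("text", 0)
//	mapDataType("BYTE", 1)     // -> ("smallint", 0)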

// processKeys processes DCTX keys and converts them to indexes and primary keys
func (r *Reader) processKeys(dctxTable *DCTXTable, table *models.Table, fieldGuidMap map[string]string) error {
	for _, dctxKey := range dctxTable.Keys {
		err := r.convertKey(&dctxKey, table, fieldGuidMap)
		if err != nil {
			return fmt.Errorf("failed to convert key %s: %w", dctxKey.Name, err)
		}
	}
	return nil
}

// convertKey converts a DCTX key to an appropriate constraint or index
func (r *Reader) convertKey(dctxKey *DCTXKey, table *models.Table, fieldGuidMap map[string]string) error {
	var columns []string
	// Extract column names from key components
	if len(dctxKey.Components) > 0 {
		for _, component := range dctxKey.Components {
			if fieldName, exists := fieldGuidMap[component.FieldId]; exists {
				columns = append(columns, fieldName)
			}
		}
	}
	// If no columns found, try to infer
	if len(columns) == 0 {
		if dctxKey.Primary {
			// Look for common primary key column patterns.
			// Note: map iteration order is random, so if several columns match,
			// the column picked here is not deterministic.
			for colName := range table.Columns {
				colNameLower := strings.ToLower(colName)
				if strings.HasPrefix(colNameLower, "rid_") || strings.HasSuffix(colNameLower, "id") {
					columns = append(columns, colName)
					break
				}
			}
		}
		// If still no columns, skip
		if len(columns) == 0 {
			return nil
		}
	}
	// Handle primary key
	if dctxKey.Primary {
		// Create primary key constraint
		constraint := models.InitConstraint(r.sanitizeName(dctxKey.Name), models.PrimaryKeyConstraint)
		constraint.Table = table.Name
		constraint.Schema = table.Schema
		constraint.Columns = columns
		table.Constraints[constraint.Name] = constraint
		// Mark columns as NOT NULL
		for _, colName := range columns {
			if col, exists := table.Columns[colName]; exists {
				col.NotNull = true
				col.IsPrimaryKey = true
			}
		}
		return nil
	}
	// Handle regular index
	index := models.InitIndex(r.sanitizeName(dctxKey.Name))
	index.Table = table.Name
	index.Schema = table.Schema
	index.Columns = columns
	index.Unique = dctxKey.Unique
	index.Type = "btree"
	table.Indexes[index.Name] = index
	return nil
}
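
// For example (hypothetical table), a primary key whose components carry no
// resolvable field GUIDs, defined on a table that has a column named
// "rid_customer", is inferred to cover that single column; a non-primary key
// in the same situation is skipped entirely.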

// processRelations processes DCTX relations and creates foreign keys
func (r *Reader) processRelations(dctx *DCTXDictionary, schema *models.Schema, tableGuidMap map[string]string, keyGuidMap map[string]*DCTXKey, fieldGuidMaps map[string]map[string]string) error {
	for _, relation := range dctx.Relations {
		// Get table names from GUIDs
		primaryTableName := tableGuidMap[relation.PrimaryTable]
		foreignTableName := tableGuidMap[relation.ForeignTable]
		if primaryTableName == "" || foreignTableName == "" {
			continue
		}
		// Find tables
		var primaryTable, foreignTable *models.Table
		for _, table := range schema.Tables {
			if table.Name == primaryTableName {
				primaryTable = table
			}
			if table.Name == foreignTableName {
				foreignTable = table
			}
		}
		if primaryTable == nil || foreignTable == nil {
			continue
		}
		var fkColumns, pkColumns []string
		// Try to use explicit field mappings
		if len(relation.ForeignMappings) > 0 && len(relation.PrimaryMappings) > 0 {
			foreignFieldMap := fieldGuidMaps[foreignTableName]
			primaryFieldMap := fieldGuidMaps[primaryTableName]
			for _, mapping := range relation.ForeignMappings {
				if fieldName, exists := foreignFieldMap[mapping.Field]; exists {
					fkColumns = append(fkColumns, fieldName)
				}
			}
			for _, mapping := range relation.PrimaryMappings {
				if fieldName, exists := primaryFieldMap[mapping.Field]; exists {
					pkColumns = append(pkColumns, fieldName)
				}
			}
		}
		// Validate columns exist
		if len(fkColumns) == 0 || len(pkColumns) == 0 {
			continue
		}
		allFkColumnsExist := true
		for _, colName := range fkColumns {
			if _, exists := foreignTable.Columns[colName]; !exists {
				allFkColumnsExist = false
				break
			}
		}
		if !allFkColumnsExist {
			continue
		}
		allPkColumnsExist := true
		for _, colName := range pkColumns {
			if _, exists := primaryTable.Columns[colName]; !exists {
				allPkColumnsExist = false
				break
			}
		}
		if !allPkColumnsExist {
			continue
		}
		// Create foreign key
		fkName := r.sanitizeName(fmt.Sprintf("fk_%s_%s", foreignTableName, primaryTableName))
		constraint := models.InitConstraint(fkName, models.ForeignKeyConstraint)
		constraint.Table = foreignTableName
		constraint.Schema = "public"
		constraint.Columns = fkColumns
		constraint.ReferencedTable = primaryTableName
		constraint.ReferencedSchema = "public"
		constraint.ReferencedColumns = pkColumns
		constraint.OnDelete = r.mapReferentialAction(relation.Delete)
		constraint.OnUpdate = r.mapReferentialAction(relation.Update)
		foreignTable.Constraints[fkName] = constraint
		// Create relationship
		relationshipName := fmt.Sprintf("%s_to_%s", foreignTableName, primaryTableName)
		relationship := models.InitRelationship(relationshipName, models.OneToMany)
		relationship.FromTable = primaryTableName
		relationship.FromSchema = "public"
		relationship.ToTable = foreignTableName
		relationship.ToSchema = "public"
		relationship.ForeignKey = fkName
		relationship.Properties["on_delete"] = constraint.OnDelete
		relationship.Properties["on_update"] = constraint.OnUpdate
		foreignTable.Relationships[relationshipName] = relationship
	}
	return nil
}
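
// As an illustration (table names are hypothetical), a relation whose primary
// table resolves to "customers" and whose foreign table resolves to "orders"
// yields a constraint named fk_orders_customers attached to orders, plus a
// one-to-many relationship named orders_to_customers.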

// mapReferentialAction maps DCTX referential actions to SQL syntax
func (r *Reader) mapReferentialAction(action string) string {
	switch strings.ToUpper(action) {
	case "RESTRICT", "RESTRICT_SERVER":
		return "RESTRICT"
	case "CASCADE", "CASCADE_SERVER":
		return "CASCADE"
	case "SET_NULL", "SET_NULL_SERVER":
		return "SET NULL"
	case "SET_DEFAULT", "SET_DEFAULT_SERVER":
		return "SET DEFAULT"
	case "NO_ACTION", "NO_ACTION_SERVER":
		return "NO ACTION"
	default:
		return "RESTRICT"
	}
}

// sanitizeName sanitizes a name to lowercase
func (r *Reader) sanitizeName(name string) string {
	return strings.ToLower(name)
}