package pgsql
import (
|
|
"regexp"
|
|
"strings"
|
|
|
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
|
)
// querySchemas retrieves all non-system schemas from the database
|
|
func (r *Reader) querySchemas() ([]*models.Schema, error) {
|
|
query := `
|
|
SELECT
|
|
nspname as schema_name,
|
|
obj_description(oid, 'pg_namespace') as description
|
|
FROM pg_namespace
|
|
WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
|
AND nspname NOT LIKE 'pg_temp_%'
|
|
AND nspname NOT LIKE 'pg_toast_temp_%'
|
|
ORDER BY nspname
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
schemas := make([]*models.Schema, 0)
|
|
for rows.Next() {
|
|
var name string
|
|
var description *string
|
|
|
|
if err := rows.Scan(&name, &description); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
schema := models.InitSchema(name)
|
|
if description != nil {
|
|
schema.Description = *description
|
|
}
|
|
|
|
schemas = append(schemas, schema)
|
|
}
|
|
|
|
return schemas, rows.Err()
|
|
}
|
|
|
|
// queryTables retrieves all tables for a given schema
|
|
func (r *Reader) queryTables(schemaName string) ([]*models.Table, error) {
|
|
query := `
|
|
SELECT
|
|
schemaname,
|
|
tablename,
|
|
obj_description((schemaname||'.'||tablename)::regclass, 'pg_class') as description
|
|
FROM pg_tables
|
|
WHERE schemaname = $1
|
|
ORDER BY tablename
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
tables := make([]*models.Table, 0)
|
|
for rows.Next() {
|
|
var schema, tableName string
|
|
var description *string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &description); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
table := models.InitTable(tableName, schema)
|
|
if description != nil {
|
|
table.Description = *description
|
|
}
|
|
|
|
tables = append(tables, table)
|
|
}
|
|
|
|
return tables, rows.Err()
|
|
}
|
|
|
|
// queryViews retrieves all views for a given schema
|
|
func (r *Reader) queryViews(schemaName string) ([]*models.View, error) {
|
|
query := `
|
|
SELECT
|
|
schemaname,
|
|
viewname,
|
|
definition,
|
|
obj_description((schemaname||'.'||viewname)::regclass, 'pg_class') as description
|
|
FROM pg_views
|
|
WHERE schemaname = $1
|
|
ORDER BY viewname
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
views := make([]*models.View, 0)
|
|
for rows.Next() {
|
|
var schema, viewName, definition string
|
|
var description *string
|
|
|
|
if err := rows.Scan(&schema, &viewName, &definition, &description); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
view := models.InitView(viewName, schema)
|
|
view.Definition = definition
|
|
if description != nil {
|
|
view.Description = *description
|
|
}
|
|
|
|
views = append(views, view)
|
|
}
|
|
|
|
return views, rows.Err()
|
|
}
|
|
|
|
// querySequences retrieves all sequences for a given schema
|
|
func (r *Reader) querySequences(schemaName string) ([]*models.Sequence, error) {
|
|
query := `
|
|
SELECT
|
|
ps.schemaname,
|
|
ps.sequencename,
|
|
ps.start_value,
|
|
ps.min_value,
|
|
ps.max_value,
|
|
ps.increment_by,
|
|
ps.cycle,
|
|
ps.cache_size,
|
|
obj_description((ps.schemaname||'.'||ps.sequencename)::regclass, 'pg_class') as description,
|
|
owner_table.relname as owned_by_table,
|
|
owner_column.attname as owned_by_column
|
|
FROM pg_sequences ps
|
|
LEFT JOIN pg_class seq_class ON seq_class.relname = ps.sequencename
|
|
AND seq_class.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = ps.schemaname)
|
|
LEFT JOIN pg_depend ON pg_depend.objid = seq_class.oid AND pg_depend.deptype = 'a'
|
|
LEFT JOIN pg_class owner_table ON pg_depend.refobjid = owner_table.oid
|
|
LEFT JOIN pg_attribute owner_column ON pg_depend.refobjid = owner_column.attrelid
|
|
AND pg_depend.refobjsubid = owner_column.attnum
|
|
WHERE ps.schemaname = $1
|
|
ORDER BY ps.sequencename
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
sequences := make([]*models.Sequence, 0)
|
|
for rows.Next() {
|
|
var schema, seqName string
|
|
var startValue, minValue, maxValue, incrementBy, cacheSize int64
|
|
var cycle bool
|
|
var description, tableName, columnName *string
|
|
|
|
if err := rows.Scan(&schema, &seqName, &startValue, &minValue, &maxValue, &incrementBy, &cycle, &cacheSize, &description, &tableName, &columnName); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
seq := models.InitSequence(seqName, schema)
|
|
seq.StartValue = startValue
|
|
seq.MinValue = minValue
|
|
seq.MaxValue = maxValue
|
|
seq.IncrementBy = incrementBy
|
|
seq.Cycle = cycle
|
|
seq.CacheSize = cacheSize
|
|
if description != nil {
|
|
seq.Description = *description
|
|
}
|
|
if tableName != nil {
|
|
seq.OwnedByTable = *tableName
|
|
}
|
|
if columnName != nil {
|
|
seq.OwnedByColumn = *columnName
|
|
}
|
|
|
|
sequences = append(sequences, seq)
|
|
}
|
|
|
|
return sequences, rows.Err()
|
|
}
|
|
|
|
// queryColumns retrieves all columns for tables and views in a schema
|
|
// Returns map[schema.table]map[columnName]*Column
|
|
func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.Column, error) {
|
|
query := `
|
|
SELECT
|
|
c.table_schema,
|
|
c.table_name,
|
|
c.column_name,
|
|
c.ordinal_position,
|
|
c.column_default,
|
|
c.is_nullable,
|
|
c.data_type,
|
|
c.character_maximum_length,
|
|
c.numeric_precision,
|
|
c.numeric_scale,
|
|
c.udt_name,
|
|
col_description((c.table_schema||'.'||c.table_name)::regclass, c.ordinal_position) as description
|
|
FROM information_schema.columns c
|
|
WHERE c.table_schema = $1
|
|
ORDER BY c.table_schema, c.table_name, c.ordinal_position
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
columnsMap := make(map[string]map[string]*models.Column)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, columnName, isNullable, dataType, udtName string
|
|
var ordinalPosition int
|
|
var columnDefault, description *string
|
|
var charMaxLength, numPrecision, numScale *int
|
|
|
|
if err := rows.Scan(&schema, &tableName, &columnName, &ordinalPosition, &columnDefault, &isNullable, &dataType, &charMaxLength, &numPrecision, &numScale, &udtName, &description); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
column := models.InitColumn(columnName, tableName, schema)
|
|
column.Type = r.mapDataType(dataType, udtName)
|
|
column.NotNull = (isNullable == "NO")
|
|
column.Sequence = uint(ordinalPosition)
|
|
|
|
if columnDefault != nil {
|
|
// Parse default value - remove nextval for sequences
|
|
defaultVal := *columnDefault
|
|
if strings.HasPrefix(defaultVal, "nextval") {
|
|
column.AutoIncrement = true
|
|
column.Default = defaultVal
|
|
} else {
|
|
column.Default = defaultVal
|
|
}
|
|
}
|
|
|
|
if description != nil {
|
|
column.Description = *description
|
|
}
|
|
|
|
if charMaxLength != nil {
|
|
column.Length = *charMaxLength
|
|
}
|
|
|
|
if numPrecision != nil {
|
|
column.Precision = *numPrecision
|
|
}
|
|
|
|
if numScale != nil {
|
|
column.Scale = *numScale
|
|
}
|
|
|
|
// Create table key
|
|
tableKey := schema + "." + tableName
|
|
if columnsMap[tableKey] == nil {
|
|
columnsMap[tableKey] = make(map[string]*models.Column)
|
|
}
|
|
columnsMap[tableKey][columnName] = column
|
|
}
|
|
|
|
return columnsMap, rows.Err()
|
|
}
|
|
|
|
// queryPrimaryKeys retrieves all primary key constraints for a schema
|
|
// Returns map[schema.table]*Constraint
|
|
func (r *Reader) queryPrimaryKeys(schemaName string) (map[string]*models.Constraint, error) {
|
|
query := `
|
|
SELECT
|
|
tc.table_schema,
|
|
tc.table_name,
|
|
tc.constraint_name,
|
|
array_agg(kcu.column_name ORDER BY kcu.ordinal_position) as columns
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.key_column_usage kcu
|
|
ON tc.constraint_name = kcu.constraint_name
|
|
AND tc.table_schema = kcu.table_schema
|
|
WHERE tc.constraint_type = 'PRIMARY KEY'
|
|
AND tc.table_schema = $1
|
|
GROUP BY tc.table_schema, tc.table_name, tc.constraint_name
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
primaryKeys := make(map[string]*models.Constraint)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, constraintName string
|
|
var columns []string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &constraintName, &columns); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
constraint := models.InitConstraint(constraintName, models.PrimaryKeyConstraint)
|
|
constraint.Schema = schema
|
|
constraint.Table = tableName
|
|
constraint.Columns = columns
|
|
|
|
tableKey := schema + "." + tableName
|
|
primaryKeys[tableKey] = constraint
|
|
}
|
|
|
|
return primaryKeys, rows.Err()
|
|
}
|
|
|
|
// queryForeignKeys retrieves all foreign key constraints for a schema
|
|
// Returns map[schema.table][]*Constraint
|
|
func (r *Reader) queryForeignKeys(schemaName string) (map[string][]*models.Constraint, error) {
|
|
query := `
|
|
SELECT
|
|
tc.table_schema,
|
|
tc.table_name,
|
|
tc.constraint_name,
|
|
kcu.table_schema as foreign_table_schema,
|
|
kcu.table_name as foreign_table_name,
|
|
kcu.column_name as foreign_column,
|
|
ccu.table_schema as referenced_table_schema,
|
|
ccu.table_name as referenced_table_name,
|
|
ccu.column_name as referenced_column,
|
|
rc.update_rule,
|
|
rc.delete_rule
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.key_column_usage kcu
|
|
ON tc.constraint_name = kcu.constraint_name
|
|
AND tc.table_schema = kcu.table_schema
|
|
JOIN information_schema.constraint_column_usage ccu
|
|
ON ccu.constraint_name = tc.constraint_name
|
|
JOIN information_schema.referential_constraints rc
|
|
ON rc.constraint_name = tc.constraint_name
|
|
AND rc.constraint_schema = tc.table_schema
|
|
WHERE tc.constraint_type = 'FOREIGN KEY'
|
|
AND tc.table_schema = $1
|
|
ORDER BY tc.table_schema, tc.table_name, tc.constraint_name, kcu.ordinal_position
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
// First pass: collect all FK data
|
|
type fkData struct {
|
|
schema string
|
|
tableName string
|
|
constraintName string
|
|
foreignColumns []string
|
|
referencedSchema string
|
|
referencedTable string
|
|
referencedColumns []string
|
|
updateRule string
|
|
deleteRule string
|
|
}
|
|
|
|
fkMap := make(map[string]*fkData)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, constraintName string
|
|
var foreignSchema, foreignTable, foreignColumn string
|
|
var referencedSchema, referencedTable, referencedColumn string
|
|
var updateRule, deleteRule string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &constraintName, &foreignSchema, &foreignTable, &foreignColumn, &referencedSchema, &referencedTable, &referencedColumn, &updateRule, &deleteRule); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
key := schema + "." + tableName + "." + constraintName
|
|
|
|
if _, exists := fkMap[key]; !exists {
|
|
fkMap[key] = &fkData{
|
|
schema: schema,
|
|
tableName: tableName,
|
|
constraintName: constraintName,
|
|
foreignColumns: []string{},
|
|
referencedSchema: referencedSchema,
|
|
referencedTable: referencedTable,
|
|
referencedColumns: []string{},
|
|
updateRule: updateRule,
|
|
deleteRule: deleteRule,
|
|
}
|
|
}
|
|
|
|
fkMap[key].foreignColumns = append(fkMap[key].foreignColumns, foreignColumn)
|
|
fkMap[key].referencedColumns = append(fkMap[key].referencedColumns, referencedColumn)
|
|
}
|
|
|
|
// Second pass: create constraints
|
|
foreignKeys := make(map[string][]*models.Constraint)
|
|
|
|
for _, fk := range fkMap {
|
|
constraint := models.InitConstraint(fk.constraintName, models.ForeignKeyConstraint)
|
|
constraint.Schema = fk.schema
|
|
constraint.Table = fk.tableName
|
|
constraint.Columns = fk.foreignColumns
|
|
constraint.ReferencedSchema = fk.referencedSchema
|
|
constraint.ReferencedTable = fk.referencedTable
|
|
constraint.ReferencedColumns = fk.referencedColumns
|
|
constraint.OnUpdate = fk.updateRule
|
|
constraint.OnDelete = fk.deleteRule
|
|
|
|
tableKey := fk.schema + "." + fk.tableName
|
|
foreignKeys[tableKey] = append(foreignKeys[tableKey], constraint)
|
|
}
|
|
|
|
return foreignKeys, rows.Err()
|
|
}
|
|
|
|
// queryUniqueConstraints retrieves all unique constraints for a schema
|
|
// Returns map[schema.table][]*Constraint
|
|
func (r *Reader) queryUniqueConstraints(schemaName string) (map[string][]*models.Constraint, error) {
|
|
query := `
|
|
SELECT
|
|
tc.table_schema,
|
|
tc.table_name,
|
|
tc.constraint_name,
|
|
array_agg(kcu.column_name ORDER BY kcu.ordinal_position) as columns
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.key_column_usage kcu
|
|
ON tc.constraint_name = kcu.constraint_name
|
|
AND tc.table_schema = kcu.table_schema
|
|
WHERE tc.constraint_type = 'UNIQUE'
|
|
AND tc.table_schema = $1
|
|
GROUP BY tc.table_schema, tc.table_name, tc.constraint_name
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
uniqueConstraints := make(map[string][]*models.Constraint)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, constraintName string
|
|
var columns []string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &constraintName, &columns); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
constraint := models.InitConstraint(constraintName, models.UniqueConstraint)
|
|
constraint.Schema = schema
|
|
constraint.Table = tableName
|
|
constraint.Columns = columns
|
|
|
|
tableKey := schema + "." + tableName
|
|
uniqueConstraints[tableKey] = append(uniqueConstraints[tableKey], constraint)
|
|
}
|
|
|
|
return uniqueConstraints, rows.Err()
|
|
}
|
|
|
|
// queryCheckConstraints retrieves all check constraints for a schema
|
|
// Returns map[schema.table][]*Constraint
|
|
func (r *Reader) queryCheckConstraints(schemaName string) (map[string][]*models.Constraint, error) {
|
|
query := `
|
|
SELECT
|
|
tc.table_schema,
|
|
tc.table_name,
|
|
tc.constraint_name,
|
|
cc.check_clause
|
|
FROM information_schema.table_constraints tc
|
|
JOIN information_schema.check_constraints cc
|
|
ON tc.constraint_name = cc.constraint_name
|
|
WHERE tc.constraint_type = 'CHECK'
|
|
AND tc.table_schema = $1
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
checkConstraints := make(map[string][]*models.Constraint)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, constraintName, checkClause string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &constraintName, &checkClause); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
constraint := models.InitConstraint(constraintName, models.CheckConstraint)
|
|
constraint.Schema = schema
|
|
constraint.Table = tableName
|
|
constraint.Expression = checkClause
|
|
|
|
tableKey := schema + "." + tableName
|
|
checkConstraints[tableKey] = append(checkConstraints[tableKey], constraint)
|
|
}
|
|
|
|
return checkConstraints, rows.Err()
|
|
}
|
|
|
|
// queryIndexes retrieves all indexes for a schema
|
|
// Returns map[schema.table][]*Index
|
|
func (r *Reader) queryIndexes(schemaName string) (map[string][]*models.Index, error) {
|
|
query := `
|
|
SELECT
|
|
schemaname,
|
|
tablename,
|
|
indexname,
|
|
indexdef
|
|
FROM pg_indexes
|
|
WHERE schemaname = $1
|
|
ORDER BY schemaname, tablename, indexname
|
|
`
|
|
|
|
rows, err := r.conn.Query(r.ctx, query, schemaName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
indexes := make(map[string][]*models.Index)
|
|
|
|
for rows.Next() {
|
|
var schema, tableName, indexName, indexDef string
|
|
|
|
if err := rows.Scan(&schema, &tableName, &indexName, &indexDef); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
index, err := r.parseIndexDefinition(indexName, tableName, schema, indexDef)
|
|
if err != nil {
|
|
// If parsing fails, create a basic index
|
|
index = models.InitIndex(indexName, tableName, schema)
|
|
index.Table = tableName
|
|
index.Schema = schema
|
|
}
|
|
|
|
tableKey := schema + "." + tableName
|
|
indexes[tableKey] = append(indexes[tableKey], index)
|
|
}
|
|
|
|
return indexes, rows.Err()
|
|
}
|
|
|
|
// parseIndexDefinition parses a PostgreSQL index definition
|
|
func (r *Reader) parseIndexDefinition(indexName, tableName, schema, indexDef string) (*models.Index, error) {
|
|
index := models.InitIndex(indexName, tableName, schema)
|
|
index.Table = tableName
|
|
index.Schema = schema
|
|
|
|
// Check if unique
|
|
if strings.Contains(strings.ToUpper(indexDef), "UNIQUE") {
|
|
index.Unique = true
|
|
}
|
|
|
|
// Extract index method (USING btree, hash, gin, gist, etc.)
|
|
usingRegex := regexp.MustCompile(`USING\s+(\w+)`)
|
|
if matches := usingRegex.FindStringSubmatch(indexDef); len(matches) > 1 {
|
|
index.Type = strings.ToLower(matches[1])
|
|
} else {
|
|
index.Type = "btree" // default
|
|
}
|
|
|
|
// Extract columns - pattern: (column1, column2, ...)
|
|
columnsRegex := regexp.MustCompile(`\(([^)]+)\)`)
|
|
if matches := columnsRegex.FindStringSubmatch(indexDef); len(matches) > 1 {
|
|
columnsStr := matches[1]
|
|
// Split by comma and clean up
|
|
columnParts := strings.Split(columnsStr, ",")
|
|
for _, col := range columnParts {
|
|
col = strings.TrimSpace(col)
|
|
// Remove any ordering (ASC/DESC) or other modifiers
|
|
col = strings.Fields(col)[0]
|
|
// Remove parentheses if it's an expression
|
|
if !strings.Contains(col, "(") {
|
|
index.Columns = append(index.Columns, col)
|
|
}
|
|
}
|
|
}
|
|
|
|
// Extract WHERE clause for partial indexes
|
|
whereRegex := regexp.MustCompile(`WHERE\s+(.+)$`)
|
|
if matches := whereRegex.FindStringSubmatch(indexDef); len(matches) > 1 {
|
|
index.Where = strings.TrimSpace(matches[1])
|
|
}
|
|
|
|
return index, nil
|
|
}
|