feature: Inspector Gadget
Some checks failed
CI / Test (1.24) (push) Successful in -25m44s
CI / Test (1.25) (push) Successful in -25m40s
CI / Build (push) Successful in -25m53s
CI / Lint (push) Successful in -25m45s
Integration Tests / Integration Tests (push) Failing after -26m2s

This commit is contained in:
2025-12-31 01:40:08 +02:00
parent adfe126758
commit 97a57f5dc8
12 changed files with 2762 additions and 12 deletions

View File

@@ -6,11 +6,13 @@ RelSpec is a comprehensive database relations management tool that reads, transf
## Overview
RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
- Inspect live databases and extract their structure
- Convert between different ORM models (GORM, Bun , etc.)
- Validate schemas against configurable rules and naming conventions
- Convert between different ORM models (GORM, Bun, etc.)
- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
- Generate standardized specification files (JSON, YAML, etc.)
- Compare database schemas and track changes
![1.00](./assets/image/relspec1_512.jpg)
@@ -60,6 +62,19 @@ RelSpec can write database schemas to multiple formats:
- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
### Inspector (Schema Validation)
RelSpec includes a powerful schema validation and linting tool:
- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
- Enforce naming conventions (snake_case, camelCase, custom patterns)
- Check primary key and foreign key standards
- Detect missing indexes on foreign keys
- Prevent use of SQL reserved keywords
- Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
- Support for custom validation rules
- Multiple output formats (Markdown with colors, JSON)
- CI/CD integration ready
## Use of AI
[Rules and use of AI](./AI_USE.md)
@@ -74,30 +89,65 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
## Usage
### Schema Conversion
```bash
# Inspect database and generate GORM models
relspec --input db --conn "postgres://..." --output gorm --out-file models.go
# Convert PostgreSQL database to GORM models
relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
--to gorm --to-path models/ --package models
# Convert GORM models to Bun
relspec --input gorm --in-file existing.go --output bun --out-file bun_models.go
relspec convert --from gorm --from-path models.go \
--to bun --to-path bun_models.go --package models
# Export database schema to JSON
relspec --input db --conn "mysql://..." --output json --out-file schema.json
relspec convert --from pgsql --from-conn "postgres://..." \
--to json --to-path schema.json
# Convert Clarion DCTX to YAML
relspec --input dctx --in-file legacy.dctx --output yaml --out-file schema.yaml
# Convert DBML to PostgreSQL SQL
relspec convert --from dbml --from-path schema.dbml \
--to pgsql --to-path schema.sql
```
### Schema Validation
```bash
# Validate a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
# Validate DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
# Generate JSON validation report
relspec inspect --from json --from-path db.json \
--output-format json --output report.json
# Validate specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
```
### Schema Comparison
```bash
# Compare two database schemas
relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
--to pgsql --to-conn "postgres://localhost/db2"
```
## Project Structure
```
relspecgo/
├── cmd/ # CLI application
├── cmd/
│ └── relspec/ # CLI application (convert, inspect, diff, scripts)
├── pkg/
│ ├── readers/ # Input format readers
│ ├── writers/ # Output format writers
│ ├── readers/ # Input format readers (DBML, GORM, PostgreSQL, etc.)
│ ├── writers/ # Output format writers (GORM, Bun, SQL, etc.)
│ ├── inspector/ # Schema validation and linting
│ ├── diff/ # Schema comparison
│ ├── models/ # Internal data models
│ └── transform/ # Transformation logic
│ ├── transform/ # Transformation logic
│ └── pgsql/ # PostgreSQL utilities (keywords, data types)
├── examples/ # Usage examples
└── tests/ # Test files
```

View File

@@ -2,6 +2,7 @@
## Input Readers / Writers
- [✔️] **Database Inspector**
- [✔️] PostgreSQL driver
- [ ] MySQL driver

321
cmd/relspec/inspect.go Normal file
View File

@@ -0,0 +1,321 @@
package main
import (
"fmt"
"os"
"strings"
"github.com/spf13/cobra"
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)
// Flag storage for the `inspect` subcommand; bound to cobra flags in init().
var (
inspectSourceType string // --from: source format identifier (dbml, pgsql, json, ...)
inspectSourcePath string // --from-path: file or directory for file-based formats
inspectSourceConn string // --from-conn: connection string for live-database formats
inspectRulesPath string // --rules: rules YAML path; defaults are used if the file is absent
inspectOutputFormat string // --output-format: "markdown" (default) or "json"
inspectOutputPath string // --output: report destination; empty means stdout
inspectSchemaFilter string // --schema: restrict validation to a single schema by name
)
// inspectCmd implements `relspec inspect`: read a schema from any supported
// source format, validate it against configurable rules, and render a report.
// RunE (runInspect) returns a non-nil error — and therefore a non-zero exit
// code — when the report contains error-level violations.
var inspectCmd = &cobra.Command{
Use: "inspect",
Short: "Inspect and validate database schemas against rules",
Long: `Inspect database schemas from various formats and validate against configurable rules.
Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and generates validation reports.
Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection)
Output formats:
- markdown: Human-readable markdown report (default, with ANSI colors for terminal)
- json: JSON report for tooling integration
PostgreSQL Connection String Examples:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
Examples:
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml
# Inspect and output JSON report to file
relspec inspect --from json --from-path db.json \
--output-format json --output report.json
# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public`,
RunE: runInspect,
}
// init registers the inspect command's flags and marks --from as required.
func init() {
	flags := inspectCmd.Flags()
	flags.StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	flags.StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
	flags.StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
	flags.StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
	flags.StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
	flags.StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
	flags.StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")
	// MarkFlagRequired only fails for unknown flag names; report it but do not abort.
	if err := inspectCmd.MarkFlagRequired("from"); err != nil {
		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
	}
}
// runInspect executes the inspect command: read the source schema, load the
// rules configuration, run validation, and emit a formatted report.
//
// Progress messages go to stderr so the report itself (stdout or --output
// file) stays machine-consumable. Returns a non-nil error — and therefore a
// non-zero process exit code — when the report contains error-level
// violations, when reading/formatting fails, or when flags are invalid.
//
// BUG FIX: previously the markdown branch created the output file with
// os.Create (only to let the formatter detect a non-terminal destination) and
// then created/truncated the very same path a second time via os.WriteFile.
// The destination is now opened exactly once and written through that handle.
// The output format is also validated up front so an unsupported
// --output-format never leaves an empty output file behind.
func runInspect(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// [1/3] Read source database.
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
	if inspectSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
	}
	if inspectSourceConn != "" {
		// Mask credentials before echoing the connection string.
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
	}
	db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}
	// Apply the optional --schema filter.
	if inspectSchemaFilter != "" {
		db = filterDatabaseBySchema(db, inspectSchemaFilter)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
	totalTables := 0
	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)

	// [2/3] Load rules configuration (defaults are used if the file is absent).
	fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
	fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)
	config, err := inspector.LoadConfig(inspectRulesPath)
	if err != nil {
		return fmt.Errorf("failed to load rules config: %w", err)
	}
	enabledCount := 0
	for _, rule := range config.Rules {
		if rule.IsEnabled() {
			enabledCount++
		}
	}
	fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)

	// [3/3] Run the inspection.
	fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")
	insp := inspector.NewInspector(db, config)
	report, err := insp.Inspect()
	if err != nil {
		return fmt.Errorf("inspection failed: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
	fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
	fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)

	// Validate the requested format BEFORE creating the output file.
	format := strings.ToLower(inspectOutputFormat)
	switch format {
	case "json", "markdown", "md":
	default:
		return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
	}

	// Open the destination exactly once; the markdown formatter uses the
	// handle for terminal detection (no ANSI colors when it is not a TTY).
	output := os.Stdout
	if inspectOutputPath != "" {
		output, err = os.Create(inspectOutputPath)
		if err != nil {
			return fmt.Errorf("failed to create output file: %w", err)
		}
		defer output.Close()
	}

	// Format the report.
	var formattedReport string
	switch format {
	case "json":
		formatter := inspector.NewJSONFormatter()
		formattedReport, err = formatter.Format(report)
	default: // "markdown", "md"
		formatter := inspector.NewMarkdownFormatter(output)
		formattedReport, err = formatter.Format(report)
	}
	if err != nil {
		return fmt.Errorf("failed to format report: %w", err)
	}

	// Write the report through the already-open handle.
	if inspectOutputPath != "" {
		if _, werr := output.WriteString(formattedReport); werr != nil {
			return fmt.Errorf("failed to write output file: %w", werr)
		}
		fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
	} else {
		fmt.Println(formattedReport)
	}

	fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

	// Error-level violations map to a non-zero exit code for CI use.
	if report.HasErrors() {
		return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
	}
	return nil
}
// readDatabaseForInspect constructs the reader matching dbType, verifies that
// the required source argument (file path for file-based formats, connection
// string for PostgreSQL) was supplied, and returns the parsed database model.
// dbType matching is case-insensitive; "yml" aliases "yaml" and
// "postgres"/"postgresql" alias "pgsql".
func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
	// Each file-based format pairs a display name (for error messages) with
	// its reader constructor.
	type fileFormat struct {
		display string
		build   func(*readers.ReaderOptions) readers.Reader
	}
	fileFormats := map[string]fileFormat{
		"dbml":    {"DBML", func(o *readers.ReaderOptions) readers.Reader { return dbml.NewReader(o) }},
		"dctx":    {"DCTX", func(o *readers.ReaderOptions) readers.Reader { return dctx.NewReader(o) }},
		"drawdb":  {"DrawDB", func(o *readers.ReaderOptions) readers.Reader { return drawdb.NewReader(o) }},
		"graphql": {"GraphQL", func(o *readers.ReaderOptions) readers.Reader { return graphql.NewReader(o) }},
		"json":    {"JSON", func(o *readers.ReaderOptions) readers.Reader { return json.NewReader(o) }},
		"yaml":    {"YAML", func(o *readers.ReaderOptions) readers.Reader { return yaml.NewReader(o) }},
		"yml":     {"YAML", func(o *readers.ReaderOptions) readers.Reader { return yaml.NewReader(o) }},
		"gorm":    {"GORM", func(o *readers.ReaderOptions) readers.Reader { return gorm.NewReader(o) }},
		"bun":     {"Bun", func(o *readers.ReaderOptions) readers.Reader { return bun.NewReader(o) }},
		"drizzle": {"Drizzle", func(o *readers.ReaderOptions) readers.Reader { return drizzle.NewReader(o) }},
		"prisma":  {"Prisma", func(o *readers.ReaderOptions) readers.Reader { return prisma.NewReader(o) }},
		"typeorm": {"TypeORM", func(o *readers.ReaderOptions) readers.Reader { return typeorm.NewReader(o) }},
	}

	var reader readers.Reader
	switch key := strings.ToLower(dbType); key {
	case "pgsql", "postgres", "postgresql":
		// Live-database format: needs a connection string, not a file path.
		if connString == "" {
			return nil, fmt.Errorf("connection string is required for PostgreSQL format")
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		ff, ok := fileFormats[key]
		if !ok {
			return nil, fmt.Errorf("unsupported database type: %s", dbType)
		}
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for %s format", ff.display)
		}
		reader = ff.build(&readers.ReaderOptions{FilePath: filePath})
	}
	return reader.ReadDatabase()
}
// filterDatabaseBySchema returns a shallow copy of db whose Schemas slice
// contains at most the first schema named schemaName (empty, never nil, when
// nothing matches). Top-level database metadata is carried over unchanged;
// the matched schema is shared with the original, not deep-copied.
func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
	out := &models.Database{
		Name:            db.Name,
		Description:     db.Description,
		DatabaseType:    db.DatabaseType,
		DatabaseVersion: db.DatabaseVersion,
		SourceFormat:    db.SourceFormat,
		Schemas:         []*models.Schema{},
	}
	for _, s := range db.Schemas {
		if s.Name != schemaName {
			continue
		}
		out.Schemas = append(out.Schemas, s)
		break // only the first match is kept
	}
	return out
}

View File

@@ -18,5 +18,6 @@ JSON, YAML, SQL, etc.).`,
func init() {
rootCmd.AddCommand(convertCmd)
rootCmd.AddCommand(diffCmd)
rootCmd.AddCommand(inspectCmd)
rootCmd.AddCommand(scriptsCmd)
}

View File

@@ -0,0 +1,177 @@
# RelSpec Inspector Rules Configuration Example
# Copy this file to .relspec-rules.yaml and customize as needed
version: "1.0"
rules:
# ============================================================================
# PRIMARY KEY RULES
# ============================================================================
# Validate primary key column naming convention
primary_key_naming:
enabled: warn # enforce|warn|off
function: primary_key_naming
pattern: "^id_" # Regex pattern - PK columns must start with "id_"
message: "Primary key columns should start with 'id_'"
# Validate primary key data types
primary_key_datatype:
enabled: warn
function: primary_key_datatype
allowed_types:
- bigserial
- bigint
- int
- serial
- integer
- int4
- int8
message: "Primary keys should use integer types (bigserial, bigint, int, serial)"
# Check if primary keys have auto-increment enabled
primary_key_auto_increment:
enabled: "off" # Often disabled as not all PKs need auto-increment (quoted: bare off is a YAML 1.1 boolean)
function: primary_key_auto_increment
require_auto_increment: true
message: "Primary key without auto-increment detected"
# ============================================================================
# FOREIGN KEY RULES
# ============================================================================
# Validate foreign key column naming convention
foreign_key_column_naming:
enabled: warn
function: foreign_key_column_naming
pattern: "^rid_" # FK columns must start with "rid_" (referenced id)
message: "Foreign key columns should start with 'rid_'"
# Validate foreign key constraint naming convention
foreign_key_constraint_naming:
enabled: warn
function: foreign_key_constraint_naming
pattern: "^fk_" # FK constraints must start with "fk_"
message: "Foreign key constraint names should start with 'fk_'"
# Ensure foreign key columns have indexes for performance
foreign_key_index:
enabled: warn
function: foreign_key_index
require_index: true
message: "Foreign key columns should have indexes for optimal performance"
# ============================================================================
# NAMING CONVENTION RULES
# ============================================================================
# Validate table naming follows snake_case convention
table_naming_case:
enabled: warn
function: table_regexpr # Generic regex validator for table names
case: lowercase
pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
message: "Table names should be lowercase with underscores (snake_case)"
# Validate column naming follows snake_case convention
column_naming_case:
enabled: warn
function: column_regexpr # Generic regex validator for column names
case: lowercase
pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
message: "Column names should be lowercase with underscores (snake_case)"
# ============================================================================
# LENGTH RULES
# ============================================================================
# Limit table name length (PostgreSQL max is 63, but 64 is common practice)
table_name_length:
enabled: warn
function: table_name_length
max_length: 64
message: "Table name exceeds recommended maximum length of 64 characters"
# Limit column name length
column_name_length:
enabled: warn
function: column_name_length
max_length: 64
message: "Column name exceeds recommended maximum length of 64 characters"
# ============================================================================
# RESERVED KEYWORDS
# ============================================================================
# Warn about using SQL reserved keywords as identifiers
reserved_keywords:
enabled: warn
function: reserved_words
check_tables: true
check_columns: true
message: "Using SQL reserved keywords as identifiers can cause issues"
# ============================================================================
# SCHEMA INTEGRITY RULES
# ============================================================================
# Ensure all tables have primary keys
missing_primary_key:
enabled: warn
function: have_primary_key
message: "Table is missing a primary key"
# Detect orphaned foreign keys (referencing non-existent tables)
orphaned_foreign_key:
enabled: warn
function: orphaned_foreign_key
message: "Foreign key references a non-existent table"
# Detect circular foreign key dependencies
circular_dependency:
enabled: warn
function: circular_dependency
message: "Circular foreign key dependency detected"
# ============================================================================
# RULE CONFIGURATION NOTES
# ============================================================================
#
# enabled: Controls rule enforcement level
# - enforce: Violations are errors (exit code 1)
# - warn: Violations are warnings (exit code 0)
# - off: Rule is disabled
#
# function: The validation function to execute
# - Must match a registered validator function
# - Generic functions like table_regexpr and column_regexpr can be reused
#
# pattern: Regular expression for pattern matching
# - Used by naming validators
# - Must be valid Go regex syntax
#
# message: Custom message shown when rule is violated
# - Should be clear and actionable
# - Explains what the violation is and how to fix it
#
# ============================================================================
# CUSTOM RULES EXAMPLES
# ============================================================================
#
# You can add custom rules using the generic validator functions:
#
# # Example: Ensure table names don't contain numbers
# table_no_numbers:
# enabled: warn
# function: table_regexpr
# pattern: "^[a-z_]+$"
# message: "Table names should not contain numbers"
#
# # Example: Audit columns must end with _audit
# audit_column_suffix:
# enabled: enforce
# function: column_regexpr
# pattern: ".*_audit$"
# message: "Audit columns must end with '_audit'"
#
# ============================================================================

472
pkg/inspector/PLAN.md Normal file
View File

@@ -0,0 +1,472 @@
# Inspector Feature Implementation Plan
## Overview
Add a model inspection feature that validates database schemas against configurable rules. The inspector will read any supported format, apply validation rules from a YAML config, and output a report in markdown or JSON format.
## Architecture
### Core Components
1. **CLI Command** (`cmd/relspec/inspect.go`)
- New subcommand: `relspec inspect`
- Flags:
- `--from` (required): Input format (dbml, pgsql, json, etc.)
- `--from-path`: File path for file-based formats
- `--from-conn`: Connection string for database formats
- `--rules` (optional): Path to rules YAML file (default: `.relspec-rules.yaml`)
- `--output-format`: Report format (markdown, json) (default: markdown)
- `--output`: Output file path (default: stdout)
- `--schema`: Schema name filter (optional)
2. **Inspector Package** (`pkg/inspector/`)
- `inspector.go`: Main inspector logic
- `rules.go`: Rule definitions and configuration
- `validators.go`: Individual validation rule implementations
- `report.go`: Report generation (markdown, JSON)
- `config.go`: YAML config loading and parsing
### Data Flow
```
Input Format → Reader → Database Model → Inspector → Validation Results → Report Formatter → Output
```
## Rules Configuration Structure
### YAML Schema (`rules.yaml`)
```yaml
version: "1.0"
rules:
# Primary Key Rules
primary_key_naming:
enabled: enforce|warn|off
pattern: "^id_" # regex pattern
message: "Primary key columns must start with 'id_'"
primary_key_datatype:
enabled: enforce|warn|off
allowed_types: ["bigserial", "bigint", "int", "serial", "integer"]
message: "Primary keys must use approved integer types"
primary_key_auto_increment:
enabled: enforce|warn|off
require_auto_increment: true|false
message: "Primary keys without auto-increment detected"
# Foreign Key Rules
foreign_key_column_naming:
enabled: enforce|warn|off
pattern: "^rid_"
message: "Foreign key columns must start with 'rid_'"
foreign_key_constraint_naming:
enabled: enforce|warn|off
pattern: "^fk_"
message: "Foreign key constraint names must start with 'fk_'"
foreign_key_index:
enabled: enforce|warn|off
require_index: true
message: "Foreign keys should have indexes"
# Naming Convention Rules
table_naming_case:
enabled: enforce|warn|off
case: "lowercase" # lowercase, uppercase, snake_case, camelCase
pattern: "^[a-z][a-z0-9_]*$"
message: "Table names must be lowercase with underscores"
column_naming_case:
enabled: enforce|warn|off
case: "lowercase"
pattern: "^[a-z][a-z0-9_]*$"
message: "Column names must be lowercase with underscores"
# Length Rules
table_name_length:
enabled: enforce|warn|off
max_length: 64
message: "Table name exceeds maximum length"
column_name_length:
enabled: enforce|warn|off
max_length: 64
message: "Column name exceeds maximum length"
# Reserved Keywords
reserved_keywords:
enabled: enforce|warn|off
check_tables: true
check_columns: true
message: "Using reserved SQL keywords"
# Schema Integrity Rules
missing_primary_key:
enabled: enforce|warn|off
message: "Table missing primary key"
orphaned_foreign_key:
enabled: enforce|warn|off
message: "Foreign key references non-existent table"
circular_dependency:
enabled: enforce|warn|off
message: "Circular foreign key dependency detected"
```
### Rule Levels
- **enforce**: Violations are errors (exit code 1)
- **warn**: Violations are warnings (exit code 0)
- **off**: Rule disabled
## Implementation Details
### 1. Inspector Core (`pkg/inspector/inspector.go`)
```go
type Inspector struct {
config *Config
db *models.Database
}
type ValidationResult struct {
RuleName string
Level string // "error" or "warning"
Message string
Location string // e.g., "schema.table.column"
Context map[string]interface{}
Passed bool
}
type InspectorReport struct {
Summary ReportSummary
Violations []ValidationResult
GeneratedAt time.Time
Database string
SourceFormat string
}
type ReportSummary struct {
TotalRules int
RulesChecked int
RulesSkipped int
ErrorCount int
WarningCount int
PassedCount int
}
func NewInspector(db *models.Database, config *Config) *Inspector
func (i *Inspector) Inspect() (*InspectorReport, error)
func (i *Inspector) validateDatabase() []ValidationResult
func (i *Inspector) validateSchema(schema *models.Schema) []ValidationResult
func (i *Inspector) validateTable(table *models.Table) []ValidationResult
```
### 2. Rule Definitions (`pkg/inspector/rules.go`)
```go
type Config struct {
Version string
Rules map[string]Rule
}
type Rule struct {
Enabled string // "enforce", "warn", "off"
Message string
Pattern string
AllowedTypes []string
MaxLength int
Case string
RequireIndex bool
CheckTables bool
CheckColumns bool
// ... rule-specific fields
}
type RuleValidator interface {
Name() string
Validate(db *models.Database, rule Rule) []ValidationResult
}
func LoadConfig(path string) (*Config, error)
func GetDefaultConfig() *Config
```
**Configuration Loading Behavior:**
- If `--rules` flag is provided but file not found: Use default configuration (don't error)
- If file exists but is invalid YAML: Return error
- Default configuration has sensible rules enabled at "warn" level
- Users can override by creating their own `.relspec-rules.yaml` file
### 3. Validators (`pkg/inspector/validators.go`)
Each validator implements rule logic:
```go
// Primary Key Validators
func validatePrimaryKeyNaming(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyDatatype(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule) []ValidationResult
// Foreign Key Validators
func validateForeignKeyColumnNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyIndex(db *models.Database, rule Rule) []ValidationResult
// Naming Convention Validators
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult
func validateColumnNamingCase(db *models.Database, rule Rule) []ValidationResult
// Length Validators
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult
func validateColumnNameLength(db *models.Database, rule Rule) []ValidationResult
// Reserved Keywords Validator
func validateReservedKeywords(db *models.Database, rule Rule) []ValidationResult
// Integrity Validators
func validateMissingPrimaryKey(db *models.Database, rule Rule) []ValidationResult
func validateOrphanedForeignKey(db *models.Database, rule Rule) []ValidationResult
func validateCircularDependency(db *models.Database, rule Rule) []ValidationResult
// Registry of all validators
var validators = map[string]RuleValidator{
"primary_key_naming": primaryKeyNamingValidator{},
// ...
}
```
### 4. Report Formatting (`pkg/inspector/report.go`)
```go
type ReportFormatter interface {
Format(report *InspectorReport) (string, error)
}
type MarkdownFormatter struct {
UseColors bool // ANSI colors for terminal output
}
type JSONFormatter struct{}
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error)
func (f *JSONFormatter) Format(report *InspectorReport) (string, error)
// Helper to detect if output is a TTY (terminal)
func isTerminal(w io.Writer) bool
```
**Output Behavior:**
- Markdown format will use ANSI color codes when outputting to a terminal (TTY)
- When piped or redirected to a file, plain markdown without colors
- Colors: Red for errors, Yellow for warnings, Green for passed checks
**Markdown Format Example:**
```markdown
# RelSpec Inspector Report
**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31 10:30:45
## Summary
- Rules Checked: 12
- Errors: 3
- Warnings: 5
- Passed: 4
## Violations
### Errors (3)
#### primary_key_naming
**Table:** users, **Column:** user_id
Primary key columns must start with 'id_'
#### table_name_length
**Table:** user_authentication_sessions_with_metadata
Table name exceeds maximum length (64 characters)
### Warnings (5)
#### foreign_key_index
**Table:** orders, **Column:** customer_id
Foreign keys should have indexes
...
```
**JSON Format Example:**
```json
{
"summary": {
"total_rules": 12,
"rules_checked": 12,
"error_count": 3,
"warning_count": 5,
"passed_count": 4
},
"violations": [
{
"rule_name": "primary_key_naming",
"level": "error",
"message": "Primary key columns must start with 'id_'",
"location": "public.users.user_id",
"context": {
"schema": "public",
"table": "users",
"column": "user_id",
"current_name": "user_id",
"expected_pattern": "^id_"
},
"passed": false
}
],
"generated_at": "2025-12-31T10:30:45Z",
"database": "my_database",
"source_format": "pgsql"
}
```
### 5. CLI Command (`cmd/relspec/inspect.go`)
```go
var inspectCmd = &cobra.Command{
Use: "inspect",
Short: "Inspect and validate database schemas against rules",
Long: `Read database schemas from various formats and validate against configurable rules.`,
RunE: runInspect,
}
func init() {
inspectCmd.Flags().String("from", "", "Input format (dbml, pgsql, json, etc.)")
inspectCmd.Flags().String("from-path", "", "Input file path")
inspectCmd.Flags().String("from-conn", "", "Database connection string")
inspectCmd.Flags().String("rules", ".relspec-rules.yaml", "Rules configuration file")
inspectCmd.Flags().String("output-format", "markdown", "Output format (markdown, json)")
inspectCmd.Flags().String("output", "", "Output file (default: stdout)")
inspectCmd.Flags().String("schema", "", "Filter by schema name")
inspectCmd.MarkFlagRequired("from")
}
func runInspect(cmd *cobra.Command, args []string) error {
// 1. Parse flags
// 2. Create reader (reuse pattern from convert.go)
// 3. Read database
// 4. Load rules config (use defaults if file not found)
// 5. Create inspector
// 6. Run inspection
// 7. Detect if output is terminal (for color support)
// 8. Format report (with/without ANSI colors)
// 9. Write output
// 10. Exit with appropriate code (0 if no errors, 1 if errors)
}
```
## Implementation Phases
### Phase 1: Core Infrastructure
1. Create `pkg/inspector/` package structure
2. Implement `Config` and YAML loading
3. Implement `Inspector` core with basic validation framework
4. Create CLI command skeleton
### Phase 2: Basic Validators
1. Implement naming convention validators
- Primary key naming
- Foreign key column naming
- Foreign key constraint naming
- Table/column case validation
2. Implement length validators
3. Implement reserved keywords validator (leverage `pkg/pgsql/keywords.go`)
### Phase 3: Advanced Validators
1. Implement datatype validators
2. Implement integrity validators (missing PK, orphaned FK, circular deps)
3. Implement foreign key index validator
### Phase 4: Reporting
1. Implement `InspectorReport` structure
2. Implement markdown formatter
3. Implement JSON formatter
4. Add summary statistics
### Phase 5: CLI Integration
1. Wire up CLI command with flags
2. Integrate reader factory (from convert.go pattern)
3. Add output file handling
4. Add exit code logic
5. Add progress reporting
### Phase 6: Testing & Documentation
1. Unit tests for validators
2. Integration tests with sample schemas
3. Test with all reader formats
4. Update README with inspector documentation
5. Create example rules configuration file
## Files to Create
1. `pkg/inspector/inspector.go` - Core inspector logic
2. `pkg/inspector/rules.go` - Rule definitions and config loading
3. `pkg/inspector/validators.go` - Validation implementations
4. `pkg/inspector/report.go` - Report formatting
5. `pkg/inspector/config.go` - Config utilities
6. `cmd/relspec/inspect.go` - CLI command
7. `.relspec-rules.yaml.example` - Example configuration
8. `pkg/inspector/inspector_test.go` - Tests
## Files to Modify
1. `cmd/relspec/root.go` - Register inspect command
2. `README.md` - Add inspector documentation (if requested)
## Example Usage
```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgresql://localhost/mydb"
# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml
# Output JSON report to file
relspec inspect --from json --from-path db.json --output-format json --output report.json
# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
# Use custom rules location
relspec inspect --from dbml --from-path schema.dbml --rules /path/to/rules.yaml
```
## Exit Codes
- 0: Success (no errors, only warnings or all passed)
- 1: Validation errors found (rules with level="enforce" failed)
- 2: Runtime error (invalid config, reader error, etc.)
## Dependencies
- Existing: `pkg/models`, `pkg/readers`, `pkg/pgsql/keywords.go`
- New: `gopkg.in/yaml.v3` for YAML parsing (may already be in go.mod)
## Design Decisions
### Confirmed Choices (from user)
1. **Example config file**: Create `.relspec-rules.yaml.example` in repository root with documented examples
2. **Missing rules file**: Use sensible built-in defaults (don't error); rules default to the "warn" level, with a few opt-in checks (e.g. `primary_key_auto_increment`) defaulting to "off"
3. **Terminal output**: ANSI colors (red/yellow/green) when outputting to terminal, plain markdown when piped/redirected
4. **Foreign key naming**: Separate configurable rules for both FK column names and FK constraint names
### Architecture Rationale
1. **Why YAML for config?**: Human-readable, supports comments, standard for config files
2. **Why three levels (enforce/warn/off)?**: Flexibility for gradual adoption, different contexts
3. **Why markdown + JSON?**: Markdown for human review, JSON for tooling integration
4. **Why pkg/inspector?**: Follows existing package structure, separates concerns
5. **Reuse readers**: Leverage existing reader infrastructure, supports all formats automatically
6. **Exit codes**: Follow standard conventions (0=success, 1=validation fail, 2=error)
## Future Enhancements (Not in Scope)
- Auto-fix mode (automatically rename columns, etc.)
- Custom rule plugins
- HTML report format
- Rule templates for different databases
- CI/CD integration examples
- Performance metrics in report

485
pkg/inspector/README.md Normal file
View File

@@ -0,0 +1,485 @@
# RelSpec Inspector
> Database Schema Validation and Linting Tool
The RelSpec Inspector validates database schemas against configurable rules, helping you maintain consistency, enforce naming conventions, and catch common schema design issues across your database models.
## Overview
The Inspector reads database schemas from any supported RelSpec format and validates them against a set of configurable rules. It generates detailed reports highlighting violations, warnings, and passed checks.
## Features
- **Flexible Rule Configuration**: YAML-based rules with three severity levels (enforce, warn, off)
- **Generic Validators**: Reusable regex-based validators for custom naming conventions
- **Multiple Input Formats**: Works with all RelSpec readers (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- **Multiple Output Formats**: Markdown with ANSI colors for terminals, JSON for tooling integration
- **Smart Defaults**: Works out-of-the-box with sensible default rules
- **Terminal-Aware**: Automatic color support detection for improved readability
- **Exit Codes**: Proper exit codes for CI/CD integration
[Todo List of Features](./TODO.md)
## Quick Start
### Basic Usage
```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
# Inspect a DBML file
relspec inspect --from dbml --from-path schema.dbml
# Inspect with custom rules
relspec inspect --from json --from-path db.json --rules my-rules.yaml
# Output JSON report to file
relspec inspect --from pgsql --from-conn "..." \
--output-format json --output report.json
# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
```
### Configuration
Create a `.relspec-rules.yaml` file to customize validation rules. If the file doesn't exist, the inspector uses sensible defaults.
```yaml
version: "1.0"
rules:
# Primary key columns must start with "id_"
primary_key_naming:
enabled: enforce # enforce|warn|off
function: primary_key_naming
pattern: "^id_"
message: "Primary key columns must start with 'id_'"
# Foreign key columns must start with "rid_"
foreign_key_column_naming:
enabled: warn
function: foreign_key_column_naming
pattern: "^rid_"
message: "Foreign key columns should start with 'rid_'"
# Table names must be lowercase snake_case
table_naming_case:
enabled: warn
function: table_regexpr # Generic regex validator
pattern: "^[a-z][a-z0-9_]*$"
message: "Table names should be lowercase with underscores"
# Ensure all tables have primary keys
missing_primary_key:
enabled: warn
function: have_primary_key
message: "Table is missing a primary key"
```
## Built-in Validation Rules
### Primary Key Rules
| Rule | Function | Description |
|------|----------|-------------|
| `primary_key_naming` | `primary_key_naming` | Validate PK column names against regex pattern |
| `primary_key_datatype` | `primary_key_datatype` | Enforce approved PK data types (bigint, serial, etc.) |
| `primary_key_auto_increment` | `primary_key_auto_increment` | Check if PKs have auto-increment enabled |
### Foreign Key Rules
| Rule | Function | Description |
|------|----------|-------------|
| `foreign_key_column_naming` | `foreign_key_column_naming` | Validate FK column names against regex pattern |
| `foreign_key_constraint_naming` | `foreign_key_constraint_naming` | Validate FK constraint names against regex pattern |
| `foreign_key_index` | `foreign_key_index` | Ensure FK columns have indexes for performance |
### Naming Convention Rules
| Rule | Function | Description |
|------|----------|-------------|
| `table_naming_case` | `table_regexpr` | Generic regex validator for table names |
| `column_naming_case` | `column_regexpr` | Generic regex validator for column names |
### Length Rules
| Rule | Function | Description |
|------|----------|-------------|
| `table_name_length` | `table_name_length` | Limit table name length (default: 64 chars) |
| `column_name_length` | `column_name_length` | Limit column name length (default: 64 chars) |
### Reserved Keywords
| Rule | Function | Description |
|------|----------|-------------|
| `reserved_keywords` | `reserved_words` | Detect use of SQL reserved keywords as identifiers |
### Schema Integrity Rules
| Rule | Function | Description |
|------|----------|-------------|
| `missing_primary_key` | `have_primary_key` | Ensure tables have primary keys |
| `orphaned_foreign_key` | `orphaned_foreign_key` | Detect FKs referencing non-existent tables |
| `circular_dependency` | `circular_dependency` | Detect circular FK dependencies |
## Rule Configuration
### Severity Levels
Rules support three severity levels:
- **`enforce`**: Violations are errors (exit code 1)
- **`warn`**: Violations are warnings (exit code 0)
- **`off`**: Rule is disabled
### Rule Structure
```yaml
rule_name:
enabled: enforce|warn|off
function: validator_function_name
message: "Custom message shown on violation"
# Rule-specific parameters
pattern: "^regex_pattern$" # For pattern-based validators
allowed_types: [type1, type2] # For type validators
max_length: 64 # For length validators
check_tables: true # For keyword validator
check_columns: true # For keyword validator
require_index: true # For FK index validator
```
## Generic Validators
The inspector provides generic validator functions that can be reused for custom rules:
### `table_regexpr`
Generic regex validator for table names. Create custom table naming rules:
```yaml
# Example: Ensure table names don't contain numbers
table_no_numbers:
enabled: warn
function: table_regexpr
pattern: "^[a-z_]+$"
message: "Table names should not contain numbers"
# Example: Tables must start with "tbl_"
table_prefix:
enabled: enforce
function: table_regexpr
pattern: "^tbl_[a-z][a-z0-9_]*$"
message: "Table names must start with 'tbl_'"
```
### `column_regexpr`
Generic regex validator for column names. Create custom column naming rules:
```yaml
# Example: Audit columns must end with "_audit"
audit_column_suffix:
enabled: enforce
function: column_regexpr
pattern: ".*_audit$"
message: "Audit columns must end with '_audit'"
# Example: Timestamp columns must end with "_at"
timestamp_suffix:
enabled: warn
function: column_regexpr
pattern: ".*(created|updated|deleted)_at$"
message: "Timestamp columns should end with '_at'"
```
## Output Formats
### Markdown (Default)
Human-readable markdown report with ANSI colors when outputting to a terminal:
```
# RelSpec Inspector Report
**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31T10:30:45Z
## Summary
- Rules Checked: 13
- Errors: 2
- Warnings: 5
- Passed: 120
## Violations
### Errors (2)
#### primary_key_naming
**Location:** public.users.user_id
**Message:** Primary key columns must start with 'id_'
**Details:** expected_pattern=^id_
### Warnings (5)
#### foreign_key_index
**Location:** public.orders.customer_id
**Message:** Foreign key columns should have indexes
**Details:** has_index=false
```
### JSON
Structured JSON output for tooling integration:
```json
{
"summary": {
"total_rules": 13,
"rules_checked": 13,
"error_count": 2,
"warning_count": 5,
"passed_count": 120
},
"violations": [
{
"rule_name": "primary_key_naming",
"level": "error",
"message": "Primary key columns must start with 'id_'",
"location": "public.users.user_id",
"context": {
"schema": "public",
"table": "users",
"column": "user_id",
"expected_pattern": "^id_"
},
"passed": false
}
],
"generated_at": "2025-12-31T10:30:45Z",
"database": "my_database",
"source_format": "pgsql"
}
```
## CLI Reference
### Flags
| Flag | Type | Description |
|------|------|-------------|
| `--from` | string | **Required**. Source format (dbml, pgsql, json, yaml, gorm, etc.) |
| `--from-path` | string | Source file path (for file-based formats) |
| `--from-conn` | string | Connection string (for database formats) |
| `--rules` | string | Path to rules YAML file (default: `.relspec-rules.yaml`) |
| `--output-format` | string | Output format: `markdown` or `json` (default: `markdown`) |
| `--output` | string | Output file path (default: stdout) |
| `--schema` | string | Filter to specific schema by name |
### Exit Codes
| Code | Meaning |
|------|---------|
| 0 | Success (no errors, only warnings or all passed) |
| 1 | Validation errors found (rules with `enabled: enforce` failed) |
| 2 | Runtime error (invalid config, reader error, etc.) |
## CI/CD Integration
### GitHub Actions Example
```yaml
name: Schema Validation
on: [pull_request]
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install RelSpec
run: go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
- name: Validate Schema
run: |
relspec inspect \
--from dbml \
--from-path schema.dbml \
--rules .relspec-rules.yaml \
--output-format json \
--output validation-report.json
- name: Upload Report
if: always()
uses: actions/upload-artifact@v3
with:
name: validation-report
path: validation-report.json
```
### Pre-commit Hook Example
```bash
#!/bin/bash
# .git/hooks/pre-commit
echo "Running schema validation..."
relspec inspect \
--from dbml \
--from-path schema.dbml \
--rules .relspec-rules.yaml
exit $?
```
## Example Configuration File
See [`.relspec-rules.yaml.example`](../../.relspec-rules.yaml.example) for a fully documented example configuration with all available rules and customization options.
## Common Use Cases
### Enforce Naming Standards
```yaml
# Ensure consistent naming across your schema
table_naming_case:
enabled: enforce
function: table_regexpr
pattern: "^[a-z][a-z0-9_]*$"
message: "Tables must use snake_case"
column_naming_case:
enabled: enforce
function: column_regexpr
pattern: "^[a-z][a-z0-9_]*$"
message: "Columns must use snake_case"
primary_key_naming:
enabled: enforce
function: primary_key_naming
pattern: "^id$"
message: "Primary key must be named 'id'"
foreign_key_column_naming:
enabled: enforce
function: foreign_key_column_naming
pattern: "^[a-z]+_id$"
message: "Foreign keys must end with '_id'"
```
### Performance Best Practices
```yaml
# Ensure optimal database performance
foreign_key_index:
enabled: enforce
function: foreign_key_index
require_index: true
message: "Foreign keys must have indexes"
primary_key_datatype:
enabled: enforce
function: primary_key_datatype
allowed_types: [bigserial, bigint]
message: "Use bigserial or bigint for primary keys"
```
### Schema Integrity
```yaml
# Prevent common schema issues
missing_primary_key:
enabled: enforce
function: have_primary_key
message: "All tables must have a primary key"
orphaned_foreign_key:
enabled: enforce
function: orphaned_foreign_key
message: "Foreign keys must reference existing tables"
circular_dependency:
enabled: warn
function: circular_dependency
message: "Circular dependencies detected"
```
### Avoid Reserved Keywords
```yaml
reserved_keywords:
enabled: warn
function: reserved_words
check_tables: true
check_columns: true
message: "Avoid using SQL reserved keywords"
```
## Programmatic Usage
You can use the inspector programmatically in your Go code:
```go
package main
import (
"fmt"
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func main() {
// Load your database model
db := &models.Database{
Name: "my_database",
Schemas: []*models.Schema{
// ... your schema
},
}
// Load rules configuration
config, err := inspector.LoadConfig(".relspec-rules.yaml")
if err != nil {
panic(err)
}
// Create and run inspector
insp := inspector.NewInspector(db, config)
report, err := insp.Inspect()
if err != nil {
panic(err)
}
// Generate report
formatter := inspector.NewMarkdownFormatter(os.Stdout)
output, err := formatter.Format(report)
if err != nil {
panic(err)
}
fmt.Println(output)
// Check for errors
if report.HasErrors() {
os.Exit(1)
}
}
```
## Contributing
Contributions are welcome! To add a new validator:
1. Add the validator function to `validators.go`
2. Register it in `inspector.go` `getValidator()` function
3. Add default configuration to `rules.go` `GetDefaultConfig()`
4. Update this README with the new rule documentation
## License
Apache License 2.0 - See [LICENSE](../../LICENSE) for details.

60
pkg/inspector/TODO.md Normal file
View File

@@ -0,0 +1,60 @@
## Inspector TODO
See the [Inspector README](./README.md) for complete documentation of implemented features.
### Implemented ✓
- [x] Core validation framework with configurable rules
- [x] YAML configuration with three severity levels (enforce/warn/off)
- [x] Generic validators (table_regexpr, column_regexpr)
- [x] Primary key validation (naming, datatype, auto-increment)
- [x] Foreign key validation (column naming, constraint naming, indexes)
- [x] Naming convention validation (snake_case, custom patterns)
- [x] Length validation (table names, column names)
- [x] Reserved keywords detection
- [x] Schema integrity checks (missing PKs, orphaned FKs, circular dependencies)
- [x] Multiple output formats (Markdown with ANSI colors, JSON)
- [x] Terminal-aware color output
- [x] All input formats supported (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- [x] CI/CD integration support (proper exit codes)
- [x] Comprehensive documentation and examples
### Future Enhancements
#### Reporting Enhancements
- [ ] Add verbose mode to show all passing checks in detail
- [ ] Add summary-only mode (suppress violation details)
- [ ] Group violations by table/schema in report
- [ ] Add statistics: most violated rules, tables with most issues
- [ ] HTML report format with interactive filtering
#### Additional Validators
- [ ] Optimal column order for space and storage efficiency
- [ ] Similar-sounding column names detection (synonyms, typos)
- [ ] Plural/singular table name consistency
- [ ] Column order validation (PK first, FKs next, data columns, timestamps last)
- [ ] Data type consistency across related columns
- [ ] Index coverage analysis
- [ ] Unused indexes detection
- [ ] Missing indexes on commonly filtered columns
- [ ] Table size estimates and warnings for large tables
#### Auto-Fix Capabilities
- [ ] Auto-fix mode (`relspec inspect --fix`)
- [ ] Update foreign key types to match primary key types
- [ ] Rename foreign keys to match primary key names with configurable prefix/suffix
- [ ] Reorder columns according to rules
- [ ] Add missing indexes on foreign keys
- [ ] Generate migration scripts for fixes
- [ ] Dry-run mode to preview changes
#### Advanced Features
- [ ] Custom validator plugins (Go plugin system)
- [ ] Rule templates for different databases (PostgreSQL, MySQL, etc.)
- [ ] Rule inheritance and composition
- [ ] Conditional rules (apply only to certain schemas/tables)
- [ ] Performance metrics in report (validation time per rule)
- [ ] Caching for large databases
- [ ] Incremental validation (only changed tables)
- [ ] Watch mode for continuous validation

182
pkg/inspector/inspector.go Normal file
View File

@@ -0,0 +1,182 @@
package inspector
import (
	"fmt"
	"sort"
	"time"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// Inspector performs validation on database models. It is configured once
// with a database and a rule set, and produces an InspectorReport via Inspect.
type Inspector struct {
	config *Config          // rule configuration (which rules run, at which level)
	db     *models.Database // database model under inspection
}

// ValidationResult represents the result of a single validation check.
type ValidationResult struct {
	RuleName string                 `json:"rule_name"`
	Level    string                 `json:"level"` // "error" or "warning"
	Message  string                 `json:"message"`
	Location string                 `json:"location"` // e.g., "schema.table.column"
	Context  map[string]interface{} `json:"context"`  // rule-specific detail key/values
	Passed   bool                   `json:"passed"`   // true when the check succeeded
}

// InspectorReport contains the complete validation report for one run.
type InspectorReport struct {
	Summary      ReportSummary      `json:"summary"`
	Violations   []ValidationResult `json:"violations"` // all results, including passed checks
	GeneratedAt  time.Time          `json:"generated_at"`
	Database     string             `json:"database"`      // inspected database name
	SourceFormat string             `json:"source_format"` // reader format the model came from
}

// ReportSummary contains aggregate statistics over a run's results.
type ReportSummary struct {
	TotalRules   int `json:"total_rules"`   // rules present in the configuration
	RulesChecked int `json:"rules_checked"` // rules enabled (enforce or warn)
	RulesSkipped int `json:"rules_skipped"` // rules disabled (off)
	ErrorCount   int `json:"error_count"`
	WarningCount int `json:"warning_count"`
	PassedCount  int `json:"passed_count"`
}
// NewInspector creates a new inspector for the given database model using
// the supplied rule configuration.
func NewInspector(db *models.Database, config *Config) *Inspector {
	insp := &Inspector{
		db:     db,
		config: config,
	}
	return insp
}
// Inspect runs all enabled validation rules and returns a report.
//
// Rules are executed in sorted name order: Go map iteration order is random,
// which would otherwise shuffle the violation order between runs and produce
// noisy report diffs (e.g. in CI artifacts).
func (i *Inspector) Inspect() (*InspectorReport, error) {
	results := []ValidationResult{}

	// Collect and sort rule names for deterministic execution order.
	ruleNames := make([]string, 0, len(i.config.Rules))
	for name := range i.config.Rules {
		ruleNames = append(ruleNames, name)
	}
	sort.Strings(ruleNames)

	for _, ruleName := range ruleNames {
		rule := i.config.Rules[ruleName]
		if !rule.IsEnabled() {
			continue
		}

		// Resolve the validator implementation named by the rule's
		// "function" field. Unknown functions are skipped so a config file
		// referencing a newer validator does not break this binary.
		validator, exists := getValidator(rule.Function)
		if !exists {
			continue
		}

		ruleResults := validator(i.db, rule, ruleName)

		// Severity is a property of the rule configuration, not the
		// validator, so stamp it onto each result here.
		level := "warning"
		if rule.IsEnforced() {
			level = "error"
		}
		for idx := range ruleResults {
			ruleResults[idx].Level = level
		}

		results = append(results, ruleResults...)
	}

	report := &InspectorReport{
		Summary:      i.generateSummary(results),
		Violations:   results,
		GeneratedAt:  time.Now(),
		Database:     i.db.Name,
		SourceFormat: i.db.SourceFormat,
	}
	return report, nil
}
// generateSummary derives aggregate statistics from the configured rules and
// the collected validation results.
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
	var summary ReportSummary
	summary.TotalRules = len(i.config.Rules)

	// Tally enabled versus disabled rules.
	for _, rule := range i.config.Rules {
		if rule.IsEnabled() {
			summary.RulesChecked++
		} else {
			summary.RulesSkipped++
		}
	}

	// Tally results: passed checks, then violations split by severity.
	for _, res := range results {
		switch {
		case res.Passed:
			summary.PassedCount++
		case res.Level == "error":
			summary.ErrorCount++
		default:
			summary.WarningCount++
		}
	}

	return summary
}
// HasErrors reports whether the report contains at least one error-level
// violation (used by callers to decide the process exit code).
func (r *InspectorReport) HasErrors() bool {
	return r.Summary.ErrorCount != 0
}
// validatorFunc is a function that validates a rule against a database.
// The rule name is passed through so results can be attributed to the
// configured rule that triggered the check.
type validatorFunc func(*models.Database, Rule, string) []ValidationResult

// validatorRegistry maps rule "function" names to their implementations.
// It is built once at package initialization instead of being reallocated
// on every getValidator call.
var validatorRegistry = map[string]validatorFunc{
	"primary_key_naming":            validatePrimaryKeyNaming,
	"primary_key_datatype":          validatePrimaryKeyDatatype,
	"primary_key_auto_increment":    validatePrimaryKeyAutoIncrement,
	"foreign_key_column_naming":     validateForeignKeyColumnNaming,
	"foreign_key_constraint_naming": validateForeignKeyConstraintNaming,
	"foreign_key_index":             validateForeignKeyIndex,
	"table_regexpr":                 validateTableNamingCase,
	"column_regexpr":                validateColumnNamingCase,
	"table_name_length":             validateTableNameLength,
	"column_name_length":            validateColumnNameLength,
	"reserved_words":                validateReservedKeywords,
	"have_primary_key":              validateMissingPrimaryKey,
	"orphaned_foreign_key":          validateOrphanedForeignKey,
	"circular_dependency":           validateCircularDependency,
}

// getValidator returns the validator function registered under functionName,
// and whether such a function exists.
func getValidator(functionName string) (validatorFunc, bool) {
	fn, exists := validatorRegistry[functionName]
	return fn, exists
}
// createResult is a helper that assembles a ValidationResult from its parts.
// The Level field is intentionally left empty; Inspect fills it in from the
// rule's configured severity.
func createResult(ruleName string, passed bool, message string, location string, context map[string]interface{}) ValidationResult {
	result := ValidationResult{
		RuleName: ruleName,
		Passed:   passed,
		Message:  message,
		Location: location,
		Context:  context,
	}
	return result
}
// formatLocation creates a dotted location string from schema, table, and
// optional column ("schema", "schema.table", or "schema.table.column").
// Note that a non-empty column always yields the three-part form, even if
// table is empty.
func formatLocation(schema, table, column string) string {
	switch {
	case column != "":
		return fmt.Sprintf("%s.%s.%s", schema, table, column)
	case table != "":
		return fmt.Sprintf("%s.%s", schema, table)
	default:
		return schema
	}
}

229
pkg/inspector/report.go Normal file
View File

@@ -0,0 +1,229 @@
package inspector
import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"sort"
	"strings"
	"time"
)
// ANSI color codes emitted when the formatter writes to a color-capable
// terminal (see MarkdownFormatter.UseColors).
const (
	colorReset  = "\033[0m"  // restore default attributes
	colorRed    = "\033[31m" // errors
	colorYellow = "\033[33m" // warnings
	colorGreen  = "\033[32m" // passed checks / success
	colorBold   = "\033[1m"  // bold text
)
// ReportFormatter defines the interface for report formatters. Format
// renders a complete InspectorReport to a string.
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

// MarkdownFormatter formats reports as markdown, optionally decorated with
// ANSI color escapes for terminal display.
type MarkdownFormatter struct {
	UseColors bool // when true, emit ANSI escapes instead of plain markdown emphasis
}

// JSONFormatter formats reports as indented JSON.
type JSONFormatter struct{}
// NewMarkdownFormatter creates a markdown formatter. Color support is
// detected from the destination writer: ANSI escapes are enabled only when
// writer is an interactive terminal.
func NewMarkdownFormatter(writer io.Writer) *MarkdownFormatter {
	f := &MarkdownFormatter{}
	f.UseColors = isTerminal(writer)
	return f
}
// NewJSONFormatter creates a JSON formatter.
func NewJSONFormatter() *JSONFormatter {
	return new(JSONFormatter)
}
// Format generates a markdown report: header, database metadata, summary
// counts, then violations grouped into Errors and Warnings sections. When
// f.UseColors is true the markdown emphasis is replaced/augmented with ANSI
// escapes (see formatBold, colorize, formatSubheader).
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
	var sb strings.Builder
	// Header
	sb.WriteString(f.formatHeader("RelSpec Inspector Report"))
	sb.WriteString("\n\n")
	// Metadata about the inspected database and when the report was made.
	sb.WriteString(f.formatBold("Database:") + " " + report.Database + "\n")
	sb.WriteString(f.formatBold("Source Format:") + " " + report.SourceFormat + "\n")
	sb.WriteString(f.formatBold("Generated:") + " " + report.GeneratedAt.Format(time.RFC3339) + "\n")
	sb.WriteString("\n")
	// Summary
	sb.WriteString(f.formatHeader("Summary"))
	sb.WriteString("\n")
	sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))
	// Color-code error and warning counts, but only when they are non-zero.
	if report.Summary.ErrorCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
	} else {
		sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
	}
	if report.Summary.WarningCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
	} else {
		sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
	}
	if report.Summary.PassedCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Passed: %d\n", report.Summary.PassedCount), colorGreen))
	}
	sb.WriteString("\n")
	// Group violations by level; passed results are excluded from the
	// violations listing (they only contribute to the summary counts).
	errors := []ValidationResult{}
	warnings := []ValidationResult{}
	for _, v := range report.Violations {
		if !v.Passed {
			if v.Level == "error" {
				errors = append(errors, v)
			} else {
				warnings = append(warnings, v)
			}
		}
	}
	// Report violations: errors first, then warnings.
	if len(errors) > 0 || len(warnings) > 0 {
		sb.WriteString(f.formatHeader("Violations"))
		sb.WriteString("\n")
		// Errors
		if len(errors) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Errors (%d)", len(errors)), colorRed))
			sb.WriteString("\n")
			for _, violation := range errors {
				sb.WriteString(f.formatViolation(violation, colorRed))
			}
			sb.WriteString("\n")
		}
		// Warnings
		if len(warnings) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Warnings (%d)", len(warnings)), colorYellow))
			sb.WriteString("\n")
			for _, violation := range warnings {
				sb.WriteString(f.formatViolation(violation, colorYellow))
			}
		}
	} else {
		sb.WriteString(f.colorize("✓ No violations found!\n", colorGreen))
	}
	return sb.String(), nil
}
// Format generates an indented JSON rendering of the report.
func (f *JSONFormatter) Format(report *InspectorReport) (string, error) {
	data, err := json.MarshalIndent(report, "", " ")
	if err == nil {
		return string(data), nil
	}
	return "", fmt.Errorf("failed to marshal report to JSON: %w", err)
}
// Helper methods for MarkdownFormatter

// formatHeader renders text as a bolded top-level "# " markdown heading.
func (f *MarkdownFormatter) formatHeader(text string) string {
	heading := "# " + text
	return f.formatBold(heading)
}
// formatSubheader renders text as a "### " markdown heading; with colors
// enabled the heading is additionally colored and bolded.
func (f *MarkdownFormatter) formatSubheader(text string, color string) string {
	header := "### " + text
	if !f.UseColors {
		return header
	}
	return color + colorBold + header + colorReset
}
// formatBold emphasizes text: ANSI bold when colors are on, markdown "**"
// emphasis otherwise.
func (f *MarkdownFormatter) formatBold(text string) string {
	if !f.UseColors {
		return "**" + text + "**"
	}
	return colorBold + text + colorReset
}
// colorize wraps text in the given ANSI color when colors are enabled;
// otherwise it returns the text unchanged.
func (f *MarkdownFormatter) colorize(text string, color string) string {
	if !f.UseColors {
		return text
	}
	return color + text + colorReset
}
// formatViolation renders a single violation as a markdown block: a
// "#### <rule>" heading (colored when enabled) followed by Location,
// Message, and optional Details lines, ending with a blank line.
func (f *MarkdownFormatter) formatViolation(v ValidationResult, color string) string {
	var b strings.Builder

	// Rule name as a heading, colored when enabled.
	heading := "#### " + v.RuleName
	if f.UseColors {
		heading = color + heading + colorReset
	}
	b.WriteString(heading + "\n")

	// Location and message lines.
	b.WriteString(f.formatBold("Location:") + " " + v.Location + "\n")
	b.WriteString(f.formatBold("Message:") + " " + v.Message + "\n")

	// Optional details derived from the context map (location keys omitted).
	if len(v.Context) > 0 {
		if details := f.formatContext(v.Context); details != "" {
			b.WriteString(f.formatBold("Details:") + " " + details + "\n")
		}
	}

	b.WriteString("\n")
	return b.String()
}
// formatContext flattens a violation's context map into "key=value" pairs
// joined by ", ". Keys already shown in the location string (schema, table,
// column) are skipped, and the remaining keys are sorted so the rendered
// details are deterministic across runs (Go map iteration order is random).
func (f *MarkdownFormatter) formatContext(context map[string]interface{}) string {
	// Skip schema, table, column as they're in the location string.
	skipKeys := map[string]bool{
		"schema": true,
		"table":  true,
		"column": true,
	}

	keys := make([]string, 0, len(context))
	for key := range context {
		if !skipKeys[key] {
			keys = append(keys, key)
		}
	}
	sort.Strings(keys)

	parts := make([]string, 0, len(keys))
	for _, key := range keys {
		parts = append(parts, fmt.Sprintf("%s=%v", key, context[key]))
	}
	return strings.Join(parts, ", ")
}
// isTerminal checks if the writer is a terminal (supports ANSI colors)
func isTerminal(w io.Writer) bool {
file, ok := w.(*os.File)
if !ok {
return false
}
// Check if the file descriptor is a terminal
stat, err := file.Stat()
if err != nil {
return false
}
// Check if it's a character device (terminal)
// This works on Unix-like systems
return (stat.Mode() & os.ModeCharDevice) != 0
}

169
pkg/inspector/rules.go Normal file
View File

@@ -0,0 +1,169 @@
package inspector
import (
	"errors"
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)
// Config represents the inspector rules configuration as loaded from YAML.
type Config struct {
	Version string          `yaml:"version"` // config schema version, e.g. "1.0"
	Rules   map[string]Rule `yaml:"rules"`   // rule name -> rule definition
}

// Rule represents a single validation rule.
//
// Enabled, Function and Message apply to every rule; the remaining fields
// are rule-specific parameters read by individual validator functions and
// ignored by validators that do not use them (see GetDefaultConfig for
// which rules use which fields).
type Rule struct {
	Enabled              string   `yaml:"enabled"`  // "enforce", "warn", "off"
	Function             string   `yaml:"function"` // validator function name
	Message              string   `yaml:"message"`  // text reported on violation
	Pattern              string   `yaml:"pattern,omitempty"`                // regex for naming validators
	AllowedTypes         []string `yaml:"allowed_types,omitempty"`          // for datatype validators
	MaxLength            int      `yaml:"max_length,omitempty"`             // for name-length validators
	Case                 string   `yaml:"case,omitempty"`                   // case hint (e.g. "lowercase"); set alongside Pattern in defaults
	RequireIndex         bool     `yaml:"require_index,omitempty"`          // for foreign_key_index
	CheckTables          bool     `yaml:"check_tables,omitempty"`           // for reserved_words
	CheckColumns         bool     `yaml:"check_columns,omitempty"`          // for reserved_words
	RequireAutoIncrement bool     `yaml:"require_auto_increment,omitempty"` // for primary_key_auto_increment
}
// LoadConfig loads the inspector configuration from a YAML file.
//
// If the file does not exist, the built-in defaults are returned (a missing
// configuration file is not an error). Any other read or parse failure is
// returned to the caller.
func LoadConfig(path string) (*Config, error) {
	// Read directly instead of Stat-then-Read: this avoids a race where the
	// file appears/disappears between the two calls, and saves a syscall.
	data, err := os.ReadFile(path)
	if err != nil {
		if errors.Is(err, os.ErrNotExist) {
			// File doesn't exist, use defaults
			return GetDefaultConfig(), nil
		}
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// Parse YAML
	var config Config
	if err := yaml.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("failed to parse config YAML: %w", err)
	}
	return &config, nil
}
// GetDefaultConfig returns the built-in inspector configuration used when no
// rules file is present. Most rules default to the "warn" level so a first
// run reports issues without failing; primary_key_auto_increment defaults to
// "off".
func GetDefaultConfig() *Config {
	return &Config{
		Version: "1.0",
		Rules: map[string]Rule{
			// Primary Key Rules
			"primary_key_naming": {
				Enabled:  "warn",
				Function: "primary_key_naming",
				Pattern:  "^id_",
				Message:  "Primary key columns should start with 'id_'",
			},
			"primary_key_datatype": {
				Enabled:      "warn",
				Function:     "primary_key_datatype",
				AllowedTypes: []string{"bigserial", "bigint", "int", "serial", "integer", "int4", "int8"},
				Message:      "Primary keys should use integer types (bigserial, bigint, int, serial)",
			},
			"primary_key_auto_increment": {
				Enabled:              "off", // opt-in: not every schema wants auto-increment PKs
				Function:             "primary_key_auto_increment",
				RequireAutoIncrement: true,
				Message:              "Primary key without auto-increment detected",
			},
			// Foreign Key Rules
			"foreign_key_column_naming": {
				Enabled:  "warn",
				Function: "foreign_key_column_naming",
				Pattern:  "^rid_",
				Message:  "Foreign key columns should start with 'rid_'",
			},
			"foreign_key_constraint_naming": {
				Enabled:  "warn",
				Function: "foreign_key_constraint_naming",
				Pattern:  "^fk_",
				Message:  "Foreign key constraint names should start with 'fk_'",
			},
			"foreign_key_index": {
				Enabled:      "warn",
				Function:     "foreign_key_index",
				RequireIndex: true,
				Message:      "Foreign key columns should have indexes for optimal performance",
			},
			// Naming Convention Rules (generic regex validators)
			"table_naming_case": {
				Enabled:  "warn",
				Function: "table_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Table names should be lowercase with underscores (snake_case)",
			},
			"column_naming_case": {
				Enabled:  "warn",
				Function: "column_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Column names should be lowercase with underscores (snake_case)",
			},
			// Length Rules
			"table_name_length": {
				Enabled:   "warn",
				Function:  "table_name_length",
				MaxLength: 64,
				Message:   "Table name exceeds recommended maximum length of 64 characters",
			},
			"column_name_length": {
				Enabled:   "warn",
				Function:  "column_name_length",
				MaxLength: 64,
				Message:   "Column name exceeds recommended maximum length of 64 characters",
			},
			// Reserved Keywords
			"reserved_keywords": {
				Enabled:      "warn",
				Function:     "reserved_words",
				CheckTables:  true,
				CheckColumns: true,
				Message:      "Using SQL reserved keywords as identifiers can cause issues",
			},
			// Schema Integrity Rules
			"missing_primary_key": {
				Enabled:  "warn",
				Function: "have_primary_key",
				Message:  "Table is missing a primary key",
			},
			"orphaned_foreign_key": {
				Enabled:  "warn",
				Function: "orphaned_foreign_key",
				Message:  "Foreign key references a non-existent table",
			},
			"circular_dependency": {
				Enabled:  "warn",
				Function: "circular_dependency",
				Message:  "Circular foreign key dependency detected",
			},
		},
	}
}
// IsEnabled reports whether the rule is active at any severity:
// "enforce" and "warn" are enabled; "off" (or any other value) is not.
func (r *Rule) IsEnabled() bool {
	switch r.Enabled {
	case "enforce", "warn":
		return true
	}
	return false
}
// IsEnforced reports whether violations of this rule are treated as hard
// failures ("enforce") rather than warnings.
func (r *Rule) IsEnforced() bool {
	const enforceLevel = "enforce"
	return r.Enabled == enforceLevel
}

603
pkg/inspector/validators.go Normal file
View File

@@ -0,0 +1,603 @@
package inspector
import (
"regexp"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
)
// validatePrimaryKeyNaming checks that every primary-key column name matches
// the rule's regular-expression pattern. One result (pass or fail) is
// emitted per primary-key column. An invalid pattern yields no results,
// silently skipping the rule.
func validatePrimaryKeyNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	findings := []ValidationResult{}
	re, compileErr := regexp.Compile(rule.Pattern)
	if compileErr != nil {
		// Unusable pattern: nothing can be checked.
		return findings
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, column := range tbl.Columns {
				if !column.IsPrimaryKey {
					continue
				}
				findings = append(findings, createResult(
					ruleName,
					re.MatchString(column.Name),
					rule.Message,
					formatLocation(sch.Name, tbl.Name, column.Name),
					map[string]interface{}{
						"schema":           sch.Name,
						"table":            tbl.Name,
						"column":           column.Name,
						"expected_pattern": rule.Pattern,
					},
				))
			}
		}
	}
	return findings
}
// validatePrimaryKeyDatatype checks that every primary-key column uses one of
// the data types allowed by the rule. Types are compared after stripping any
// size/precision suffix (e.g. "varchar(10)" -> "varchar") and ignoring case.
// One result (pass or fail) is emitted per primary-key column.
func validatePrimaryKeyDatatype(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	findings := []ValidationResult{}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, column := range tbl.Columns {
				if !column.IsPrimaryKey {
					continue
				}
				typeAllowed := contains(rule.AllowedTypes, normalizeDataType(column.Type))
				findings = append(findings, createResult(
					ruleName,
					typeAllowed,
					rule.Message,
					formatLocation(sch.Name, tbl.Name, column.Name),
					map[string]interface{}{
						"schema":        sch.Name,
						"table":         tbl.Name,
						"column":        column.Name,
						"current_type":  column.Type,
						"allowed_types": rule.AllowedTypes,
					},
				))
			}
		}
	}
	return findings
}
// validatePrimaryKeyAutoIncrement checks that each primary-key column's
// auto-increment flag agrees with rule.RequireAutoIncrement.
//
// A result is emitted for every primary-key column, pass or fail, consistent
// with the other primary-key validators — previously only failures were
// recorded, which skewed aggregate pass/fail counts for this rule.
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if !col.IsPrimaryKey {
					continue
				}
				// Pass when the column's auto-increment setting matches the
				// configured requirement (which may be "must not have one").
				passed := col.AutoIncrement == rule.RequireAutoIncrement
				results = append(results, createResult(
					ruleName,
					passed,
					rule.Message,
					formatLocation(schema.Name, table.Name, col.Name),
					map[string]interface{}{
						"schema":                 schema.Name,
						"table":                  table.Name,
						"column":                 col.Name,
						"has_auto_increment":     col.AutoIncrement,
						"require_auto_increment": rule.RequireAutoIncrement,
					},
				))
			}
		}
	}
	return results
}
// validateForeignKeyColumnNaming checks that columns participating in
// foreign-key constraints match the rule's naming pattern. One result is
// emitted per (constraint, column) pair, so a column shared by two FK
// constraints is reported twice. An invalid pattern yields no results.
func validateForeignKeyColumnNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	re, compileErr := regexp.Compile(rule.Pattern)
	if compileErr != nil {
		// Broken pattern: skip the rule entirely.
		return out
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, cons := range tbl.Constraints {
				if cons.Type != models.ForeignKeyConstraint {
					continue
				}
				for _, colName := range cons.Columns {
					out = append(out, createResult(
						ruleName,
						re.MatchString(colName),
						rule.Message,
						formatLocation(sch.Name, tbl.Name, colName),
						map[string]interface{}{
							"schema":           sch.Name,
							"table":            tbl.Name,
							"column":           colName,
							"constraint":       cons.Name,
							"expected_pattern": rule.Pattern,
						},
					))
				}
			}
		}
	}
	return out
}
// validateForeignKeyConstraintNaming checks that each foreign-key
// constraint's name matches the rule's pattern. One result is emitted per FK
// constraint; the location is reported at table level (no column). An
// invalid pattern yields no results.
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	re, compileErr := regexp.Compile(rule.Pattern)
	if compileErr != nil {
		// Broken pattern: skip the rule entirely.
		return out
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, cons := range tbl.Constraints {
				if cons.Type != models.ForeignKeyConstraint {
					continue
				}
				out = append(out, createResult(
					ruleName,
					re.MatchString(cons.Name),
					rule.Message,
					formatLocation(sch.Name, tbl.Name, ""),
					map[string]interface{}{
						"schema":           sch.Name,
						"table":            tbl.Name,
						"constraint":       cons.Name,
						"expected_pattern": rule.Pattern,
					},
				))
			}
		}
	}
	return out
}
// validateForeignKeyIndex checks that every foreign-key column is covered by
// an index. A column only counts as indexed when it is the FIRST column of
// some index on the table (a trailing position in a composite index does not
// serve typical FK lookups). When rule.RequireIndex is false the check is a
// no-op and returns an empty slice.
//
// FK columns are visited in deterministic first-seen constraint order; the
// previous implementation ranged over a map, so report ordering varied
// between runs.
func validateForeignKeyIndex(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	if !rule.RequireIndex {
		return results
	}
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Collect FK columns in first-seen order, de-duplicating columns
			// that appear in more than one constraint.
			seen := make(map[string]bool)
			fkColumns := []string{}
			for _, constraint := range table.Constraints {
				if constraint.Type != models.ForeignKeyConstraint {
					continue
				}
				for _, col := range constraint.Columns {
					if !seen[col] {
						seen[col] = true
						fkColumns = append(fkColumns, col)
					}
				}
			}
			// Emit one result per FK column: pass iff some index leads with it.
			for _, fkCol := range fkColumns {
				hasIndex := false
				for _, index := range table.Indexes {
					if len(index.Columns) > 0 && index.Columns[0] == fkCol {
						hasIndex = true
						break
					}
				}
				results = append(results, createResult(
					ruleName,
					hasIndex,
					rule.Message,
					formatLocation(schema.Name, table.Name, fkCol),
					map[string]interface{}{
						"schema":    schema.Name,
						"table":     table.Name,
						"column":    fkCol,
						"has_index": hasIndex,
					},
				))
			}
		}
	}
	return results
}
// validateTableNamingCase checks every table name against the rule's casing
// pattern. One result (pass or fail) is emitted per table. An invalid
// pattern yields no results.
func validateTableNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	re, compileErr := regexp.Compile(rule.Pattern)
	if compileErr != nil {
		// Broken pattern: skip the rule entirely.
		return out
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			details := map[string]interface{}{
				"schema":           sch.Name,
				"table":            tbl.Name,
				"expected_case":    rule.Case,
				"expected_pattern": rule.Pattern,
			}
			out = append(out, createResult(
				ruleName,
				re.MatchString(tbl.Name),
				rule.Message,
				formatLocation(sch.Name, tbl.Name, ""),
				details,
			))
		}
	}
	return out
}
// validateColumnNamingCase checks every column name against the rule's
// casing pattern. One result (pass or fail) is emitted per column. An
// invalid pattern yields no results.
func validateColumnNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	re, compileErr := regexp.Compile(rule.Pattern)
	if compileErr != nil {
		// Broken pattern: skip the rule entirely.
		return out
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, column := range tbl.Columns {
				details := map[string]interface{}{
					"schema":           sch.Name,
					"table":            tbl.Name,
					"column":           column.Name,
					"expected_case":    rule.Case,
					"expected_pattern": rule.Pattern,
				}
				out = append(out, createResult(
					ruleName,
					re.MatchString(column.Name),
					rule.Message,
					formatLocation(sch.Name, tbl.Name, column.Name),
					details,
				))
			}
		}
	}
	return out
}
// validateTableNameLength flags table names longer than rule.MaxLength.
// One result (pass or fail) is emitted per table. Length is measured in
// bytes via len(); for the usual ASCII identifiers this equals the character
// count, but multi-byte names would be over-counted — TODO confirm whether
// identifiers can be non-ASCII here.
func validateTableNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			nameLen := len(tbl.Name)
			out = append(out, createResult(
				ruleName,
				nameLen <= rule.MaxLength,
				rule.Message,
				formatLocation(sch.Name, tbl.Name, ""),
				map[string]interface{}{
					"schema":     sch.Name,
					"table":      tbl.Name,
					"length":     nameLen,
					"max_length": rule.MaxLength,
				},
			))
		}
	}
	return out
}
// validateColumnNameLength flags column names longer than rule.MaxLength.
// One result (pass or fail) is emitted per column. Length is measured in
// bytes via len(), matching validateTableNameLength.
func validateColumnNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, column := range tbl.Columns {
				nameLen := len(column.Name)
				out = append(out, createResult(
					ruleName,
					nameLen <= rule.MaxLength,
					rule.Message,
					formatLocation(sch.Name, tbl.Name, column.Name),
					map[string]interface{}{
						"schema":     sch.Name,
						"table":      tbl.Name,
						"column":     column.Name,
						"length":     nameLen,
						"max_length": rule.MaxLength,
					},
				))
			}
		}
	}
	return out
}
// validateReservedKeywords flags table and/or column names that collide with
// PostgreSQL reserved keywords (case-insensitive). Which identifier kinds
// are checked is controlled by rule.CheckTables / rule.CheckColumns; one
// result (pass or fail) is emitted per checked identifier.
func validateReservedKeywords(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	// Upper-cased lookup set built from the PostgreSQL keyword list.
	reserved := make(map[string]bool)
	for _, kw := range pgsql.GetPostgresKeywords() {
		reserved[strings.ToUpper(kw)] = true
	}
	isReserved := func(name string) bool {
		return reserved[strings.ToUpper(name)]
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			if rule.CheckTables {
				out = append(out, createResult(
					ruleName,
					!isReserved(tbl.Name),
					rule.Message,
					formatLocation(sch.Name, tbl.Name, ""),
					map[string]interface{}{
						"schema":      sch.Name,
						"table":       tbl.Name,
						"object_type": "table",
					},
				))
			}
			if !rule.CheckColumns {
				continue
			}
			for _, column := range tbl.Columns {
				out = append(out, createResult(
					ruleName,
					!isReserved(column.Name),
					rule.Message,
					formatLocation(sch.Name, tbl.Name, column.Name),
					map[string]interface{}{
						"schema":      sch.Name,
						"table":       tbl.Name,
						"column":      column.Name,
						"object_type": "column",
					},
				))
			}
		}
	}
	return out
}
// validateMissingPrimaryKey emits one result per table: pass when the table
// has a primary key — either a column flagged IsPrimaryKey or an explicit
// PRIMARY KEY constraint — and fail otherwise.
func validateMissingPrimaryKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			found := false
			// A column-level PK flag is sufficient.
			for _, column := range tbl.Columns {
				if column.IsPrimaryKey {
					found = true
					break
				}
			}
			// Otherwise fall back to an explicit PK constraint; the loop
			// stops as soon as one is found.
			for i := 0; !found && i < len(tbl.Constraints); i++ {
				found = tbl.Constraints[i].Type == models.PrimaryKeyConstraint
			}
			out = append(out, createResult(
				ruleName,
				found,
				rule.Message,
				formatLocation(sch.Name, tbl.Name, ""),
				map[string]interface{}{
					"schema": sch.Name,
					"table":  tbl.Name,
				},
			))
		}
	}
	return out
}
// validateOrphanedForeignKey emits one result per foreign-key constraint:
// pass when the referenced "schema.table" exists in the database model, fail
// when it does not. A constraint with an empty ReferencedSchema is resolved
// against the owning table's schema.
func validateOrphanedForeignKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	out := []ValidationResult{}
	// Index every "schema.table" that actually exists for O(1) lookups.
	known := make(map[string]bool)
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			known[sch.Name+"."+tbl.Name] = true
		}
	}
	for _, sch := range db.Schemas {
		for _, tbl := range sch.Tables {
			for _, cons := range tbl.Constraints {
				if cons.Type != models.ForeignKeyConstraint {
					continue
				}
				// Empty referenced schema means "same schema as the table".
				targetSchema := cons.ReferencedSchema
				if targetSchema == "" {
					targetSchema = sch.Name
				}
				out = append(out, createResult(
					ruleName,
					known[targetSchema+"."+cons.ReferencedTable],
					rule.Message,
					formatLocation(sch.Name, tbl.Name, ""),
					map[string]interface{}{
						"schema":            sch.Name,
						"table":             tbl.Name,
						"constraint":        cons.Name,
						"referenced_schema": targetSchema,
						"referenced_table":  cons.ReferencedTable,
					},
				))
			}
		}
	}
	return out
}
// validateCircularDependency reports tables that participate in a circular
// foreign-key dependency. A failing result is emitted for each table that is
// actually ON a cycle; tables that merely reference a cyclic group without
// being part of it are not flagged (the previous DFS returned true whenever
// a cycle was *reachable*, producing false positives for such tables).
//
// Note: iteration over the dependency map is unordered, so result ordering
// is not deterministic between runs.
func validateCircularDependency(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	// Build the dependency graph: "schema.table" -> referenced tables.
	dependencies := make(map[string][]string)
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					refSchema := constraint.ReferencedSchema
					if refSchema == "" {
						refSchema = schema.Name
					}
					refKey := refSchema + "." + constraint.ReferencedTable
					dependencies[tableKey] = append(dependencies[tableKey], refKey)
				}
			}
		}
	}
	// A table lies on a cycle iff it can reach itself by following FK edges.
	for tableKey := range dependencies {
		if !reachesSelf(tableKey, dependencies) {
			continue
		}
		// Split back into schema and table; SplitN keeps any later dots in
		// the table-name part intact.
		parts := strings.SplitN(tableKey, ".", 2)
		results = append(results, createResult(
			ruleName,
			false,
			rule.Message,
			formatLocation(parts[0], parts[1], ""),
			map[string]interface{}{
				"schema": parts[0],
				"table":  parts[1],
			},
		))
	}
	return results
}

// reachesSelf reports whether start can be reached again from its own
// outgoing edges, i.e. whether start lies on a directed cycle.
func reachesSelf(start string, graph map[string][]string) bool {
	visited := make(map[string]bool)
	// Seed the stack with start's neighbors so that returning to start
	// (not merely standing on it) counts as a cycle.
	stack := append([]string{}, graph[start]...)
	for len(stack) > 0 {
		node := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if node == start {
			return true
		}
		if visited[node] {
			continue
		}
		visited[node] = true
		stack = append(stack, graph[node]...)
	}
	return false
}
// Helper functions
// hasCycle performs DFS to detect cycles in dependency graph
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
visited[node] = true
recStack[node] = true
for _, neighbor := range graph[node] {
if !visited[neighbor] {
if hasCycle(neighbor, graph, visited, recStack) {
return true
}
} else if recStack[neighbor] {
return true
}
}
recStack[node] = false
return false
}
// normalizeDataType lower-cases a SQL type name and strips any size or
// precision suffix, e.g. "VARCHAR(255)" -> "varchar". A "(" at index 0 is
// deliberately left in place (the guard is > 0, not >= 0), matching the
// original behavior for malformed inputs.
func normalizeDataType(dataType string) string {
	if paren := strings.Index(dataType, "("); paren > 0 {
		dataType = dataType[:paren]
	}
	return strings.ToLower(strings.TrimSpace(dataType))
}
// contains reports whether slice holds value, compared case-insensitively
// (Unicode case folding via strings.EqualFold). A nil slice never matches.
func contains(slice []string, value string) bool {
	for i := range slice {
		if strings.EqualFold(slice[i], value) {
			return true
		}
	}
	return false
}