relspecgo/cmd/relspec/inspect.go
package main

import (
    "fmt"
    "os"
    "strings"

    "github.com/spf13/cobra"

    "git.warky.dev/wdevs/relspecgo/pkg/inspector"
    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)
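
// Flag values for the inspect command, bound to their flags in init below.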
var (
    inspectSourceType   string
    inspectSourcePath   string
    inspectSourceConn   string
    inspectRulesPath    string
    inspectOutputFormat string
    inspectOutputPath   string
    inspectSchemaFilter string
)
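
// inspectCmd defines the "relspec inspect" subcommand. It reads a schema from
// one of the supported sources, validates it against the configured rules, and
// writes a markdown or JSON report.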
var inspectCmd = &cobra.Command{
    Use:   "inspect",
    Short: "Inspect and validate database schemas against rules",
    Long: `Inspect database schemas from various formats and validate against configurable rules.

Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and generates validation reports.

Input formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go, file or directory)
  - bun: Bun model files (Go, file or directory)
  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL database (live connection)

Output formats:
  - markdown: Human-readable markdown report (default, with ANSI colors for terminal)
  - json: JSON report for tooling integration

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require
  host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable

Examples:
  # Inspect a PostgreSQL database with default rules
  relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

  # Inspect a DBML file with custom rules
  relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

  # Inspect and output JSON report to file
  relspec inspect --from json --from-path db.json \
    --output-format json --output report.json

  # Inspect specific schema only
  relspec inspect --from pgsql --from-conn "..." --schema public`,
    RunE: runInspect,
}
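
// init registers the inspect command's flags; --from is the only required flag.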
func init() {
    inspectCmd.Flags().StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
    inspectCmd.Flags().StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
    inspectCmd.Flags().StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
    inspectCmd.Flags().StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
    inspectCmd.Flags().StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
    inspectCmd.Flags().StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
    inspectCmd.Flags().StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")

    err := inspectCmd.MarkFlagRequired("from")
    if err != nil {
        fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
    }
}
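
// runInspect executes the inspection workflow: read the source schema, load the
// rules configuration, run the validators, then format and write the report.
// It returns an error (and hence a non-zero exit code) when validation errors are found.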
func runInspect(cmd *cobra.Command, args []string) error {
    fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
    fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

    // Read source database
    fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
    fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
    if inspectSourcePath != "" {
        fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
    }
    if inspectSourceConn != "" {
        fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
    }

    db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
    if err != nil {
        return fmt.Errorf("failed to read source: %w", err)
    }

    // Apply schema filter if specified
    if inspectSchemaFilter != "" {
        db = filterDatabaseBySchema(db, inspectSchemaFilter)
    }

    fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
    fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))

    totalTables := 0
    for _, schema := range db.Schemas {
        totalTables += len(schema.Tables)
    }
    fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)
    // Load rules configuration
    fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
    fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)

    config, err := inspector.LoadConfig(inspectRulesPath)
    if err != nil {
        return fmt.Errorf("failed to load rules config: %w", err)
    }

    enabledCount := 0
    for _, rule := range config.Rules {
        if rule.IsEnabled() {
            enabledCount++
        }
    }
    fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)

    // Run inspection
    fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")
    insp := inspector.NewInspector(db, config)
    report, err := insp.Inspect()
    if err != nil {
        return fmt.Errorf("inspection failed: %w", err)
    }

    fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
    fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
    fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)
    // Format and output report
    var formattedReport string
    switch strings.ToLower(inspectOutputFormat) {
    case "json":
        formatter := inspector.NewJSONFormatter()
        formattedReport, err = formatter.Format(report)
    case "markdown", "md":
        // Determine output writer for terminal detection
        var output *os.File
        if inspectOutputPath != "" {
            output, err = os.Create(inspectOutputPath)
            if err != nil {
                return fmt.Errorf("failed to create output file: %w", err)
            }
            defer output.Close()
        } else {
            output = os.Stdout
        }
        formatter := inspector.NewMarkdownFormatter(output)
        formattedReport, err = formatter.Format(report)
    default:
        return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
    }
    if err != nil {
        return fmt.Errorf("failed to format report: %w", err)
    }

    // Write output
    if inspectOutputPath != "" {
        err = os.WriteFile(inspectOutputPath, []byte(formattedReport), 0644)
        if err != nil {
            return fmt.Errorf("failed to write output file: %w", err)
        }
        fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
    } else {
        fmt.Println(formattedReport)
    }

    fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
    fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

    // Exit with appropriate code
    if report.HasErrors() {
        return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
    }
    return nil
}
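
// readDatabaseForInspect builds the reader that matches the requested source
// format and loads the database model from a file, directory, or live connection.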
func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
    var reader readers.Reader

    switch strings.ToLower(dbType) {
    case "dbml":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for DBML format")
        }
        reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "dctx":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for DCTX format")
        }
        reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "drawdb":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for DrawDB format")
        }
        reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "graphql":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for GraphQL format")
        }
        reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "json":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for JSON format")
        }
        reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "yaml", "yml":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for YAML format")
        }
        reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "gorm":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for GORM format")
        }
        reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "bun":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for Bun format")
        }
        reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "drizzle":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for Drizzle format")
        }
        reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "prisma":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for Prisma format")
        }
        reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "typeorm":
        if filePath == "" {
            return nil, fmt.Errorf("file path is required for TypeORM format")
        }
        reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
    case "pgsql", "postgres", "postgresql":
        if connString == "" {
            return nil, fmt.Errorf("connection string is required for PostgreSQL format")
        }
        reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
    default:
        return nil, fmt.Errorf("unsupported database type: %s", dbType)
    }
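
    // Every branch above yields a readers.Reader; ReadDatabase parses the
    // source into the shared *models.Database model.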
    db, err := reader.ReadDatabase()
    if err != nil {
        return nil, err
    }
    return db, nil
}
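
// filterDatabaseBySchema returns a copy of db whose Schemas slice contains only
// the schema named schemaName; if no schema matches, the copy has no schemas.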
func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
    filtered := &models.Database{
        Name:            db.Name,
        Description:     db.Description,
        DatabaseType:    db.DatabaseType,
        DatabaseVersion: db.DatabaseVersion,
        SourceFormat:    db.SourceFormat,
        Schemas:         []*models.Schema{},
    }

    for _, schema := range db.Schemas {
        if schema.Name == schemaName {
            filtered.Schemas = append(filtered.Schemas, schema)
            break
        }
    }

    return filtered
}