Compare commits

15 Commits

| SHA1 |
|---|
| 64aeac972a |
| 97a57f5dc8 |
| adfe126758 |
| 1d193c84d7 |
| 1d627c74b1 |
| 7c6a355458 |
| c0ef26b660 |
| cb38f95b79 |
| 196d87bc29 |
| beb1100d86 |
| 410b1ee743 |
| b5d39aeee4 |
| 5fb9a8f231 |
| 27da24f575 |
| 0fb3469dbd |
.github/workflows/ci.yml (2 changes, vendored)

@@ -1,5 +1,5 @@
 name: CI
+run-name: "Test on master branch"
 on:
   push:
     branches: [ master ]
.github/workflows/integration-tests.yml (67 changes, vendored)

@@ -1,5 +1,5 @@
 name: Integration Tests
+run-name: "Integration Tests"
 on:
   push:
     branches: [ master ]
@@ -11,6 +11,21 @@ jobs:
     name: Integration Tests
     runs-on: ubuntu-latest
 
+    services:
+      postgres:
+        image: postgres:16-alpine
+        env:
+          POSTGRES_USER: relspec
+          POSTGRES_PASSWORD: relspec_test_password
+          POSTGRES_DB: relspec_test
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
@@ -31,41 +46,19 @@ jobs:
       - name: Download dependencies
         run: go mod download
 
-      - name: Start PostgreSQL container
-        run: |
-          docker run -d \
-            --name relspec-test-postgres \
-            --network host \
-            -e POSTGRES_USER=relspec \
-            -e POSTGRES_PASSWORD=relspec_test_password \
-            -e POSTGRES_DB=relspec_test \
-            postgres:16-alpine
-
-      - name: Wait for PostgreSQL to be ready
-        run: |
-          echo "Waiting for PostgreSQL to start..."
-          for i in {1..30}; do
-            if docker exec relspec-test-postgres pg_isready -U relspec -d relspec_test > /dev/null 2>&1; then
-              echo "PostgreSQL is ready!"
-              break
-            fi
-            echo "Waiting... ($i/30)"
-            sleep 1
-          done
-          sleep 2
-
-      - name: Copy init script into container
-        run: |
-          docker cp tests/postgres/init.sql relspec-test-postgres:/tmp/init.sql
-
       - name: Initialize test database
+        env:
+          PGPASSWORD: relspec_test_password
         run: |
-          docker exec relspec-test-postgres psql -U relspec -d relspec_test -f /tmp/init.sql
+          # Services are accessible via hostname matching the service name
+          psql -h postgres -U relspec -d relspec_test -f tests/postgres/init.sql
 
       - name: Verify database setup
+        env:
+          PGPASSWORD: relspec_test_password
         run: |
           echo "Verifying database initialization..."
-          docker exec relspec-test-postgres psql -U relspec -d relspec_test -c "
+          psql -h postgres -U relspec -d relspec_test -c "
             SELECT
               (SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
               (SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
@@ -75,17 +68,5 @@ jobs:
 
       - name: Run integration tests
         env:
-          RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@localhost:5432/relspec_test
+          RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@postgres:5432/relspec_test
         run: make test-integration
-
-      - name: Stop PostgreSQL container
-        if: always()
-        run: |
-          docker stop relspec-test-postgres || true
-          docker rm relspec-test-postgres || true
-
-      - name: Summary
-        if: always()
-        run: |
-          echo "Integration tests completed."
-          echo "PostgreSQL container has been cleaned up."
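The change replaces hand-managed `docker run` steps with a GitHub Actions service container, so the job reaches PostgreSQL by the service hostname through `RELSPEC_TEST_PG_CONN`. As a minimal sketch of how an integration test might pick that variable up (the helper and the pgx driver import are illustrative assumptions, not part of this diff):

```go
package pgsql_test

import (
	"database/sql"
	"os"
	"testing"

	_ "github.com/jackc/pgx/v5/stdlib" // assumed driver; registers the "pgx" name
)

// openTestDB is a hypothetical helper: it skips the test when no integration
// database is configured, mirroring how the Makefile guards test-integration
// behind RELSPEC_TEST_PG_CONN.
func openTestDB(t *testing.T) *sql.DB {
	t.Helper()
	conn := os.Getenv("RELSPEC_TEST_PG_CONN")
	if conn == "" {
		t.Skip("RELSPEC_TEST_PG_CONN not set; skipping integration test")
	}
	db, err := sql.Open("pgx", conn)
	if err != nil {
		t.Fatalf("open: %v", err)
	}
	if err := db.Ping(); err != nil {
		t.Fatalf("ping: %v", err)
	}
	return db
}
```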
.github/workflows/release.yml (2 changes, vendored)

@@ -1,5 +1,5 @@
 name: Release
+run-name: "Making Release"
 on:
   push:
     tags:
Makefile (69 changes)

@@ -1,4 +1,4 @@
-.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration release release-version
+.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version
 
 # Binary name
 BINARY_NAME=relspec
@@ -14,6 +14,26 @@ GOGET=$(GOCMD) get
 GOMOD=$(GOCMD) mod
 GOCLEAN=$(GOCMD) clean
 
+# Auto-detect container runtime (Docker or Podman)
+CONTAINER_RUNTIME := $(shell \
+	if command -v podman > /dev/null 2>&1; then \
+		echo "podman"; \
+	elif command -v docker > /dev/null 2>&1; then \
+		echo "docker"; \
+	else \
+		echo "none"; \
+	fi)
+
+# Detect compose command
+COMPOSE_CMD := $(shell \
+	if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		echo "podman-compose"; \
+	elif command -v docker-compose > /dev/null 2>&1; then \
+		echo "docker-compose"; \
+	else \
+		echo "docker compose"; \
+	fi)
+
 all: lint test build ## Run linting, tests, and build
 
 build: ## Build the binary
@@ -32,7 +52,7 @@ test-integration: ## Run integration tests (requires RELSPEC_TEST_PG_CONN enviro
 	@echo "Running integration tests..."
 	@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
 		echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
-		echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5432/relspec_test'"; \
+		echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5439/relspec_test'"; \
 		exit 1; \
 	fi
 	@echo "Running PostgreSQL reader tests..."
@@ -81,34 +101,53 @@ deps: ## Download dependencies
 	$(GOMOD) tidy
 	@echo "Dependencies updated"
 
+start: docker-up ## Alias for docker-up (start PostgreSQL test database)
+
+stop: docker-down ## Alias for docker-down (stop PostgreSQL test database)
+
 docker-up: ## Start PostgreSQL test database
-	@echo "Starting PostgreSQL test database..."
-	@if command -v docker-compose > /dev/null 2>&1; then \
-		docker-compose up -d postgres; \
+	@echo "Starting PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
+	@if [ "$(CONTAINER_RUNTIME)" = "none" ]; then \
+		echo "Error: Neither Docker nor Podman is installed"; \
+		exit 1; \
+	fi
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		podman run -d --name relspec-test-postgres \
+			-e POSTGRES_USER=relspec \
+			-e POSTGRES_PASSWORD=relspec_test_password \
+			-e POSTGRES_DB=relspec_test \
+			-p 5439:5432 \
+			-v ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:Z \
+			postgres:16-alpine 2>/dev/null || echo "Container already running"; \
 	else \
-		docker compose up -d postgres; \
+		$(COMPOSE_CMD) up -d postgres; \
 	fi
 	@echo "Waiting for PostgreSQL to be ready..."
 	@sleep 3
-	@echo "PostgreSQL is running on port 5433"
-	@echo "Connection: postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
+	@echo "PostgreSQL is running on port 5439"
+	@echo "Connection: postgres://relspec:relspec_test_password@localhost:5439/relspec_test"
 
 docker-down: ## Stop PostgreSQL test database
-	@echo "Stopping PostgreSQL test database..."
-	@if command -v docker-compose > /dev/null 2>&1; then \
-		docker-compose down; \
+	@echo "Stopping PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		podman stop relspec-test-postgres 2>/dev/null || true; \
+		podman rm relspec-test-postgres 2>/dev/null || true; \
 	else \
-		docker compose down; \
+		$(COMPOSE_CMD) down; \
 	fi
 	@echo "PostgreSQL stopped"
 
-docker-test: ## Run PostgreSQL integration tests with Docker
-	@./tests/postgres/run_tests.sh
+docker-test: ## Run PostgreSQL integration tests with Docker/Podman
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		./tests/postgres/run_tests_podman.sh; \
+	else \
+		./tests/postgres/run_tests.sh; \
+	fi
 
 docker-test-integration: docker-up ## Start DB and run integration tests
 	@echo "Running integration tests..."
 	@sleep 2
-	@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test" \
+	@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test" \
 	$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
 	@make docker-down
README.md (80 changes)

@@ -1,16 +1,24 @@
 # RelSpec
 
+[](https://git.warky.dev/wdevs/relspecgo/releases/latest)
+[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/ci.yml)
+[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/integration-tests.yml)
+[](https://go.dev/dl/)
+[](LICENSE)
+
 > Database Relations Specification Tool for Go
 
 RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
 
 ## Overview
 
-RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
+RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
 - Inspect live databases and extract their structure
-- Convert between different ORM models (GORM, Bun , etc.)
+- Validate schemas against configurable rules and naming conventions
+- Convert between different ORM models (GORM, Bun, etc.)
 - Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
 - Generate standardized specification files (JSON, YAML, etc.)
+- Compare database schemas and track changes
 
 
 
@@ -60,6 +68,19 @@ RelSpec can write database schemas to multiple formats:
 - [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
 - [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
 
+### Inspector (Schema Validation)
+
+RelSpec includes a powerful schema validation and linting tool:
+
+- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
+  - Enforce naming conventions (snake_case, camelCase, custom patterns)
+  - Check primary key and foreign key standards
+  - Detect missing indexes on foreign keys
+  - Prevent use of SQL reserved keywords
+  - Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
+  - Support for custom validation rules
+  - Multiple output formats (Markdown with colors, JSON)
+  - CI/CD integration ready
+
 ## Use of AI
 [Rules and use of AI](./AI_USE.md)
@@ -74,30 +95,65 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
 
 ## Usage
 
+### Schema Conversion
+
 ```bash
-# Inspect database and generate GORM models
-relspec --input db --conn "postgres://..." --output gorm --out-file models.go
+# Convert PostgreSQL database to GORM models
+relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+  --to gorm --to-path models/ --package models
 
 # Convert GORM models to Bun
-relspec --input gorm --in-file existing.go --output bun --out-file bun_models.go
+relspec convert --from gorm --from-path models.go \
+  --to bun --to-path bun_models.go --package models
 
 # Export database schema to JSON
-relspec --input db --conn "mysql://..." --output json --out-file schema.json
+relspec convert --from pgsql --from-conn "postgres://..." \
+  --to json --to-path schema.json
 
-# Convert Clarion DCTX to YAML
-relspec --input dctx --in-file legacy.dctx --output yaml --out-file schema.yaml
+# Convert DBML to PostgreSQL SQL
+relspec convert --from dbml --from-path schema.dbml \
+  --to pgsql --to-path schema.sql
+```
+
+### Schema Validation
+
+```bash
+# Validate a PostgreSQL database with default rules
+relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
+
+# Validate DBML file with custom rules
+relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
+
+# Generate JSON validation report
+relspec inspect --from json --from-path db.json \
+  --output-format json --output report.json
+
+# Validate specific schema only
+relspec inspect --from pgsql --from-conn "..." --schema public
+```
+
+### Schema Comparison
+
+```bash
+# Compare two database schemas
+relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
+  --to pgsql --to-conn "postgres://localhost/db2"
 ```
 
 ## Project Structure
 
 ```
 relspecgo/
-├── cmd/                # CLI application
+├── cmd/
+│   └── relspec/        # CLI application (convert, inspect, diff, scripts)
 ├── pkg/
-│   ├── readers/        # Input format readers
-│   ├── writers/        # Output format writers
+│   ├── readers/        # Input format readers (DBML, GORM, PostgreSQL, etc.)
+│   ├── writers/        # Output format writers (GORM, Bun, SQL, etc.)
+│   ├── inspector/      # Schema validation and linting
+│   ├── diff/           # Schema comparison
 │   ├── models/         # Internal data models
-│   └── transform/      # Transformation logic
+│   ├── transform/      # Transformation logic
+│   └── pgsql/          # PostgreSQL utilities (keywords, data types)
 ├── examples/           # Usage examples
 └── tests/              # Test files
 ```
TODO.md (1 change)

@@ -2,6 +2,7 @@
 
 
 ## Input Readers / Writers
 
 - [✔️] **Database Inspector**
   - [✔️] PostgreSQL driver
   - [ ] MySQL driver
cmd/relspec/inspect.go (new file, 321 lines)

@@ -0,0 +1,321 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

var (
	inspectSourceType   string
	inspectSourcePath   string
	inspectSourceConn   string
	inspectRulesPath    string
	inspectOutputFormat string
	inspectOutputPath   string
	inspectSchemaFilter string
)

var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long: `Inspect database schemas from various formats and validate against configurable rules.

Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and generates validation reports.

Input formats:
  - dbml:    DBML schema files
  - dctx:    DCTX schema files
  - drawdb:  DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json:    JSON database schema
  - yaml:    YAML database schema
  - gorm:    GORM model files (Go, file or directory)
  - bun:     Bun model files (Go, file or directory)
  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
  - prisma:  Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql:   PostgreSQL database (live connection)

Output formats:
  - markdown: Human-readable markdown report (default, with ANSI colors for terminal)
  - json:     JSON report for tooling integration

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require
  host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable

Examples:
  # Inspect a PostgreSQL database with default rules
  relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

  # Inspect a DBML file with custom rules
  relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

  # Inspect and output JSON report to file
  relspec inspect --from json --from-path db.json \
    --output-format json --output report.json

  # Inspect specific schema only
  relspec inspect --from pgsql --from-conn "..." --schema public`,
	RunE: runInspect,
}

func init() {
	inspectCmd.Flags().StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	inspectCmd.Flags().StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
	inspectCmd.Flags().StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
	inspectCmd.Flags().StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
	inspectCmd.Flags().StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
	inspectCmd.Flags().StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
	inspectCmd.Flags().StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")

	err := inspectCmd.MarkFlagRequired("from")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
	}
}

func runInspect(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read source database
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
	if inspectSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
	}
	if inspectSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
	}

	db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	// Apply schema filter if specified
	if inspectSchemaFilter != "" {
		db = filterDatabaseBySchema(db, inspectSchemaFilter)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))

	totalTables := 0
	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)

	// Load rules configuration
	fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
	fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)

	config, err := inspector.LoadConfig(inspectRulesPath)
	if err != nil {
		return fmt.Errorf("failed to load rules config: %w", err)
	}

	enabledCount := 0
	for _, rule := range config.Rules {
		if rule.IsEnabled() {
			enabledCount++
		}
	}
	fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)

	// Run inspection
	fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")

	insp := inspector.NewInspector(db, config)
	report, err := insp.Inspect()
	if err != nil {
		return fmt.Errorf("inspection failed: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
	fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
	fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)

	// Format and output report
	var formattedReport string
	switch strings.ToLower(inspectOutputFormat) {
	case "json":
		formatter := inspector.NewJSONFormatter()
		formattedReport, err = formatter.Format(report)
	case "markdown", "md":
		// Determine output writer for terminal detection
		var output *os.File
		if inspectOutputPath != "" {
			output, err = os.Create(inspectOutputPath)
			if err != nil {
				return fmt.Errorf("failed to create output file: %w", err)
			}
			defer output.Close()
		} else {
			output = os.Stdout
		}

		formatter := inspector.NewMarkdownFormatter(output)
		formattedReport, err = formatter.Format(report)
	default:
		return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
	}

	if err != nil {
		return fmt.Errorf("failed to format report: %w", err)
	}

	// Write output
	if inspectOutputPath != "" {
		err = os.WriteFile(inspectOutputPath, []byte(formattedReport), 0644)
		if err != nil {
			return fmt.Errorf("failed to write output file: %w", err)
		}
		fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
	} else {
		fmt.Println(formattedReport)
	}

	fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

	// Exit with appropriate code
	if report.HasErrors() {
		return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
	}

	return nil
}

func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DBML format")
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DCTX format")
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DrawDB format")
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for GraphQL format")
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for JSON format")
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "yaml", "yml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for YAML format")
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for GORM format")
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Bun format")
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Drizzle format")
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Prisma format")
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for TypeORM format")
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "pgsql", "postgres", "postgresql":
		if connString == "" {
			return nil, fmt.Errorf("connection string is required for PostgreSQL format")
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})

	default:
		return nil, fmt.Errorf("unsupported database type: %s", dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, err
	}

	return db, nil
}

func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
	filtered := &models.Database{
		Name:            db.Name,
		Description:     db.Description,
		DatabaseType:    db.DatabaseType,
		DatabaseVersion: db.DatabaseVersion,
		SourceFormat:    db.SourceFormat,
		Schemas:         []*models.Schema{},
	}

	for _, schema := range db.Schemas {
		if schema.Name == schemaName {
			filtered.Schemas = append(filtered.Schemas, schema)
			break
		}
	}

	return filtered
}
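Both inspect.go above and scripts.go below call `getCurrentTimestamp` and `maskPassword`, helpers that live elsewhere in `cmd/relspec` and are not part of this diff. A hypothetical sketch of a URL-based `maskPassword`, matching the `postgres://user:***@...` output shown in the docs (the real helper may differ, e.g. for key=value connection strings):

```go
import "net/url"

// maskPassword: illustrative sketch only; the actual helper in cmd/relspec
// is not shown in this diff.
func maskPassword(conn string) string {
	u, err := url.Parse(conn)
	if err != nil || u.User == nil {
		return conn // not URL-shaped; return unchanged
	}
	if _, hasPassword := u.User.Password(); hasPassword {
		u.User = url.UserPassword(u.User.Username(), "***")
	}
	return u.String() // e.g. postgres://user:***@localhost:5432/mydb
}
```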
The root command registers the two new subcommands:

@@ -18,4 +18,6 @@ JSON, YAML, SQL, etc.).`,
 func init() {
 	rootCmd.AddCommand(convertCmd)
 	rootCmd.AddCommand(diffCmd)
+	rootCmd.AddCommand(inspectCmd)
+	rootCmd.AddCommand(scriptsCmd)
 }
cmd/relspec/scripts.go (new file, 263 lines)

@@ -0,0 +1,263 @@
package main

import (
	"fmt"
	"os"
	"sort"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

var (
	scriptsDir        string
	scriptsConn       string
	scriptsSchemaName string
	scriptsDBName     string
)

var scriptsCmd = &cobra.Command{
	Use:   "scripts",
	Short: "Manage and execute SQL migration scripts",
	Long: `Manage and execute SQL migration scripts from a directory.

Scripts must follow the naming pattern (both separators supported):
  {priority}_{sequence}_{name}.sql or .pgsql
  {priority}-{sequence}-{name}.sql or .pgsql

Example filenames (underscore format):
  1_001_create_users.sql   # Priority 1, Sequence 1
  1_002_create_posts.sql   # Priority 1, Sequence 2
  2_001_add_indexes.pgsql  # Priority 2, Sequence 1

Example filenames (hyphen format):
  1-001-create-users.sql   # Priority 1, Sequence 1
  1-002-create-posts.sql   # Priority 1, Sequence 2
  10-10-create-newid.pgsql # Priority 10, Sequence 10

Both formats can be mixed in the same directory.
Scripts are executed in order: Priority (ascending), then Sequence (ascending).`,
}

var scriptsListCmd = &cobra.Command{
	Use:   "list",
	Short: "List SQL scripts from a directory",
	Long: `List SQL scripts from a directory and show their execution order.

The scripts are read from the specified directory and displayed in the order
they would be executed (Priority ascending, then Sequence ascending).

Example:
  relspec scripts list --dir ./migrations`,
	RunE: runScriptsList,
}

var scriptsExecuteCmd = &cobra.Command{
	Use:   "execute",
	Short: "Execute SQL scripts against a database",
	Long: `Execute SQL scripts from a directory against a PostgreSQL database.

Scripts are executed in order: Priority (ascending), then Sequence (ascending).
Execution stops immediately on the first error.

The directory is scanned recursively for files matching the patterns:
  {priority}_{sequence}_{name}.sql or .pgsql (underscore format)
  {priority}-{sequence}-{name}.sql or .pgsql (hyphen format)

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require

Examples:
  # Execute migration scripts
  relspec scripts execute --dir ./migrations \
    --conn "postgres://user:pass@localhost:5432/mydb"

  # Execute with custom schema name
  relspec scripts execute --dir ./migrations \
    --conn "postgres://localhost/mydb" \
    --schema public

  # Execute with SSL disabled
  relspec scripts execute --dir ./sql \
    --conn "postgres://user:pass@localhost/db?sslmode=disable"`,
	RunE: runScriptsExecute,
}

func init() {
	// List command flags
	scriptsListCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsListCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsListCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
	err := scriptsListCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}

	// Execute command flags
	scriptsExecuteCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")

	err = scriptsExecuteCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}
	err = scriptsExecuteCmd.MarkFlagRequired("conn")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking conn flag as required: %v\n", err)
	}

	// Add subcommands to scripts command
	scriptsCmd.AddCommand(scriptsListCmd)
	scriptsCmd.AddCommand(scriptsExecuteCmd)
}

func runScriptsList(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== SQL Scripts List ===\n")
	fmt.Fprintf(os.Stderr, "Directory: %s\n\n", scriptsDir)

	// Read scripts from directory
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: scriptsDir,
		Metadata: map[string]any{
			"schema_name":   scriptsSchemaName,
			"database_name": scriptsDBName,
		},
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		return fmt.Errorf("failed to read scripts: %w", err)
	}

	if len(db.Schemas) == 0 {
		fmt.Fprintf(os.Stderr, "No schemas found\n")
		return nil
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) == 0 {
		fmt.Fprintf(os.Stderr, "No SQL scripts found matching pattern {priority}_{sequence}_{name}.sql\n")
		return nil
	}

	// Sort scripts by Priority then Sequence
	sortedScripts := make([]*struct {
		name     string
		priority int
		sequence uint
		sqlLines int
	}, len(schema.Scripts))

	for i, script := range schema.Scripts {
		// Count non-empty lines in SQL
		sqlLines := 0
		for _, line := range []byte(script.SQL) {
			if line == '\n' {
				sqlLines++
			}
		}
		if len(script.SQL) > 0 {
			sqlLines++ // Count last line if no trailing newline
		}

		sortedScripts[i] = &struct {
			name     string
			priority int
			sequence uint
			sqlLines int
		}{
			name:     script.Name,
			priority: script.Priority,
			sequence: script.Sequence,
			sqlLines: sqlLines,
		}
	}

	sort.Slice(sortedScripts, func(i, j int) bool {
		if sortedScripts[i].priority != sortedScripts[j].priority {
			return sortedScripts[i].priority < sortedScripts[j].priority
		}
		return sortedScripts[i].sequence < sortedScripts[j].sequence
	})

	fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "No.", "Priority", "Sequence", "Name", "Lines")
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "----", "--------", "--------", "------------------------------", "-----")

	for i, script := range sortedScripts {
		fmt.Fprintf(os.Stderr, "%-4d %-10d %-8d %-30s %d\n",
			i+1,
			script.priority,
			script.sequence,
			script.name,
			script.sqlLines,
		)
	}

	fmt.Fprintf(os.Stderr, "\n")
	return nil
}

func runScriptsExecute(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== SQL Scripts Execution ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n", getCurrentTimestamp())
	fmt.Fprintf(os.Stderr, "Directory: %s\n", scriptsDir)
	fmt.Fprintf(os.Stderr, "Database: %s\n\n", maskPassword(scriptsConn))

	// Step 1: Read scripts from directory
	fmt.Fprintf(os.Stderr, "[1/2] Reading SQL scripts...\n")

	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: scriptsDir,
		Metadata: map[string]any{
			"schema_name":   scriptsSchemaName,
			"database_name": scriptsDBName,
		},
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		return fmt.Errorf("failed to read scripts: %w", err)
	}

	if len(db.Schemas) == 0 {
		return fmt.Errorf("no schemas found")
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) == 0 {
		fmt.Fprintf(os.Stderr, " No scripts found. Nothing to execute.\n\n")
		return nil
	}

	fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))

	// Step 2: Execute scripts
	fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence)...\n\n")

	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": scriptsConn,
		},
	})

	if err := writer.WriteSchema(schema); err != nil {
		fmt.Fprintf(os.Stderr, "\n")
		return fmt.Errorf("execution failed: %w", err)
	}

	fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
	fmt.Fprintf(os.Stderr, "Successfully executed %d script(s)\n\n", len(schema.Scripts))

	return nil
}
The Docker Compose postgres service moves to the new host port as well:

@@ -9,7 +9,7 @@ services:
       POSTGRES_PASSWORD: relspec_test_password
       POSTGRES_DB: relspec_test
     ports:
-      - "5433:5432" # Using 5433 to avoid conflicts with local PostgreSQL
+      - "5439:5432" # Using 5439 to avoid conflicts with local PostgreSQL
     volumes:
       - ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql
       - postgres_data:/var/lib/postgresql/data
docs/SCRIPTS_COMMAND.md (new file, 360 lines)

@@ -0,0 +1,360 @@
# RelSpec Scripts Command

The `relspec scripts` command provides tools for managing and executing SQL migration scripts from a directory structure.

## Overview

The scripts command supports two main operations:
- **list**: List SQL scripts from a directory in execution order
- **execute**: Execute SQL scripts against a PostgreSQL database

Scripts are read from a directory (recursively) and executed in a deterministic order based on **Priority** (ascending) and **Sequence** (ascending).

## File Naming Convention

SQL scripts must follow this naming pattern (both separators are supported):

```
{priority}_{sequence}_{name}.{sql|pgsql}   (underscore format)
{priority}-{sequence}-{name}.{sql|pgsql}   (hyphen format)
```

### Components

- **priority**: Integer (0-9999) - Execution priority level (lower executes first)
- **sequence**: Integer (0-9999) - Order within priority level (lower executes first)
- **separator**: Underscore `_` or hyphen `-` (both formats can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens)
- **extension**: `.sql` or `.pgsql`

### Valid Examples

**Underscore format:**
```
1_001_create_users.sql        # Priority 1, Sequence 1
1_002_create_posts.sql        # Priority 1, Sequence 2
1_003_create_comments.pgsql   # Priority 1, Sequence 3
2_001_add_indexes.sql         # Priority 2, Sequence 1
2_002_add_constraints.sql     # Priority 2, Sequence 2
3_001_seed_users.sql          # Priority 3, Sequence 1
```

**Hyphen format:**
```
1-001-create-users.sql        # Priority 1, Sequence 1
1-002-create-posts.sql        # Priority 1, Sequence 2
1-003-create-comments.pgsql   # Priority 1, Sequence 3
10-10-create-newid.pgsql      # Priority 10, Sequence 10
```

**Mixed format (both in same directory):**
```
1_001_create_users.sql        # Priority 1, Sequence 1 (underscore)
1-002-create-posts.sql        # Priority 1, Sequence 2 (hyphen)
2_001_add_indexes.sql         # Priority 2, Sequence 1 (underscore)
```

**Execution Order**: 1→2→3→4→5→6 (sorted by Priority, then Sequence)

### Invalid Examples (Will be ignored)

```
migration.sql        # Missing priority/sequence
create_users.sql     # Missing priority/sequence
1_create_users.sql   # Missing sequence
1_001_test.txt       # Wrong extension
README.md            # Not a SQL file
```
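The filename parsing itself lives in `pkg/readers/sqldir` and is not shown in this diff; as a minimal sketch of how such a pattern could be matched in Go (the regular expression and function are illustrative assumptions, not the reader's actual code):

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Matches {priority}{sep}{sequence}{sep}{name}.sql|.pgsql with underscore or
// hyphen separators. Illustrative only; pkg/readers/sqldir may differ.
var scriptName = regexp.MustCompile(`^(\d{1,4})[_-](\d{1,4})[_-]([A-Za-z0-9_-]+)\.(sql|pgsql)$`)

func parseScriptName(file string) (priority int, sequence uint, name string, ok bool) {
	m := scriptName.FindStringSubmatch(file)
	if m == nil {
		return 0, 0, "", false // ignored: e.g. README.md or 1_create_users.sql
	}
	p, _ := strconv.Atoi(m[1])
	s, _ := strconv.ParseUint(m[2], 10, 32)
	return p, uint(s), m[3], true
}

func main() {
	p, s, n, ok := parseScriptName("1_001_create_users.sql")
	fmt.Println(p, s, n, ok) // 1 1 create_users true
}
```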
## Directory Structure

Scripts can be organized in subdirectories. The scanner recursively finds all matching SQL files:

```
migrations/
├── 1_001_create_schema.sql
├── 1_002_create_users.sql
├── tables/
│   ├── 1_003_create_posts.sql
│   └── 1_004_create_comments.pgsql
├── indexes/
│   └── 2_001_add_indexes.sql
└── data/
    └── 3_001_seed_data.sql
```

All files will be found and executed in Priority→Sequence order regardless of directory structure.

## Commands

### relspec scripts list

List all SQL scripts in a directory and show their execution order.

**Usage:**
```bash
relspec scripts list --dir <directory> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")

**Example:**
```bash
relspec scripts list --dir ./migrations
```

**Output:**
```
=== SQL Scripts List ===
Directory: ./migrations

Found 5 script(s) in execution order:

No.  Priority   Sequence Name                           Lines
---- --------   -------- ------------------------------ -----
1    1          1        create_users                   7
2    1          2        create_posts                   8
3    2          1        add_indexes                    4
4    2          2        add_constraints                6
5    3          1        seed_data                      4
```

### relspec scripts execute

Execute SQL scripts from a directory against a PostgreSQL database.

**Usage:**
```bash
relspec scripts execute --dir <directory> --conn <connection-string> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--conn <string>` (required): PostgreSQL connection string
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")

**Connection String Formats:**

```bash
# Standard PostgreSQL URLs
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require

# Key-value format
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
```

**Examples:**

```bash
# Execute migration scripts
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://user:pass@localhost:5432/mydb"

# Execute with custom schema
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://localhost/mydb" \
  --schema public

# Execute with SSL disabled
relspec scripts execute \
  --dir ./sql \
  --conn "postgres://user:pass@localhost/db?sslmode=disable"

# Execute using key-value connection string
relspec scripts execute \
  --dir ./migrations \
  --conn "host=localhost port=5432 user=admin password=secret dbname=prod"
```

**Output:**
```
=== SQL Scripts Execution ===
Started at: 2025-12-30 22:30:15
Directory: ./migrations
Database: postgres://user:***@localhost:5432/mydb

[1/2] Reading SQL scripts...
 ✓ Found 4 script(s)

[2/2] Executing scripts in order (Priority → Sequence)...

Executing script: create_users (Priority=1, Sequence=1)
✓ Successfully executed: create_users
Executing script: create_posts (Priority=1, Sequence=2)
✓ Successfully executed: create_posts
Executing script: add_indexes (Priority=2, Sequence=1)
✓ Successfully executed: add_indexes
Executing script: seed_data (Priority=2, Sequence=2)
✓ Successfully executed: seed_data

=== Execution Complete ===
Completed at: 2025-12-30 22:30:16
Successfully executed 4 script(s)
```

## Execution Behavior

### Execution Order

Scripts are **always** executed in this order:
1. Sort by **Priority** (ascending)
2. Within same priority, sort by **Sequence** (ascending)

Example:
```
Priority 1, Sequence 1   → Executes 1st
Priority 1, Sequence 2   → Executes 2nd
Priority 1, Sequence 10  → Executes 3rd
Priority 2, Sequence 1   → Executes 4th
Priority 2, Sequence 5   → Executes 5th
Priority 10, Sequence 1  → Executes 6th
```

### Error Handling

- **Stop on First Error**: Execution stops immediately when any script fails
- **No Automatic Rollback**: Scripts executed before the failure remain committed
- **Error Details**: Full error message with script name, priority, and sequence

Example error output:
```
Executing script: add_indexes (Priority=2, Sequence=1)
Error: execution failed: failed to execute script add_indexes (Priority=2, Sequence=1):
  ERROR: syntax error at or near "IDNEX" (SQLSTATE 42601)
```

### Transaction Behavior

- Each script executes in its own implicit transaction (PostgreSQL default)
- No automatic transaction wrapping across multiple scripts
- For atomic migrations, manually wrap SQL in `BEGIN/COMMIT` blocks
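A minimal sketch of what wrapping one script in an explicit transaction could look like from Go, assuming `database/sql` with a registered Postgres driver (illustrative only; this is not the `sqlexec` writer's actual code):

```go
import "database/sql"

// execAtomically runs one script's SQL inside an explicit transaction, so a
// failure part-way through rolls back everything the script has done so far.
func execAtomically(db *sql.DB, scriptSQL string) error {
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	if _, err := tx.Exec(scriptSQL); err != nil {
		_ = tx.Rollback() // undo the partial script
		return err
	}
	return tx.Commit()
}
```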
|
### Empty Scripts
|
||||||
|
|
||||||
|
Scripts with empty SQL content are silently skipped.
|
||||||
|
|
||||||
|
## Use Cases
|
||||||
|
|
||||||
|
### Development Migrations
|
||||||
|
|
||||||
|
Organize database changes by priority levels:
|
||||||
|
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_xxx_schema.sql # Priority 1: Core schema
|
||||||
|
├── 1_xxx_tables.sql
|
||||||
|
├── 2_xxx_indexes.sql # Priority 2: Performance
|
||||||
|
├── 2_xxx_constraints.sql
|
||||||
|
└── 3_xxx_seed.sql # Priority 3: Data
|
||||||
|
```
|
||||||
|
|
||||||
|
### Multi-Environment Deployments
|
||||||
|
|
||||||
|
Use priority levels for environment-specific scripts:
|
||||||
|
|
||||||
|
```
|
||||||
|
deploy/
|
||||||
|
├── 1_xxx_core_schema.sql # Priority 1: All environments
|
||||||
|
├── 2_xxx_dev_data.sql # Priority 2: Dev only
|
||||||
|
├── 2_xxx_staging_data.sql # Priority 2: Staging only
|
||||||
|
└── 3_xxx_prod_data.sql # Priority 3: Production only
|
||||||
|
```
|
||||||
|
|
||||||
|
### Incremental Rollouts
|
||||||
|
|
||||||
|
Use sequence for ordered feature rollouts:
|
||||||
|
|
||||||
|
```
|
||||||
|
features/
|
||||||
|
├── 1_001_feature_a_schema.sql
|
||||||
|
├── 1_002_feature_a_data.sql
|
||||||
|
├── 1_003_feature_b_schema.sql
|
||||||
|
├── 1_004_feature_b_data.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integration with RelSpec
|
||||||
|
|
||||||
|
The scripts command uses:
|
||||||
|
- **Reader**: `pkg/readers/sqldir/` - Reads SQL files into `models.Schema.Scripts`
|
||||||
|
- **Writer**: `pkg/writers/sqlexec/` - Executes scripts from `models.Schema.Scripts`
|
||||||
|
|
||||||
|
You can use these packages programmatically:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Read scripts
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "./migrations",
|
||||||
|
})
|
||||||
|
db, _ := reader.ReadDatabase()
|
||||||
|
|
||||||
|
// Execute scripts
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/mydb",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### Naming
|
||||||
|
|
||||||
|
- Use zero-padded sequences: `001`, `002`, `010` (not `1`, `2`, `10`)
|
||||||
|
- Use descriptive names: `create_users_table`, not `table1`
|
||||||
|
- Group related changes: same priority for related DDL
|
||||||
|
|
||||||
|
### Organization
|
||||||
|
|
||||||
|
- Keep scripts small and focused (one logical change per file)
|
||||||
|
- Use priority levels to organize phases (schema → indexes → data)
|
||||||
|
- Document complex migrations with SQL comments
|
||||||
|
|
||||||
|
### Safety
|
||||||
|
|
||||||
|
- Always test migrations in development first
|
||||||
|
- Use `scripts list` to verify execution order before running
|
||||||
|
- Back up production databases before executing
|
||||||
|
- Consider using transactions for critical changes
|
||||||
|
- Review generated SQL before execution
|
||||||
|
|
||||||
|
### Version Control
|
||||||
|
|
||||||
|
- Commit scripts to version control
|
||||||
|
- Never modify executed scripts (create new ones instead)
|
||||||
|
- Use meaningful commit messages
|
||||||
|
- Tag releases with migration checkpoints
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- PostgreSQL only (currently)
|
||||||
|
- No built-in rollback support
|
||||||
|
- No migration state tracking (no "already executed" detection)
|
||||||
|
- No dry-run mode
|
||||||
|
- Stops on first error (no partial execution tracking)
|
||||||
|
|
||||||
|
## Future Enhancements

Potential future features:

- Migration state tracking (executed scripts table)
- Rollback script support (using `models.Script.Rollback` field)
- Dry-run mode (validate without executing)
- Transaction wrapping (all-or-nothing execution; see the sketch below)
- Multi-database support (MySQL, SQLite, etc.)
- Parallel execution for independent scripts
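
As a rough illustration of the transaction-wrapping idea above, here is a minimal sketch using Go's standard `database/sql` package. This is not part of the current sqlexec writer; the driver import, connection string, and `scripts` slice are assumptions for the example.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/jackc/pgx/v5/stdlib" // assumption: any database/sql PostgreSQL driver would do
)

// executeAll runs every script inside one transaction: if any statement
// fails, the whole batch rolls back (all-or-nothing execution).
func executeAll(connStr string, scripts []string) error {
	db, err := sql.Open("pgx", connStr)
	if err != nil {
		return err
	}
	defer db.Close()

	tx, err := db.Begin()
	if err != nil {
		return err
	}
	for _, s := range scripts {
		if _, err := tx.Exec(s); err != nil {
			_ = tx.Rollback() // undo everything executed so far
			return err
		}
	}
	return tx.Commit()
}

func main() {
	scripts := []string{
		"CREATE TABLE demo (id serial PRIMARY KEY)",
		"INSERT INTO demo DEFAULT VALUES",
	}
	if err := executeAll("postgres://localhost/mydb", scripts); err != nil {
		log.Fatal(err)
	}
}
```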
393
docs/SCRIPTS_EXAMPLES.md
Normal file
@@ -0,0 +1,393 @@
# RelSpec Scripts Command - Quick Examples

## Basic Workflow

### 1. Create migration directory structure

```bash
mkdir -p migrations
```

### 2. Create migration scripts

Both underscore and hyphen formats are supported. The examples below use the underscore format, but you can also use: `1-001-create-users-table.sql`

```bash
# Priority 1: Core schema
cat > migrations/1_001_create_users_table.sql << 'EOF'
CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    username VARCHAR(100) NOT NULL UNIQUE,
    email VARCHAR(255) NOT NULL UNIQUE,
    password_hash VARCHAR(255) NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_users_username ON users(username);
CREATE INDEX idx_users_email ON users(email);
EOF

cat > migrations/1_002_create_posts_table.sql << 'EOF'
CREATE TABLE posts (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    title VARCHAR(200) NOT NULL,
    content TEXT,
    published BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
EOF

# Priority 2: Additional indexes
cat > migrations/2_001_add_post_indexes.sql << 'EOF'
CREATE INDEX idx_posts_user_id ON posts(user_id);
CREATE INDEX idx_posts_published ON posts(published);
CREATE INDEX idx_posts_created_at ON posts(created_at);
EOF

# Priority 3: Seed data
cat > migrations/3_001_seed_admin_user.sql << 'EOF'
INSERT INTO users (username, email, password_hash)
VALUES ('admin', 'admin@example.com', 'hashed_password_here')
ON CONFLICT (username) DO NOTHING;
EOF
```

### 3. List scripts to verify order

```bash
relspec scripts list --dir migrations
```

Output:
```
=== SQL Scripts List ===
Directory: migrations

Found 4 script(s) in execution order:

No.  Priority  Sequence  Name                            Lines
---- --------  --------  ------------------------------  -----
1    1         1         create_users_table              13
2    1         2         create_posts_table              11
3    2         1         add_post_indexes                4
4    3         1         seed_admin_user                 4
```

### 4. Execute against database

```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://myuser:mypass@localhost:5432/myapp"
```

## Real-World Examples

### Example 1: E-commerce Database Setup

```bash
# Directory structure
migrations/
├── 1_001_create_users.sql
├── 1_002_create_products.sql
├── 1_003_create_orders.sql
├── 1_004_create_order_items.sql
├── 2_001_add_indexes.sql
├── 2_002_add_constraints.sql
├── 3_001_seed_categories.sql
└── 3_002_seed_sample_products.sql

# Execute
relspec scripts execute \
  --dir migrations \
  --conn "postgres://ecommerce_user:pass@db.example.com:5432/ecommerce_prod?sslmode=require"
```

### Example 2: Multi-Schema Database

```bash
# Organize by schema using subdirectories
migrations/
├── public/
│   ├── 1_001_create_users.sql
│   └── 1_002_create_sessions.sql
├── analytics/
│   ├── 1_001_create_events.sql
│   └── 2_001_create_views.sql
└── reporting/
    └── 1_001_create_reports.sql

# Execute, optionally filtering to a single schema with --schema
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/multi_schema_db" \
  --schema public
```

### Example 3: Development Environment Setup

```bash
# Create local development database
createdb myapp_dev

# Run migrations
relspec scripts execute \
  --dir ./db/migrations \
  --conn "postgres://localhost/myapp_dev?sslmode=disable"

# Verify
psql myapp_dev -c "\dt"
```

### Example 4: CI/CD Pipeline

```yaml
# .github/workflows/deploy.yml
- name: Run database migrations
  run: |
    relspec scripts list --dir migrations
    relspec scripts execute \
      --dir migrations \
      --conn "${{ secrets.DATABASE_URL }}"
```

### Example 5: Docker Compose Integration

```yaml
# docker-compose.yml
services:
  postgres:
    image: postgres:16
    environment:
      POSTGRES_DB: myapp
      POSTGRES_USER: myuser
      POSTGRES_PASSWORD: mypass
    ports:
      - "5432:5432"

  migrate:
    image: relspec:latest
    depends_on:
      - postgres
    volumes:
      - ./migrations:/migrations
    command: >
      scripts execute
      --dir /migrations
      --conn "postgres://myuser:mypass@postgres:5432/myapp"
```

```bash
# Run migrations with docker-compose
docker-compose up -d postgres
sleep 5  # Wait for postgres to be ready
docker-compose run --rm migrate
```

### Example 6: Incremental Feature Rollout

```bash
# Feature branch structure
migrations/
├── 1_100_user_profiles_schema.sql       # Feature: User profiles
├── 1_101_user_profiles_constraints.sql
├── 1_102_user_profiles_indexes.sql
├── 2_100_notifications_schema.sql       # Feature: Notifications
├── 2_101_notifications_constraints.sql
└── 2_102_notifications_indexes.sql

# Deploy just user profiles (Priority 1),
# then later deploy notifications (Priority 2)
```

### Example 7: Rollback Strategy (Manual)

```bash
# Forward migration
cat > migrations/1_001_add_column.sql << 'EOF'
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
EOF

# Create manual rollback script (not auto-executed)
mkdir -p rollbacks
cat > rollbacks/1_001_remove_column.sql << 'EOF'
ALTER TABLE users DROP COLUMN phone;
EOF

# If needed, manually execute rollback
psql myapp -f rollbacks/1_001_remove_column.sql
```

### Example 8: Complex Schema Changes

```sql
-- migrations/1_001_alter_users_table.sql
BEGIN;

-- Add new column
ALTER TABLE users ADD COLUMN full_name VARCHAR(200);

-- Populate from existing data
UPDATE users SET full_name = username WHERE full_name IS NULL;

-- Make it required
ALTER TABLE users ALTER COLUMN full_name SET NOT NULL;

-- Add index
CREATE INDEX idx_users_full_name ON users(full_name);

COMMIT;
```

Execute:
```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/myapp"
```

## File Naming Format Examples

### Underscore Format (Traditional)
```
migrations/
├── 1_001_create_users.sql
├── 1_002_create_posts.sql
├── 2_001_add_indexes.sql
└── 3_001_seed_data.sql
```

### Hyphen Format (Alternative)
```
migrations/
├── 1-001-create-users.sql
├── 1-002-create-posts.sql
├── 10-10-create-newid.pgsql
└── 2-001-add-indexes.sql
```

### Mixed Format (Both in Same Directory)
```
migrations/
├── 1_001_create_users.sql           # Underscore format
├── 1-002-create-posts.sql           # Hyphen format
├── 2_001_add_indexes.sql            # Underscore format
└── 10-10-special-migration.pgsql    # Hyphen format
```

**Note:** All three approaches work identically - use whichever naming style you prefer!

## Common Patterns

### Pattern 1: Schema → Indexes → Constraints → Data

```
1_xxx_*.sql   # Tables and basic structure
2_xxx_*.sql   # Indexes for performance
3_xxx_*.sql   # Foreign keys and constraints
4_xxx_*.sql   # Seed/reference data
```

### Pattern 2: Feature-Based Organization

```
1_001_feature_auth_users.sql
1_002_feature_auth_sessions.sql
1_003_feature_auth_permissions.sql
2_001_feature_blog_posts.sql
2_002_feature_blog_comments.sql
3_001_feature_payments_transactions.sql
```

### Pattern 3: Date-Based Versioning

```
1_20250130_create_users.sql
2_20250131_add_user_indexes.sql
3_20250201_create_posts.sql
```

### Pattern 4: Environment-Specific Scripts

```bash
# Base migrations (all environments)
migrations/base/
├── 1_001_create_users.sql
└── 1_002_create_products.sql

# Development-specific
migrations/dev/
└── 9_001_seed_test_data.sql

# Production-specific
migrations/prod/
└── 9_001_seed_production_config.sql

# Execute different paths based on environment
ENV=dev
relspec scripts execute \
  --dir migrations/base \
  --conn "postgres://localhost/myapp_${ENV}"

relspec scripts execute \
  --dir migrations/${ENV} \
  --conn "postgres://localhost/myapp_${ENV}"
```

## Troubleshooting

### Check script order before execution
```bash
relspec scripts list --dir migrations
```

### Test against local database first
```bash
# Create test database
createdb myapp_test

# Test migrations
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/myapp_test"

# Inspect results
psql myapp_test

# Cleanup
dropdb myapp_test
```

### Validate SQL syntax
```bash
# Use PostgreSQL to check each file without committing anything:
# wrap the script in BEGIN/ROLLBACK and stop on the first error
for f in migrations/*.sql; do
  echo "Checking $f..."
  (echo "BEGIN;"; cat "$f"; echo "ROLLBACK;") | psql myapp -v ON_ERROR_STOP=1
done
```

### Debug connection issues
```bash
# Test connection string
psql "postgres://user:pass@localhost:5432/myapp"

# If that works, use the same string for relspec
relspec scripts execute \
  --dir migrations \
  --conn "postgres://user:pass@localhost:5432/myapp"
```

## Tips

1. **Always review execution order** with `list` before running `execute` (see the sketch after this list for a programmatic pre-flight check)
2. **Test in development** before running against production
3. **Use zero-padded sequences** (001, 002, not 1, 2) for consistent sorting
4. **Keep scripts idempotent** when possible (use IF NOT EXISTS, ON CONFLICT, etc.)
5. **Back up production** before running migrations
6. **Use transactions** for complex multi-statement migrations
7. **Document breaking changes** with SQL comments in the migration files
8. **Version control everything** - commit migrations with code changes
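
As a programmatic pre-flight check, here is a minimal sketch using the sqldir reader shown earlier; the `Name` field on the script entries is an assumption based on the `scripts list` output above.

```go
package main

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
)

func main() {
	// Read the migration directory without executing anything
	reader := sqldir.NewReader(&readers.ReaderOptions{FilePath: "./migrations"})
	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	// Print the scripts that would run, in order
	for _, schema := range db.Schemas {
		for _, script := range schema.Scripts {
			fmt.Println(script.Name) // assumption: Script exposes a Name field
		}
	}
}
```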
177
pkg/inspector/.relspec-rules.yaml.example
Normal file
@@ -0,0 +1,177 @@
# RelSpec Inspector Rules Configuration Example
# Copy this file to .relspec-rules.yaml and customize as needed

version: "1.0"

rules:
  # ============================================================================
  # PRIMARY KEY RULES
  # ============================================================================

  # Validate primary key column naming convention
  primary_key_naming:
    enabled: warn                  # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_"                # Regex pattern - PK columns must start with "id_"
    message: "Primary key columns should start with 'id_'"

  # Validate primary key data types
  primary_key_datatype:
    enabled: warn
    function: primary_key_datatype
    allowed_types:
      - bigserial
      - bigint
      - int
      - serial
      - integer
      - int4
      - int8
    message: "Primary keys should use integer types (bigserial, bigint, int, serial)"

  # Check if primary keys have auto-increment enabled
  primary_key_auto_increment:
    enabled: off                   # Often disabled as not all PKs need auto-increment
    function: primary_key_auto_increment
    require_auto_increment: true
    message: "Primary key without auto-increment detected"

  # ============================================================================
  # FOREIGN KEY RULES
  # ============================================================================

  # Validate foreign key column naming convention
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_"               # FK columns must start with "rid_" (referenced id)
    message: "Foreign key columns should start with 'rid_'"

  # Validate foreign key constraint naming convention
  foreign_key_constraint_naming:
    enabled: warn
    function: foreign_key_constraint_naming
    pattern: "^fk_"                # FK constraints must start with "fk_"
    message: "Foreign key constraint names should start with 'fk_'"

  # Ensure foreign key columns have indexes for performance
  foreign_key_index:
    enabled: warn
    function: foreign_key_index
    require_index: true
    message: "Foreign key columns should have indexes for optimal performance"

  # ============================================================================
  # NAMING CONVENTION RULES
  # ============================================================================

  # Validate table naming follows snake_case convention
  table_naming_case:
    enabled: warn
    function: table_regexpr        # Generic regex validator for table names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$"   # Lowercase letters, numbers, underscores only
    message: "Table names should be lowercase with underscores (snake_case)"

  # Validate column naming follows snake_case convention
  column_naming_case:
    enabled: warn
    function: column_regexpr       # Generic regex validator for column names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$"   # Lowercase letters, numbers, underscores only
    message: "Column names should be lowercase with underscores (snake_case)"

  # ============================================================================
  # LENGTH RULES
  # ============================================================================

  # Limit table name length (PostgreSQL max is 63, but 64 is common practice)
  table_name_length:
    enabled: warn
    function: table_name_length
    max_length: 64
    message: "Table name exceeds recommended maximum length of 64 characters"

  # Limit column name length
  column_name_length:
    enabled: warn
    function: column_name_length
    max_length: 64
    message: "Column name exceeds recommended maximum length of 64 characters"

  # ============================================================================
  # RESERVED KEYWORDS
  # ============================================================================

  # Warn about using SQL reserved keywords as identifiers
  reserved_keywords:
    enabled: warn
    function: reserved_words
    check_tables: true
    check_columns: true
    message: "Using SQL reserved keywords as identifiers can cause issues"

  # ============================================================================
  # SCHEMA INTEGRITY RULES
  # ============================================================================

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"

  # Detect orphaned foreign keys (referencing non-existent tables)
  orphaned_foreign_key:
    enabled: warn
    function: orphaned_foreign_key
    message: "Foreign key references a non-existent table"

  # Detect circular foreign key dependencies
  circular_dependency:
    enabled: warn
    function: circular_dependency
    message: "Circular foreign key dependency detected"

# ============================================================================
# RULE CONFIGURATION NOTES
# ============================================================================
#
# enabled: Controls rule enforcement level
#   - enforce: Violations are errors (exit code 1)
#   - warn: Violations are warnings (exit code 0)
#   - off: Rule is disabled
#
# function: The validation function to execute
#   - Must match a registered validator function
#   - Generic functions like table_regexpr and column_regexpr can be reused
#
# pattern: Regular expression for pattern matching
#   - Used by naming validators
#   - Must be valid Go regex syntax
#
# message: Custom message shown when rule is violated
#   - Should be clear and actionable
#   - Explains what the violation is and how to fix it
#
# ============================================================================
# CUSTOM RULES EXAMPLES
# ============================================================================
#
# You can add custom rules using the generic validator functions:
#
# # Example: Ensure table names don't contain numbers
# table_no_numbers:
#   enabled: warn
#   function: table_regexpr
#   pattern: "^[a-z_]+$"
#   message: "Table names should not contain numbers"
#
# # Example: Audit columns must end with _audit
# audit_column_suffix:
#   enabled: enforce
#   function: column_regexpr
#   pattern: ".*_audit$"
#   message: "Audit columns must end with '_audit'"
#
# ============================================================================
472
pkg/inspector/PLAN.md
Normal file
@@ -0,0 +1,472 @@
# Inspector Feature Implementation Plan

## Overview
Add a model inspection feature that validates database schemas against configurable rules. The inspector will read any supported format, apply validation rules from a YAML config, and output a report in markdown or JSON format.

## Architecture

### Core Components

1. **CLI Command** (`cmd/relspec/inspect.go`)
   - New subcommand: `relspec inspect`
   - Flags:
     - `--from` (required): Input format (dbml, pgsql, json, etc.)
     - `--from-path`: File path for file-based formats
     - `--from-conn`: Connection string for database formats
     - `--rules` (optional): Path to rules YAML file (default: `.relspec-rules.yaml`)
     - `--output-format`: Report format (markdown, json) (default: markdown)
     - `--output`: Output file path (default: stdout)
     - `--schema`: Schema name filter (optional)

2. **Inspector Package** (`pkg/inspector/`)
   - `inspector.go`: Main inspector logic
   - `rules.go`: Rule definitions and configuration
   - `validators.go`: Individual validation rule implementations
   - `report.go`: Report generation (markdown, JSON)
   - `config.go`: YAML config loading and parsing

### Data Flow
```
Input Format → Reader → Database Model → Inspector → Validation Results → Report Formatter → Output
```

## Rules Configuration Structure

### YAML Schema (`rules.yaml`)
```yaml
version: "1.0"
rules:
  # Primary Key Rules
  primary_key_naming:
    enabled: enforce|warn|off
    pattern: "^id_"  # regex pattern
    message: "Primary key columns must start with 'id_'"

  primary_key_datatype:
    enabled: enforce|warn|off
    allowed_types: ["bigserial", "bigint", "int", "serial", "integer"]
    message: "Primary keys must use approved integer types"

  primary_key_auto_increment:
    enabled: enforce|warn|off
    require_auto_increment: true|false
    message: "Primary keys without auto-increment detected"

  # Foreign Key Rules
  foreign_key_column_naming:
    enabled: enforce|warn|off
    pattern: "^rid_"
    message: "Foreign key columns must start with 'rid_'"

  foreign_key_constraint_naming:
    enabled: enforce|warn|off
    pattern: "^fk_"
    message: "Foreign key constraint names must start with 'fk_'"

  foreign_key_index:
    enabled: enforce|warn|off
    require_index: true
    message: "Foreign keys should have indexes"

  # Naming Convention Rules
  table_naming_case:
    enabled: enforce|warn|off
    case: "lowercase"  # lowercase, uppercase, snake_case, camelCase
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names must be lowercase with underscores"

  column_naming_case:
    enabled: enforce|warn|off
    case: "lowercase"
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Column names must be lowercase with underscores"

  # Length Rules
  table_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Table name exceeds maximum length"

  column_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Column name exceeds maximum length"

  # Reserved Keywords
  reserved_keywords:
    enabled: enforce|warn|off
    check_tables: true
    check_columns: true
    message: "Using reserved SQL keywords"

  # Schema Integrity Rules
  missing_primary_key:
    enabled: enforce|warn|off
    message: "Table missing primary key"

  orphaned_foreign_key:
    enabled: enforce|warn|off
    message: "Foreign key references non-existent table"

  circular_dependency:
    enabled: enforce|warn|off
    message: "Circular foreign key dependency detected"
```

### Rule Levels
- **enforce**: Violations are errors (exit code 1)
- **warn**: Violations are warnings (exit code 0)
- **off**: Rule disabled

## Implementation Details

### 1. Inspector Core (`pkg/inspector/inspector.go`)

```go
type Inspector struct {
	config *Config
	db     *models.Database
}

type ValidationResult struct {
	RuleName string
	Level    string // "error" or "warning"
	Message  string
	Location string // e.g., "schema.table.column"
	Context  map[string]interface{}
	Passed   bool
}

type InspectorReport struct {
	Summary      ReportSummary
	Violations   []ValidationResult
	GeneratedAt  time.Time
	Database     string
	SourceFormat string
}

type ReportSummary struct {
	TotalRules   int
	RulesChecked int
	RulesSkipped int
	ErrorCount   int
	WarningCount int
	PassedCount  int
}

func NewInspector(db *models.Database, config *Config) *Inspector
func (i *Inspector) Inspect() (*InspectorReport, error)
func (i *Inspector) validateDatabase() []ValidationResult
func (i *Inspector) validateSchema(schema *models.Schema) []ValidationResult
func (i *Inspector) validateTable(table *models.Table) []ValidationResult
```

### 2. Rule Definitions (`pkg/inspector/rules.go`)

```go
type Config struct {
	Version string
	Rules   map[string]Rule
}

type Rule struct {
	Enabled      string // "enforce", "warn", "off"
	Message      string
	Pattern      string
	AllowedTypes []string
	MaxLength    int
	Case         string
	RequireIndex bool
	CheckTables  bool
	CheckColumns bool
	// ... rule-specific fields
}

type RuleValidator interface {
	Name() string
	Validate(db *models.Database, rule Rule) []ValidationResult
}

func LoadConfig(path string) (*Config, error)
func GetDefaultConfig() *Config
```

**Configuration Loading Behavior:**
- If `--rules` flag is provided but the file is not found: use the default configuration (don't error)
- If the file exists but is invalid YAML: return an error
- The default configuration has sensible rules enabled at "warn" level
- Users can override by creating their own `.relspec-rules.yaml` file (see the sketch after this list)
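
A minimal sketch of this loading behavior, assuming `gopkg.in/yaml.v3` and the `Config`/`GetDefaultConfig` declarations above; the exact error handling is illustrative:

```go
// assumes: import ("fmt"; "os"; yaml "gopkg.in/yaml.v3")

// LoadConfig reads the rules file, falling back to defaults when the
// file is missing but failing loudly when it exists and cannot be parsed.
func LoadConfig(path string) (*Config, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		if os.IsNotExist(err) {
			return GetDefaultConfig(), nil // missing file is not an error
		}
		return nil, err
	}
	var cfg Config
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		return nil, fmt.Errorf("invalid rules file %s: %w", path, err)
	}
	return &cfg, nil
}
```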

### 3. Validators (`pkg/inspector/validators.go`)

Each validator implements rule logic:

```go
// Primary Key Validators
func validatePrimaryKeyNaming(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyDatatype(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule) []ValidationResult

// Foreign Key Validators
func validateForeignKeyColumnNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyIndex(db *models.Database, rule Rule) []ValidationResult

// Naming Convention Validators
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult
func validateColumnNamingCase(db *models.Database, rule Rule) []ValidationResult

// Length Validators
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult
func validateColumnNameLength(db *models.Database, rule Rule) []ValidationResult

// Reserved Keywords Validator
func validateReservedKeywords(db *models.Database, rule Rule) []ValidationResult

// Integrity Validators
func validateMissingPrimaryKey(db *models.Database, rule Rule) []ValidationResult
func validateOrphanedForeignKey(db *models.Database, rule Rule) []ValidationResult
func validateCircularDependency(db *models.Database, rule Rule) []ValidationResult

// Registry of all validators
var validators = map[string]RuleValidator{
	"primary_key_naming": primaryKeyNamingValidator{},
	// ...
}
```
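
To make the shape of these functions concrete, here is a sketch of a regex-based table-name validator, assuming the `Rule` and `ValidationResult` types above and that `models.Schema` exposes `Name` and `Tables` (each with a `Name` field); the details are illustrative, not final code:

```go
// assumes: import "regexp"
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult {
	re, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return nil // an invalid pattern would be reported elsewhere
	}
	var results []ValidationResult
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			results = append(results, ValidationResult{
				RuleName: "table_naming_case",
				Level:    "warning", // mapped from rule.Enabled by the caller
				Message:  rule.Message,
				Location: schema.Name + "." + table.Name,
				Passed:   re.MatchString(table.Name),
			})
		}
	}
	return results
}
```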

### 4. Report Formatting (`pkg/inspector/report.go`)

```go
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

type MarkdownFormatter struct {
	UseColors bool // ANSI colors for terminal output
}
type JSONFormatter struct{}

func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error)
func (f *JSONFormatter) Format(report *InspectorReport) (string, error)

// Helper to detect if output is a TTY (terminal)
func isTerminal(w io.Writer) bool
```

**Output Behavior:**
- Markdown format will use ANSI color codes when outputting to a terminal (TTY)
- When piped or redirected to a file, plain markdown without colors
- Colors: red for errors, yellow for warnings, green for passed checks
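
A minimal sketch of the TTY check using only the standard library; checking the character-device bit on `*os.File` is one common approach, though a dedicated terminal-detection package would also work:

```go
// assumes: import ("io"; "os")
func isTerminal(w io.Writer) bool {
	f, ok := w.(*os.File)
	if !ok {
		return false // buffers, network writers, etc. are never TTYs
	}
	info, err := f.Stat()
	if err != nil {
		return false
	}
	return info.Mode()&os.ModeCharDevice != 0
}
```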

**Markdown Format Example:**
```markdown
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31 10:30:45

## Summary
- Rules Checked: 12
- Errors: 3
- Warnings: 5
- Passed: 4

## Violations

### Errors (3)

#### primary_key_naming
**Table:** users, **Column:** user_id
Primary key columns must start with 'id_'

#### table_name_length
**Table:** user_authentication_sessions_with_metadata
Table name exceeds maximum length (64 characters)

### Warnings (5)

#### foreign_key_index
**Table:** orders, **Column:** customer_id
Foreign keys should have indexes

...
```

**JSON Format Example:**
```json
{
  "summary": {
    "total_rules": 12,
    "rules_checked": 12,
    "error_count": 3,
    "warning_count": 5,
    "passed_count": 4
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "current_name": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```

### 5. CLI Command (`cmd/relspec/inspect.go`)

```go
var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long:  `Read database schemas from various formats and validate against configurable rules.`,
	RunE:  runInspect,
}

func init() {
	inspectCmd.Flags().String("from", "", "Input format (dbml, pgsql, json, etc.)")
	inspectCmd.Flags().String("from-path", "", "Input file path")
	inspectCmd.Flags().String("from-conn", "", "Database connection string")
	inspectCmd.Flags().String("rules", ".relspec-rules.yaml", "Rules configuration file")
	inspectCmd.Flags().String("output-format", "markdown", "Output format (markdown, json)")
	inspectCmd.Flags().String("output", "", "Output file (default: stdout)")
	inspectCmd.Flags().String("schema", "", "Filter by schema name")
	inspectCmd.MarkFlagRequired("from")
}

func runInspect(cmd *cobra.Command, args []string) error {
	// 1. Parse flags
	// 2. Create reader (reuse pattern from convert.go)
	// 3. Read database
	// 4. Load rules config (use defaults if file not found)
	// 5. Create inspector
	// 6. Run inspection
	// 7. Detect if output is terminal (for color support)
	// 8. Format report (with/without ANSI colors)
	// 9. Write output
	// 10. Exit with appropriate code (0 if no errors, 1 if errors)
}
```

## Implementation Phases

### Phase 1: Core Infrastructure
1. Create `pkg/inspector/` package structure
2. Implement `Config` and YAML loading
3. Implement `Inspector` core with basic validation framework
4. Create CLI command skeleton

### Phase 2: Basic Validators
1. Implement naming convention validators
   - Primary key naming
   - Foreign key column naming
   - Foreign key constraint naming
   - Table/column case validation
2. Implement length validators
3. Implement reserved keywords validator (leverage `pkg/pgsql/keywords.go`)

### Phase 3: Advanced Validators
1. Implement datatype validators
2. Implement integrity validators (missing PK, orphaned FK, circular deps)
3. Implement foreign key index validator

### Phase 4: Reporting
1. Implement `InspectorReport` structure
2. Implement markdown formatter
3. Implement JSON formatter
4. Add summary statistics

### Phase 5: CLI Integration
1. Wire up CLI command with flags
2. Integrate reader factory (from convert.go pattern)
3. Add output file handling
4. Add exit code logic
5. Add progress reporting

### Phase 6: Testing & Documentation
1. Unit tests for validators
2. Integration tests with sample schemas
3. Test with all reader formats
4. Update README with inspector documentation
5. Create example rules configuration file

## Files to Create

1. `pkg/inspector/inspector.go` - Core inspector logic
2. `pkg/inspector/rules.go` - Rule definitions and config loading
3. `pkg/inspector/validators.go` - Validation implementations
4. `pkg/inspector/report.go` - Report formatting
5. `pkg/inspector/config.go` - Config utilities
6. `cmd/relspec/inspect.go` - CLI command
7. `.relspec-rules.yaml.example` - Example configuration
8. `pkg/inspector/inspector_test.go` - Tests

## Files to Modify

1. `cmd/relspec/root.go` - Register inspect command
2. `README.md` - Add inspector documentation (if requested)

## Example Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgresql://localhost/mydb"

# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from json --from-path db.json --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public

# Use custom rules location
relspec inspect --from dbml --from-path schema.dbml --rules /path/to/rules.yaml
```

## Exit Codes
- 0: Success (no errors, only warnings or all passed)
- 1: Validation errors found (rules with level="enforce" failed)
- 2: Runtime error (invalid config, reader error, etc.)
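
A minimal sketch of how `runInspect` might map a finished report onto these exit codes, assuming the `InspectorReport` summary fields above; illustrative only:

```go
// assumes: import "os"
func exitCode(report *InspectorReport, runtimeErr error) int {
	if runtimeErr != nil {
		return 2 // invalid config, reader error, etc.
	}
	if report.Summary.ErrorCount > 0 {
		return 1 // at least one enforce-level rule failed
	}
	return 0 // warnings alone do not fail the run
}

// usage: os.Exit(exitCode(report, err))
```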

## Dependencies
- Existing: `pkg/models`, `pkg/readers`, `pkg/pgsql/keywords.go`
- New: `gopkg.in/yaml.v3` for YAML parsing (may already be in go.mod)

## Design Decisions

### Confirmed Choices (from user)
1. **Example config file**: Create `.relspec-rules.yaml.example` in the repository root with documented examples
2. **Missing rules file**: Use sensible built-in defaults (don't error), all rules at "warn" level by default
3. **Terminal output**: ANSI colors (red/yellow/green) when outputting to a terminal, plain markdown when piped/redirected
4. **Foreign key naming**: Separate configurable rules for both FK column names and FK constraint names

### Architecture Rationale
1. **Why YAML for config?** Human-readable, supports comments, standard for config files
2. **Why three levels (enforce/warn/off)?** Flexibility for gradual adoption and different contexts
3. **Why markdown + JSON?** Markdown for human review, JSON for tooling integration
4. **Why pkg/inspector?** Follows the existing package structure and separates concerns
5. **Reuse readers**: Leverage existing reader infrastructure; supports all formats automatically
6. **Exit codes**: Follow standard conventions (0=success, 1=validation fail, 2=error)

## Future Enhancements (Not in Scope)
- Auto-fix mode (automatically rename columns, etc.)
- Custom rule plugins
- HTML report format
- Rule templates for different databases
- CI/CD integration examples
- Performance metrics in report
485
pkg/inspector/README.md
Normal file
@@ -0,0 +1,485 @@
# RelSpec Inspector

> Database Schema Validation and Linting Tool

The RelSpec Inspector validates database schemas against configurable rules, helping you maintain consistency, enforce naming conventions, and catch common schema design issues across your database models.

## Overview

The Inspector reads database schemas from any supported RelSpec format and validates them against a set of configurable rules. It generates detailed reports highlighting violations, warnings, and passed checks.

## Features

- **Flexible Rule Configuration**: YAML-based rules with three severity levels (enforce, warn, off)
- **Generic Validators**: Reusable regex-based validators for custom naming conventions
- **Multiple Input Formats**: Works with all RelSpec readers (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- **Multiple Output Formats**: Markdown with ANSI colors for terminals, JSON for tooling integration
- **Smart Defaults**: Works out of the box with sensible default rules
- **Terminal-Aware**: Automatic color support detection for improved readability
- **Exit Codes**: Proper exit codes for CI/CD integration

[Todo List of Features](./TODO.md)

## Quick Start

### Basic Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

# Inspect a DBML file
relspec inspect --from dbml --from-path schema.dbml

# Inspect with custom rules
relspec inspect --from json --from-path db.json --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from pgsql --from-conn "..." \
  --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
```

### Configuration

Create a `.relspec-rules.yaml` file to customize validation rules. If the file doesn't exist, the inspector uses sensible defaults.

```yaml
version: "1.0"

rules:
  # Primary key columns must start with "id_"
  primary_key_naming:
    enabled: enforce  # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_"
    message: "Primary key columns must start with 'id_'"

  # Foreign key columns must start with "rid_"
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_"
    message: "Foreign key columns should start with 'rid_'"

  # Table names must be lowercase snake_case
  table_naming_case:
    enabled: warn
    function: table_regexpr  # Generic regex validator
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names should be lowercase with underscores"

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"
```

## Built-in Validation Rules

### Primary Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `primary_key_naming` | `primary_key_naming` | Validate PK column names against regex pattern |
| `primary_key_datatype` | `primary_key_datatype` | Enforce approved PK data types (bigint, serial, etc.) |
| `primary_key_auto_increment` | `primary_key_auto_increment` | Check if PKs have auto-increment enabled |

### Foreign Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `foreign_key_column_naming` | `foreign_key_column_naming` | Validate FK column names against regex pattern |
| `foreign_key_constraint_naming` | `foreign_key_constraint_naming` | Validate FK constraint names against regex pattern |
| `foreign_key_index` | `foreign_key_index` | Ensure FK columns have indexes for performance |

### Naming Convention Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_naming_case` | `table_regexpr` | Generic regex validator for table names |
| `column_naming_case` | `column_regexpr` | Generic regex validator for column names |

### Length Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_name_length` | `table_name_length` | Limit table name length (default: 64 chars) |
| `column_name_length` | `column_name_length` | Limit column name length (default: 64 chars) |

### Reserved Keywords

| Rule | Function | Description |
|------|----------|-------------|
| `reserved_keywords` | `reserved_words` | Detect use of SQL reserved keywords as identifiers |

### Schema Integrity Rules

| Rule | Function | Description |
|------|----------|-------------|
| `missing_primary_key` | `have_primary_key` | Ensure tables have primary keys |
| `orphaned_foreign_key` | `orphaned_foreign_key` | Detect FKs referencing non-existent tables |
| `circular_dependency` | `circular_dependency` | Detect circular FK dependencies |

## Rule Configuration

### Severity Levels

Rules support three severity levels:

- **`enforce`**: Violations are errors (exit code 1)
- **`warn`**: Violations are warnings (exit code 0)
- **`off`**: Rule is disabled

### Rule Structure

```yaml
rule_name:
  enabled: enforce|warn|off
  function: validator_function_name
  message: "Custom message shown on violation"
  # Rule-specific parameters
  pattern: "^regex_pattern$"      # For pattern-based validators
  allowed_types: [type1, type2]   # For type validators
  max_length: 64                  # For length validators
  check_tables: true              # For keyword validator
  check_columns: true             # For keyword validator
  require_index: true             # For FK index validator
```

## Generic Validators

The inspector provides generic validator functions that can be reused for custom rules:

### `table_regexpr`

Generic regex validator for table names. Create custom table naming rules:

```yaml
# Example: Ensure table names don't contain numbers
table_no_numbers:
  enabled: warn
  function: table_regexpr
  pattern: "^[a-z_]+$"
  message: "Table names should not contain numbers"

# Example: Tables must start with "tbl_"
table_prefix:
  enabled: enforce
  function: table_regexpr
  pattern: "^tbl_[a-z][a-z0-9_]*$"
  message: "Table names must start with 'tbl_'"
```

### `column_regexpr`

Generic regex validator for column names. Create custom column naming rules:

```yaml
# Example: Audit columns must end with "_audit"
audit_column_suffix:
  enabled: enforce
  function: column_regexpr
  pattern: ".*_audit$"
  message: "Audit columns must end with '_audit'"

# Example: Timestamp columns must end with "_at"
timestamp_suffix:
  enabled: warn
  function: column_regexpr
  pattern: ".*(created|updated|deleted)_at$"
  message: "Timestamp columns should end with '_at'"
```

## Output Formats

### Markdown (Default)

Human-readable markdown report with ANSI colors when outputting to a terminal:

```
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31T10:30:45Z

## Summary
- Rules Checked: 13
- Errors: 2
- Warnings: 5
- Passed: 120

## Violations

### Errors (2)

#### primary_key_naming
**Location:** public.users.user_id
**Message:** Primary key columns must start with 'id_'
**Details:** expected_pattern=^id_

### Warnings (5)

#### foreign_key_index
**Location:** public.orders.customer_id
**Message:** Foreign key columns should have indexes
**Details:** has_index=false
```

### JSON

Structured JSON output for tooling integration:

```json
{
  "summary": {
    "total_rules": 13,
    "rules_checked": 13,
    "error_count": 2,
    "warning_count": 5,
    "passed_count": 120
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```

## CLI Reference

### Flags

| Flag | Type | Description |
|------|------|-------------|
| `--from` | string | **Required**. Source format (dbml, pgsql, json, yaml, gorm, etc.) |
| `--from-path` | string | Source file path (for file-based formats) |
| `--from-conn` | string | Connection string (for database formats) |
| `--rules` | string | Path to rules YAML file (default: `.relspec-rules.yaml`) |
| `--output-format` | string | Output format: `markdown` or `json` (default: `markdown`) |
| `--output` | string | Output file path (default: stdout) |
| `--schema` | string | Filter to specific schema by name |

### Exit Codes

| Code | Meaning |
|------|---------|
| 0 | Success (no errors, only warnings or all passed) |
| 1 | Validation errors found (rules with `enabled: enforce` failed) |
| 2 | Runtime error (invalid config, reader error, etc.) |

## CI/CD Integration

### GitHub Actions Example

```yaml
name: Schema Validation

on: [pull_request]

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install RelSpec
        run: go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest

      - name: Validate Schema
        run: |
          relspec inspect \
            --from dbml \
            --from-path schema.dbml \
            --rules .relspec-rules.yaml \
            --output-format json \
            --output validation-report.json

      - name: Upload Report
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: validation-report
          path: validation-report.json
```

### Pre-commit Hook Example

```bash
#!/bin/bash
# .git/hooks/pre-commit

echo "Running schema validation..."

relspec inspect \
  --from dbml \
  --from-path schema.dbml \
  --rules .relspec-rules.yaml

exit $?
```

## Example Configuration File

See [`.relspec-rules.yaml.example`](../../.relspec-rules.yaml.example) for a fully documented example configuration with all available rules and customization options.

## Common Use Cases

### Enforce Naming Standards

```yaml
# Ensure consistent naming across your schema
table_naming_case:
  enabled: enforce
  function: table_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Tables must use snake_case"

column_naming_case:
  enabled: enforce
  function: column_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Columns must use snake_case"

primary_key_naming:
  enabled: enforce
  function: primary_key_naming
  pattern: "^id$"
  message: "Primary key must be named 'id'"

foreign_key_column_naming:
  enabled: enforce
  function: foreign_key_column_naming
  pattern: "^[a-z]+_id$"
  message: "Foreign keys must end with '_id'"
```

### Performance Best Practices

```yaml
# Ensure optimal database performance
foreign_key_index:
  enabled: enforce
  function: foreign_key_index
  require_index: true
  message: "Foreign keys must have indexes"

primary_key_datatype:
  enabled: enforce
  function: primary_key_datatype
  allowed_types: [bigserial, bigint]
  message: "Use bigserial or bigint for primary keys"
```

### Schema Integrity

```yaml
# Prevent common schema issues
missing_primary_key:
  enabled: enforce
  function: have_primary_key
  message: "All tables must have a primary key"

orphaned_foreign_key:
  enabled: enforce
  function: orphaned_foreign_key
  message: "Foreign keys must reference existing tables"

circular_dependency:
  enabled: warn
  function: circular_dependency
  message: "Circular dependencies detected"
```

### Avoid Reserved Keywords

```yaml
reserved_keywords:
  enabled: warn
  function: reserved_words
  check_tables: true
  check_columns: true
  message: "Avoid using SQL reserved keywords"
```

## Programmatic Usage
|
||||||
|
|
||||||
|
You can use the inspector programmatically in your Go code:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Load your database model
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "my_database",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
// ... your schema
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load rules configuration
|
||||||
|
config, err := inspector.LoadConfig(".relspec-rules.yaml")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create and run inspector
|
||||||
|
insp := inspector.NewInspector(db, config)
|
||||||
|
report, err := insp.Inspect()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate report
|
||||||
|
formatter := inspector.NewMarkdownFormatter(os.Stdout)
|
||||||
|
output, err := formatter.Format(report)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println(output)
|
||||||
|
|
||||||
|
// Check for errors
|
||||||
|
if report.HasErrors() {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
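For machine-readable output, `inspector.NewJSONFormatter()` satisfies the same `ReportFormatter` interface defined in `report.go`, so it slots into the example above unchanged:

```go
// Swap in the JSON formatter for machine-readable output (same interface).
jsonFormatter := inspector.NewJSONFormatter()
jsonOutput, err := jsonFormatter.Format(report)
if err != nil {
	panic(err)
}
fmt.Println(jsonOutput)
```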

## Contributing

Contributions are welcome! To add a new validator (a sketch follows the list):

1. Add the validator function to `validators.go`
2. Register it in the `getValidator()` function in `inspector.go`
3. Add its default configuration to `GetDefaultConfig()` in `rules.go`
4. Update this README with the new rule's documentation
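As a minimal sketch of step 1, a validator is any function matching the `validatorFunc` signature from `inspector.go`, typically built on the `createResult`, `formatLocation`, and `normalizeDataType` helpers. The no-JSON-columns policy below is purely illustrative, not a shipped rule:

```go
// validateNoJSONColumns is a hypothetical example validator: it fails any
// column whose normalized type is "json". Illustrative only.
func validateNoJSONColumns(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				location := formatLocation(schema.Name, table.Name, col.Name)
				passed := normalizeDataType(col.Type) != "json"
				results = append(results, createResult(
					ruleName, passed, rule.Message, location,
					map[string]interface{}{
						"schema": schema.Name,
						"table":  table.Name,
						"column": col.Name,
					},
				))
			}
		}
	}
	return results
}
```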

## License

Apache License 2.0 - See [LICENSE](../../LICENSE) for details.
65
pkg/inspector/TODO.md
Normal file
@@ -0,0 +1,65 @@
## Inspector TODO

See the [Inspector README](./README.md) for complete documentation of implemented features.

### Implemented ✓

- [x] Core validation framework with configurable rules
- [x] YAML configuration with three severity levels (enforce/warn/off)
- [x] Generic validators (table_regexpr, column_regexpr)
- [x] Primary key validation (naming, datatype, auto-increment)
- [x] Foreign key validation (column naming, constraint naming, indexes)
- [x] Naming convention validation (snake_case, custom patterns)
- [x] Length validation (table names, column names)
- [x] Reserved keywords detection
- [x] Schema integrity checks (missing PKs, orphaned FKs, circular dependencies)
- [x] Multiple output formats (Markdown with ANSI colors, JSON)
- [x] Terminal-aware color output
- [x] All input formats supported (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- [x] CI/CD integration support (proper exit codes)
- [x] Comprehensive documentation and examples

### Future Enhancements

#### Reporting Enhancements

- [ ] Add verbose mode to show all passing checks in detail
- [ ] Add summary-only mode (suppress violation details)
- [ ] Group violations by table/schema in report
- [ ] Add statistics: most violated rules, tables with most issues
- [ ] HTML report format with interactive filtering

#### Additional Validators

- [ ] Optimal column order for space and storage efficiency
- [ ] Similar-sounding column names detection (synonyms, typos)
- [ ] Plural/singular table name consistency
- [ ] Column order validation (PK first, FKs next, data columns, timestamps last)
- [ ] Data type consistency across related columns
- [ ] Index coverage analysis
- [ ] Unused indexes detection
- [ ] Missing indexes on commonly filtered columns
- [ ] Table size estimates and warnings for large tables
- [ ] Function naming conventions, driven by a rules file (e.g., in-house conventions such as those used at Bitech)
- [ ] View naming conventions
- [ ] Enum naming conventions
- [ ] Custom type naming conventions
- [ ] Table name consistency across related tables

#### Auto-Fix Capabilities

- [ ] Auto-fix mode (`relspec inspect --fix`)
- [ ] Update foreign key types to match primary key types
- [ ] Rename foreign keys to match primary key names with configurable prefix/suffix
- [ ] Reorder columns according to rules
- [ ] Add missing indexes on foreign keys
- [ ] Generate migration scripts for fixes
- [ ] Dry-run mode to preview changes

#### Advanced Features

- [ ] Custom validator plugins (Go plugin system)
- [ ] Rule templates for different databases (PostgreSQL, MySQL, etc.)
- [ ] Rule inheritance and composition
- [ ] Conditional rules (apply only to certain schemas/tables)
- [ ] Performance metrics in report (validation time per rule)
- [ ] Caching for large databases
- [ ] Incremental validation (only changed tables)
- [ ] Watch mode for continuous validation
182
pkg/inspector/inspector.go
Normal file
@@ -0,0 +1,182 @@
package inspector

import (
	"fmt"
	"time"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// Inspector performs validation on database models
type Inspector struct {
	config *Config
	db     *models.Database
}

// ValidationResult represents the result of a single validation check
type ValidationResult struct {
	RuleName string                 `json:"rule_name"`
	Level    string                 `json:"level"` // "error" or "warning"
	Message  string                 `json:"message"`
	Location string                 `json:"location"` // e.g., "schema.table.column"
	Context  map[string]interface{} `json:"context"`
	Passed   bool                   `json:"passed"`
}

// InspectorReport contains the complete validation report
type InspectorReport struct {
	Summary      ReportSummary      `json:"summary"`
	Violations   []ValidationResult `json:"violations"`
	GeneratedAt  time.Time          `json:"generated_at"`
	Database     string             `json:"database"`
	SourceFormat string             `json:"source_format"`
}

// ReportSummary contains aggregate statistics
type ReportSummary struct {
	TotalRules   int `json:"total_rules"`
	RulesChecked int `json:"rules_checked"`
	RulesSkipped int `json:"rules_skipped"`
	ErrorCount   int `json:"error_count"`
	WarningCount int `json:"warning_count"`
	PassedCount  int `json:"passed_count"`
}

// NewInspector creates a new inspector with the given database and configuration
func NewInspector(db *models.Database, config *Config) *Inspector {
	return &Inspector{
		config: config,
		db:     db,
	}
}

// Inspect runs all enabled validation rules and returns a report
func (i *Inspector) Inspect() (*InspectorReport, error) {
	results := []ValidationResult{}

	// Run all enabled validators
	for ruleName, rule := range i.config.Rules {
		if !rule.IsEnabled() {
			continue
		}

		// Get the validator function for this rule using the function field
		validator, exists := getValidator(rule.Function)
		if !exists {
			// Skip unknown validator functions
			continue
		}

		// Run the validator
		ruleResults := validator(i.db, rule, ruleName)

		// Set the level based on rule configuration
		level := "warning"
		if rule.IsEnforced() {
			level = "error"
		}

		for idx := range ruleResults {
			ruleResults[idx].Level = level
		}

		results = append(results, ruleResults...)
	}

	// Generate summary
	summary := i.generateSummary(results)

	report := &InspectorReport{
		Summary:      summary,
		Violations:   results,
		GeneratedAt:  time.Now(),
		Database:     i.db.Name,
		SourceFormat: i.db.SourceFormat,
	}

	return report, nil
}

// generateSummary creates summary statistics from validation results
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
	summary := ReportSummary{
		TotalRules: len(i.config.Rules),
	}

	// Count enabled rules
	for _, rule := range i.config.Rules {
		if rule.IsEnabled() {
			summary.RulesChecked++
		} else {
			summary.RulesSkipped++
		}
	}

	// Count violations by level
	for _, result := range results {
		if result.Passed {
			summary.PassedCount++
		} else {
			if result.Level == "error" {
				summary.ErrorCount++
			} else {
				summary.WarningCount++
			}
		}
	}

	return summary
}

// HasErrors returns true if the report contains any errors
func (r *InspectorReport) HasErrors() bool {
	return r.Summary.ErrorCount > 0
}

// validatorFunc is a function that validates a rule against a database
type validatorFunc func(*models.Database, Rule, string) []ValidationResult

// getValidator returns the validator function for a given function name
func getValidator(functionName string) (validatorFunc, bool) {
	validators := map[string]validatorFunc{
		"primary_key_naming":            validatePrimaryKeyNaming,
		"primary_key_datatype":          validatePrimaryKeyDatatype,
		"primary_key_auto_increment":    validatePrimaryKeyAutoIncrement,
		"foreign_key_column_naming":     validateForeignKeyColumnNaming,
		"foreign_key_constraint_naming": validateForeignKeyConstraintNaming,
		"foreign_key_index":             validateForeignKeyIndex,
		"table_regexpr":                 validateTableNamingCase,
		"column_regexpr":                validateColumnNamingCase,
		"table_name_length":             validateTableNameLength,
		"column_name_length":            validateColumnNameLength,
		"reserved_words":                validateReservedKeywords,
		"have_primary_key":              validateMissingPrimaryKey,
		"orphaned_foreign_key":          validateOrphanedForeignKey,
		"circular_dependency":           validateCircularDependency,
	}

	fn, exists := validators[functionName]
	return fn, exists
}

// createResult is a helper to create a validation result
func createResult(ruleName string, passed bool, message string, location string, context map[string]interface{}) ValidationResult {
	return ValidationResult{
		RuleName: ruleName,
		Message:  message,
		Location: location,
		Context:  context,
		Passed:   passed,
	}
}

// formatLocation creates a location string from schema, table, and optional column
func formatLocation(schema, table, column string) string {
	if column != "" {
		return fmt.Sprintf("%s.%s.%s", schema, table, column)
	}
	if table != "" {
		return fmt.Sprintf("%s.%s", schema, table)
	}
	return schema
}
229
pkg/inspector/report.go
Normal file
@@ -0,0 +1,229 @@
package inspector

import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"strings"
	"time"
)

// ANSI color codes
const (
	colorReset  = "\033[0m"
	colorRed    = "\033[31m"
	colorYellow = "\033[33m"
	colorGreen  = "\033[32m"
	colorBold   = "\033[1m"
)

// ReportFormatter defines the interface for report formatters
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

// MarkdownFormatter formats reports as markdown
type MarkdownFormatter struct {
	UseColors bool
}

// JSONFormatter formats reports as JSON
type JSONFormatter struct{}

// NewMarkdownFormatter creates a markdown formatter with color support detection
func NewMarkdownFormatter(writer io.Writer) *MarkdownFormatter {
	return &MarkdownFormatter{
		UseColors: isTerminal(writer),
	}
}

// NewJSONFormatter creates a JSON formatter
func NewJSONFormatter() *JSONFormatter {
	return &JSONFormatter{}
}

// Format generates a markdown report
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
	var sb strings.Builder

	// Header
	sb.WriteString(f.formatHeader("RelSpec Inspector Report"))
	sb.WriteString("\n\n")

	// Metadata
	sb.WriteString(f.formatBold("Database:") + " " + report.Database + "\n")
	sb.WriteString(f.formatBold("Source Format:") + " " + report.SourceFormat + "\n")
	sb.WriteString(f.formatBold("Generated:") + " " + report.GeneratedAt.Format(time.RFC3339) + "\n")
	sb.WriteString("\n")

	// Summary
	sb.WriteString(f.formatHeader("Summary"))
	sb.WriteString("\n")
	sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))

	// Color-code error and warning counts
	if report.Summary.ErrorCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
	} else {
		sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
	}

	if report.Summary.WarningCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
	} else {
		sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
	}

	if report.Summary.PassedCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Passed: %d\n", report.Summary.PassedCount), colorGreen))
	}

	sb.WriteString("\n")

	// Group violations by level
	errors := []ValidationResult{}
	warnings := []ValidationResult{}

	for _, v := range report.Violations {
		if !v.Passed {
			if v.Level == "error" {
				errors = append(errors, v)
			} else {
				warnings = append(warnings, v)
			}
		}
	}

	// Report violations
	if len(errors) > 0 || len(warnings) > 0 {
		sb.WriteString(f.formatHeader("Violations"))
		sb.WriteString("\n")

		// Errors
		if len(errors) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Errors (%d)", len(errors)), colorRed))
			sb.WriteString("\n")
			for _, violation := range errors {
				sb.WriteString(f.formatViolation(violation, colorRed))
			}
			sb.WriteString("\n")
		}

		// Warnings
		if len(warnings) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Warnings (%d)", len(warnings)), colorYellow))
			sb.WriteString("\n")
			for _, violation := range warnings {
				sb.WriteString(f.formatViolation(violation, colorYellow))
			}
		}
	} else {
		sb.WriteString(f.colorize("✓ No violations found!\n", colorGreen))
	}

	return sb.String(), nil
}

// Format generates a JSON report
func (f *JSONFormatter) Format(report *InspectorReport) (string, error) {
	data, err := json.MarshalIndent(report, "", " ")
	if err != nil {
		return "", fmt.Errorf("failed to marshal report to JSON: %w", err)
	}
	return string(data), nil
}

// Helper methods for MarkdownFormatter

func (f *MarkdownFormatter) formatHeader(text string) string {
	return f.formatBold("# " + text)
}

func (f *MarkdownFormatter) formatSubheader(text string, color string) string {
	header := "### " + text
	if f.UseColors {
		return color + colorBold + header + colorReset
	}
	return header
}

func (f *MarkdownFormatter) formatBold(text string) string {
	if f.UseColors {
		return colorBold + text + colorReset
	}
	return "**" + text + "**"
}

func (f *MarkdownFormatter) colorize(text string, color string) string {
	if f.UseColors {
		return color + text + colorReset
	}
	return text
}

func (f *MarkdownFormatter) formatViolation(v ValidationResult, color string) string {
	var sb strings.Builder

	// Rule name as header
	if f.UseColors {
		sb.WriteString(color + "#### " + v.RuleName + colorReset + "\n")
	} else {
		sb.WriteString("#### " + v.RuleName + "\n")
	}

	// Location and message
	sb.WriteString(f.formatBold("Location:") + " " + v.Location + "\n")
	sb.WriteString(f.formatBold("Message:") + " " + v.Message + "\n")

	// Context details (optional, only show interesting ones)
	if len(v.Context) > 0 {
		contextStr := f.formatContext(v.Context)
		if contextStr != "" {
			sb.WriteString(f.formatBold("Details:") + " " + contextStr + "\n")
		}
	}

	sb.WriteString("\n")
	return sb.String()
}

func (f *MarkdownFormatter) formatContext(context map[string]interface{}) string {
	// Extract relevant context information
	var parts []string

	// Skip schema, table, column as they're in location
	skipKeys := map[string]bool{
		"schema": true,
		"table":  true,
		"column": true,
	}

	for key, value := range context {
		if skipKeys[key] {
			continue
		}

		parts = append(parts, fmt.Sprintf("%s=%v", key, value))
	}

	return strings.Join(parts, ", ")
}

// isTerminal checks if the writer is a terminal (supports ANSI colors)
func isTerminal(w io.Writer) bool {
	file, ok := w.(*os.File)
	if !ok {
		return false
	}

	// Check if the file descriptor is a terminal
	stat, err := file.Stat()
	if err != nil {
		return false
	}

	// Check if it's a character device (terminal)
	// This works on Unix-like systems
	return (stat.Mode() & os.ModeCharDevice) != 0
}
169
pkg/inspector/rules.go
Normal file
@@ -0,0 +1,169 @@
package inspector

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

// Config represents the inspector rules configuration
type Config struct {
	Version string          `yaml:"version"`
	Rules   map[string]Rule `yaml:"rules"`
}

// Rule represents a single validation rule
type Rule struct {
	Enabled              string   `yaml:"enabled"`  // "enforce", "warn", "off"
	Function             string   `yaml:"function"` // validator function name
	Message              string   `yaml:"message"`
	Pattern              string   `yaml:"pattern,omitempty"`
	AllowedTypes         []string `yaml:"allowed_types,omitempty"`
	MaxLength            int      `yaml:"max_length,omitempty"`
	Case                 string   `yaml:"case,omitempty"`
	RequireIndex         bool     `yaml:"require_index,omitempty"`
	CheckTables          bool     `yaml:"check_tables,omitempty"`
	CheckColumns         bool     `yaml:"check_columns,omitempty"`
	RequireAutoIncrement bool     `yaml:"require_auto_increment,omitempty"`
}

// LoadConfig loads configuration from a YAML file.
// If the file doesn't exist, returns default configuration.
// If the file exists but is invalid, returns an error.
func LoadConfig(path string) (*Config, error) {
	// Check if file exists
	if _, err := os.Stat(path); os.IsNotExist(err) {
		// File doesn't exist, use defaults
		return GetDefaultConfig(), nil
	}

	// Read file
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// Parse YAML
	var config Config
	if err := yaml.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("failed to parse config YAML: %w", err)
	}

	return &config, nil
}

// GetDefaultConfig returns the default inspector configuration.
// All rules are enabled at "warn" level by default.
func GetDefaultConfig() *Config {
	return &Config{
		Version: "1.0",
		Rules: map[string]Rule{
			// Primary Key Rules
			"primary_key_naming": {
				Enabled:  "warn",
				Function: "primary_key_naming",
				Pattern:  "^id_",
				Message:  "Primary key columns should start with 'id_'",
			},
			"primary_key_datatype": {
				Enabled:      "warn",
				Function:     "primary_key_datatype",
				AllowedTypes: []string{"bigserial", "bigint", "int", "serial", "integer", "int4", "int8"},
				Message:      "Primary keys should use integer types (bigserial, bigint, int, serial)",
			},
			"primary_key_auto_increment": {
				Enabled:              "off",
				Function:             "primary_key_auto_increment",
				RequireAutoIncrement: true,
				Message:              "Primary key without auto-increment detected",
			},

			// Foreign Key Rules
			"foreign_key_column_naming": {
				Enabled:  "warn",
				Function: "foreign_key_column_naming",
				Pattern:  "^rid_",
				Message:  "Foreign key columns should start with 'rid_'",
			},
			"foreign_key_constraint_naming": {
				Enabled:  "warn",
				Function: "foreign_key_constraint_naming",
				Pattern:  "^fk_",
				Message:  "Foreign key constraint names should start with 'fk_'",
			},
			"foreign_key_index": {
				Enabled:      "warn",
				Function:     "foreign_key_index",
				RequireIndex: true,
				Message:      "Foreign key columns should have indexes for optimal performance",
			},

			// Naming Convention Rules
			"table_naming_case": {
				Enabled:  "warn",
				Function: "table_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Table names should be lowercase with underscores (snake_case)",
			},
			"column_naming_case": {
				Enabled:  "warn",
				Function: "column_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Column names should be lowercase with underscores (snake_case)",
			},

			// Length Rules
			"table_name_length": {
				Enabled:   "warn",
				Function:  "table_name_length",
				MaxLength: 64,
				Message:   "Table name exceeds recommended maximum length of 64 characters",
			},
			"column_name_length": {
				Enabled:   "warn",
				Function:  "column_name_length",
				MaxLength: 64,
				Message:   "Column name exceeds recommended maximum length of 64 characters",
			},

			// Reserved Keywords
			"reserved_keywords": {
				Enabled:      "warn",
				Function:     "reserved_words",
				CheckTables:  true,
				CheckColumns: true,
				Message:      "Using SQL reserved keywords as identifiers can cause issues",
			},

			// Schema Integrity Rules
			"missing_primary_key": {
				Enabled:  "warn",
				Function: "have_primary_key",
				Message:  "Table is missing a primary key",
			},
			"orphaned_foreign_key": {
				Enabled:  "warn",
				Function: "orphaned_foreign_key",
				Message:  "Foreign key references a non-existent table",
			},
			"circular_dependency": {
				Enabled:  "warn",
				Function: "circular_dependency",
				Message:  "Circular foreign key dependency detected",
			},
		},
	}
}

// IsEnabled returns true if the rule is enabled (either "enforce" or "warn")
func (r *Rule) IsEnabled() bool {
	return r.Enabled == "enforce" || r.Enabled == "warn"
}

// IsEnforced returns true if the rule is set to "enforce" level
func (r *Rule) IsEnforced() bool {
	return r.Enabled == "enforce"
}
603
pkg/inspector/validators.go
Normal file
@@ -0,0 +1,603 @@
package inspector

import (
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
)

// validatePrimaryKeyNaming checks that primary key column names match a pattern
func validatePrimaryKeyNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	pattern, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if col.IsPrimaryKey {
					location := formatLocation(schema.Name, table.Name, col.Name)
					passed := pattern.MatchString(col.Name)

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":           schema.Name,
							"table":            table.Name,
							"column":           col.Name,
							"expected_pattern": rule.Pattern,
						},
					))
				}
			}
		}
	}

	return results
}

// validatePrimaryKeyDatatype checks that primary keys use approved data types
func validatePrimaryKeyDatatype(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if col.IsPrimaryKey {
					location := formatLocation(schema.Name, table.Name, col.Name)

					// Normalize type (remove size/precision)
					normalizedType := normalizeDataType(col.Type)
					passed := contains(rule.AllowedTypes, normalizedType)

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":        schema.Name,
							"table":         table.Name,
							"column":        col.Name,
							"current_type":  col.Type,
							"allowed_types": rule.AllowedTypes,
						},
					))
				}
			}
		}
	}

	return results
}

// validatePrimaryKeyAutoIncrement checks primary key auto-increment settings
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if col.IsPrimaryKey {
					location := formatLocation(schema.Name, table.Name, col.Name)

					// Check if auto-increment matches requirement
					passed := col.AutoIncrement == rule.RequireAutoIncrement

					if !passed {
						results = append(results, createResult(
							ruleName,
							false,
							rule.Message,
							location,
							map[string]interface{}{
								"schema":                 schema.Name,
								"table":                  table.Name,
								"column":                 col.Name,
								"has_auto_increment":     col.AutoIncrement,
								"require_auto_increment": rule.RequireAutoIncrement,
							},
						))
					}
				}
			}
		}
	}

	return results
}

// validateForeignKeyColumnNaming checks that foreign key column names match a pattern
func validateForeignKeyColumnNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	pattern, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Check foreign key constraints
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					for _, colName := range constraint.Columns {
						location := formatLocation(schema.Name, table.Name, colName)
						passed := pattern.MatchString(colName)

						results = append(results, createResult(
							ruleName,
							passed,
							rule.Message,
							location,
							map[string]interface{}{
								"schema":           schema.Name,
								"table":            table.Name,
								"column":           colName,
								"constraint":       constraint.Name,
								"expected_pattern": rule.Pattern,
							},
						))
					}
				}
			}
		}
	}

	return results
}

// validateForeignKeyConstraintNaming checks that foreign key constraint names match a pattern
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	pattern, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					location := formatLocation(schema.Name, table.Name, "")
					passed := pattern.MatchString(constraint.Name)

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":           schema.Name,
							"table":            table.Name,
							"constraint":       constraint.Name,
							"expected_pattern": rule.Pattern,
						},
					))
				}
			}
		}
	}

	return results
}

// validateForeignKeyIndex checks that foreign key columns have indexes
func validateForeignKeyIndex(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	if !rule.RequireIndex {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Get all foreign key columns
			fkColumns := make(map[string]bool)
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					for _, col := range constraint.Columns {
						fkColumns[col] = true
					}
				}
			}

			// Check if each FK column has an index
			for fkCol := range fkColumns {
				hasIndex := false

				// Check table indexes
				for _, index := range table.Indexes {
					// Index is good if FK column is the first column
					if len(index.Columns) > 0 && index.Columns[0] == fkCol {
						hasIndex = true
						break
					}
				}

				location := formatLocation(schema.Name, table.Name, fkCol)
				results = append(results, createResult(
					ruleName,
					hasIndex,
					rule.Message,
					location,
					map[string]interface{}{
						"schema":    schema.Name,
						"table":     table.Name,
						"column":    fkCol,
						"has_index": hasIndex,
					},
				))
			}
		}
	}

	return results
}

// validateTableNamingCase checks table name casing
func validateTableNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	pattern, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			location := formatLocation(schema.Name, table.Name, "")
			passed := pattern.MatchString(table.Name)

			results = append(results, createResult(
				ruleName,
				passed,
				rule.Message,
				location,
				map[string]interface{}{
					"schema":           schema.Name,
					"table":            table.Name,
					"expected_case":    rule.Case,
					"expected_pattern": rule.Pattern,
				},
			))
		}
	}

	return results
}

// validateColumnNamingCase checks column name casing
func validateColumnNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	pattern, err := regexp.Compile(rule.Pattern)
	if err != nil {
		return results
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				location := formatLocation(schema.Name, table.Name, col.Name)
				passed := pattern.MatchString(col.Name)

				results = append(results, createResult(
					ruleName,
					passed,
					rule.Message,
					location,
					map[string]interface{}{
						"schema":           schema.Name,
						"table":            table.Name,
						"column":           col.Name,
						"expected_case":    rule.Case,
						"expected_pattern": rule.Pattern,
					},
				))
			}
		}
	}

	return results
}

// validateTableNameLength checks table name length
func validateTableNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			location := formatLocation(schema.Name, table.Name, "")
			passed := len(table.Name) <= rule.MaxLength

			results = append(results, createResult(
				ruleName,
				passed,
				rule.Message,
				location,
				map[string]interface{}{
					"schema":     schema.Name,
					"table":      table.Name,
					"length":     len(table.Name),
					"max_length": rule.MaxLength,
				},
			))
		}
	}

	return results
}

// validateColumnNameLength checks column name length
func validateColumnNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				location := formatLocation(schema.Name, table.Name, col.Name)
				passed := len(col.Name) <= rule.MaxLength

				results = append(results, createResult(
					ruleName,
					passed,
					rule.Message,
					location,
					map[string]interface{}{
						"schema":     schema.Name,
						"table":      table.Name,
						"column":     col.Name,
						"length":     len(col.Name),
						"max_length": rule.MaxLength,
					},
				))
			}
		}
	}

	return results
}

// validateReservedKeywords checks for reserved SQL keywords
func validateReservedKeywords(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	// Build keyword map from PostgreSQL keywords
	keywordSlice := pgsql.GetPostgresKeywords()
	keywords := make(map[string]bool)
	for _, kw := range keywordSlice {
		keywords[strings.ToUpper(kw)] = true
	}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Check table name
			if rule.CheckTables {
				location := formatLocation(schema.Name, table.Name, "")
				passed := !keywords[strings.ToUpper(table.Name)]

				results = append(results, createResult(
					ruleName,
					passed,
					rule.Message,
					location,
					map[string]interface{}{
						"schema":      schema.Name,
						"table":       table.Name,
						"object_type": "table",
					},
				))
			}

			// Check column names
			if rule.CheckColumns {
				for _, col := range table.Columns {
					location := formatLocation(schema.Name, table.Name, col.Name)
					passed := !keywords[strings.ToUpper(col.Name)]

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":      schema.Name,
							"table":       table.Name,
							"column":      col.Name,
							"object_type": "column",
						},
					))
				}
			}
		}
	}

	return results
}

// validateMissingPrimaryKey checks for tables without primary keys
func validateMissingPrimaryKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			hasPrimaryKey := false

			// Check columns for primary key
			for _, col := range table.Columns {
				if col.IsPrimaryKey {
					hasPrimaryKey = true
					break
				}
			}

			// Also check constraints
			if !hasPrimaryKey {
				for _, constraint := range table.Constraints {
					if constraint.Type == models.PrimaryKeyConstraint {
						hasPrimaryKey = true
						break
					}
				}
			}

			location := formatLocation(schema.Name, table.Name, "")
			results = append(results, createResult(
				ruleName,
				hasPrimaryKey,
				rule.Message,
				location,
				map[string]interface{}{
					"schema": schema.Name,
					"table":  table.Name,
				},
			))
		}
	}

	return results
}

// validateOrphanedForeignKey checks for foreign keys referencing non-existent tables
func validateOrphanedForeignKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	// Build a map of existing tables for quick lookup
	tableExists := make(map[string]bool)
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			key := schema.Name + "." + table.Name
			tableExists[key] = true
		}
	}

	// Check all foreign key constraints
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					// Build referenced table key
					refSchema := constraint.ReferencedSchema
					if refSchema == "" {
						refSchema = schema.Name
					}
					refKey := refSchema + "." + constraint.ReferencedTable

					location := formatLocation(schema.Name, table.Name, "")
					passed := tableExists[refKey]

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":            schema.Name,
							"table":             table.Name,
							"constraint":        constraint.Name,
							"referenced_schema": refSchema,
							"referenced_table":  constraint.ReferencedTable,
						},
					))
				}
			}
		}
	}

	return results
}

// validateCircularDependency checks for circular foreign key dependencies
func validateCircularDependency(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	// Build dependency graph
	dependencies := make(map[string][]string)
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name

			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					refSchema := constraint.ReferencedSchema
					if refSchema == "" {
						refSchema = schema.Name
					}
					refKey := refSchema + "." + constraint.ReferencedTable

					dependencies[tableKey] = append(dependencies[tableKey], refKey)
				}
			}
		}
	}

	// Check for cycles using DFS
	for tableKey := range dependencies {
		visited := make(map[string]bool)
		recStack := make(map[string]bool)

		if hasCycle(tableKey, dependencies, visited, recStack) {
			parts := strings.Split(tableKey, ".")
			location := formatLocation(parts[0], parts[1], "")

			results = append(results, createResult(
				ruleName,
				false,
				rule.Message,
				location,
				map[string]interface{}{
					"schema": parts[0],
					"table":  parts[1],
				},
			))
		}
	}

	return results
}

// Helper functions

// hasCycle performs DFS to detect cycles in dependency graph
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
	visited[node] = true
	recStack[node] = true

	for _, neighbor := range graph[node] {
		if !visited[neighbor] {
			if hasCycle(neighbor, graph, visited, recStack) {
				return true
			}
		} else if recStack[neighbor] {
			return true
		}
	}

	recStack[node] = false
	return false
}

// normalizeDataType removes size/precision from data type
func normalizeDataType(dataType string) string {
	// Remove everything in parentheses
	idx := strings.Index(dataType, "(")
	if idx > 0 {
		dataType = dataType[:idx]
	}
	return strings.ToLower(strings.TrimSpace(dataType))
}

// contains checks if a string slice contains a value
func contains(slice []string, value string) bool {
	for _, item := range slice {
		if strings.EqualFold(item, value) {
			return true
		}
	}
	return false
}
160
pkg/readers/sqldir/README.md
Normal file
@@ -0,0 +1,160 @@
# SQL Directory Reader

The SQL Directory Reader (`sqldir`) reads SQL scripts from a directory structure and populates the `Scripts` field of a `Schema`. It supports recursive directory scanning and extracts priority, sequence, and name information from filenames.

## File Naming Convention

Scripts must follow this naming pattern (supports both underscores and hyphens as separators):

```
{priority}_{sequence}_{name}.{sql|pgsql}
{priority}-{sequence}-{name}.{sql|pgsql}
```

### Components

- **priority**: Integer (0-9999) - Defines execution order (lower executes first)
- **sequence**: Integer (0-9999) - Defines order within the same priority level
- **separator**: Underscore `_` or hyphen `-` (can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens allowed)
- **extension**: `.sql` or `.pgsql`

### Examples

```
migrations/
├── 1_001_create_schema.sql          # Priority 1, Sequence 1 (underscore format)
├── 1-002-create-users-table.sql     # Priority 1, Sequence 2 (hyphen format)
├── 1_003_create_posts_table.pgsql   # Priority 1, Sequence 3 (underscore format)
├── 2-001-add-indexes.sql            # Priority 2, Sequence 1 (hyphen format)
├── 2_002_add_constraints.sql        # Priority 2, Sequence 2 (underscore format)
├── 10-10-create-newid.pgsql         # Priority 10, Sequence 10 (hyphen format)
└── subdirectory/
    └── 3_001_seed_data.sql          # Priority 3, Sequence 1 (subdirs supported)
```

**Execution Order**: the seven files above run sorted by Priority ascending, then Sequence ascending (so `3_001_seed_data.sql` runs before `10-10-create-newid.pgsql`).

**Both formats can be mixed** in the same directory - the reader handles both seamlessly.

### Invalid Filenames (Ignored)

- `migration.sql` - Missing priority/sequence
- `1_create_users.sql` - Missing sequence
- `create_users.sql` - Missing priority/sequence
- `1_001_test.txt` - Wrong extension
- `readme.md` - Not a SQL file
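A minimal sketch of how such filenames can be parsed and ordered (the exact regular expression used by `reader.go` is not shown in this diff, so the pattern below is an assumption consistent with the convention):

```go
package main

import (
	"fmt"
	"regexp"
	"sort"
	"strconv"
)

// scriptNamePattern is an assumed pattern for {priority}{_|-}{sequence}{_|-}{name}.sql|.pgsql.
var scriptNamePattern = regexp.MustCompile(`^(\d{1,4})[_-](\d{1,4})[_-]([A-Za-z0-9_-]+)\.(sql|pgsql)$`)

type scriptFile struct {
	Priority int
	Sequence int
	Name     string
}

// parseScriptName extracts priority, sequence, and name; non-matching files are skipped.
func parseScriptName(filename string) (scriptFile, bool) {
	m := scriptNamePattern.FindStringSubmatch(filename)
	if m == nil {
		return scriptFile{}, false
	}
	p, _ := strconv.Atoi(m[1]) // regex guarantees digits, so errors cannot occur
	s, _ := strconv.Atoi(m[2])
	return scriptFile{Priority: p, Sequence: s, Name: m[3]}, true
}

func main() {
	names := []string{"2-001-add-indexes.sql", "1_002_create_users.sql", "1_001_create_schema.sql", "notes.md"}
	scripts := []scriptFile{}
	for _, n := range names {
		if sf, ok := parseScriptName(n); ok {
			scripts = append(scripts, sf)
		}
	}
	// Order for execution: Priority ascending, then Sequence ascending.
	sort.Slice(scripts, func(i, j int) bool {
		if scripts[i].Priority != scripts[j].Priority {
			return scripts[i].Priority < scripts[j].Priority
		}
		return scripts[i].Sequence < scripts[j].Sequence
	})
	fmt.Println(scripts) // [{1 1 create_schema} {1 2 create_users} {2 1 add-indexes}]
}
```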

## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
)

reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "/path/to/migrations",
	Metadata: map[string]any{
		"schema_name":   "public", // Optional, defaults to "public"
		"database_name": "myapp",  // Optional, defaults to "database"
	},
})

// Read all scripts
database, err := reader.ReadDatabase()
if err != nil {
	log.Fatal(err)
}

// Access scripts
for _, schema := range database.Schemas {
	for _, script := range schema.Scripts {
		fmt.Printf("Script: %s (P:%d S:%d)\n",
			script.Name, script.Priority, script.Sequence)
		fmt.Printf("SQL: %s\n", script.SQL)
	}
}
```

### Read Schema Only

```go
schema, err := reader.ReadSchema()
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Found %d scripts\n", len(schema.Scripts))
```

## Features

- **Recursive Directory Scanning**: Automatically scans all subdirectories
- **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
- **Flexible Naming**: Extracts metadata from filename patterns
- **Error Handling**: Validates directory existence and file accessibility
- **Schema Integration**: Scripts are added to the standard RelSpec `Schema` model

## Script Model

Each script is stored as a `models.Script`:

```go
type Script struct {
	Name        string // Extracted from filename (e.g., "create_users")
	Description string // Auto-generated description with file path
	SQL         string // Complete SQL content from file
	Priority    int    // Execution priority from filename
	Sequence    uint   // Execution sequence from filename
	// ... other fields available but not populated by this reader
}
```

## Integration with SQL Executor

The SQL Directory Reader is designed to work seamlessly with the SQL Executor Writer:

```go
// Read scripts
reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "./migrations",
})
db, _ := reader.ReadDatabase()

// Execute scripts
writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://localhost/mydb",
	},
})
writer.WriteDatabase(db) // Executes in Priority→Sequence order
```

See `pkg/writers/sqlexec/README.md` for more details on script execution.

## Error Handling

The reader will return errors for:

- Non-existent directory paths
- Inaccessible directories or files
- Invalid file permissions
- File read failures

Files that don't match the naming pattern are silently ignored (not treated as errors).

## Testing

Run tests:

```bash
go test ./pkg/readers/sqldir/
```

Tests include:

- Valid file parsing
- Recursive directory scanning
- Invalid filename handling
- Empty directory handling
- Error conditions
127
pkg/readers/sqldir/example_test.go
Normal file
@@ -0,0 +1,127 @@
package sqldir_test

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

// Example demonstrates how to read SQL scripts from a directory and execute them
func Example() {
	// Step 1: Read SQL scripts from a directory
	// Directory structure example:
	//   migrations/
	//     1_001_create_schema.sql
	//     1_002_create_users_table.sql
	//     1_003_create_posts_table.pgsql
	//     2_001_add_indexes.sql
	//     2_002_seed_data.sql

	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
		Metadata: map[string]any{
			"schema_name":   "public",
			"database_name": "myapp",
		},
	})

	// Read the database schema with scripts
	database, err := reader.ReadDatabase()
	if err != nil {
		log.Fatalf("Failed to read scripts: %v", err)
	}

	fmt.Printf("Read %d schemas\n", len(database.Schemas))
	fmt.Printf("Found %d scripts in schema '%s'\n",
		len(database.Schemas[0].Scripts),
		database.Schemas[0].Name)

	// Step 2: Execute the scripts against a PostgreSQL database
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://user:password@localhost:5432/myapp?sslmode=disable",
		},
	})

	// Execute all scripts in Priority then Sequence order
	if err := writer.WriteDatabase(database); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("All scripts executed successfully!")
}

// Example_withSingleSchema shows how to read and execute scripts for a single schema
func Example_withSingleSchema() {
	// Read scripts
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
	})

	schema, err := reader.ReadSchema()
	if err != nil {
		log.Fatalf("Failed to read schema: %v", err)
	}

	// Execute scripts
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/testdb",
		},
	})

	if err := writer.WriteSchema(schema); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("Schema scripts executed successfully!")
}

// Example_fileNamingConvention shows the expected file naming pattern
func Example_fileNamingConvention() {
	// File naming pattern: {priority}_{sequence}_{name}.sql or .pgsql
	// OR: {priority}-{sequence}-{name}.sql or .pgsql
	//
	// Both underscore (_) and hyphen (-) separators are supported and can be mixed.
	//
	// Components:
	//   - priority: Integer (0-9999) - Scripts with lower priority execute first
	//   - sequence: Integer (0-9999) - Within same priority, lower sequence executes first
	//   - separator: Underscore (_) or hyphen (-)
	//   - name: Descriptive name (alphanumeric, underscores, hyphens)
	//   - extension: .sql or .pgsql
	//
	// Examples (underscore format):
	//   ✓ 1_001_create_users.sql   (Priority=1, Sequence=1)
	//   ✓ 1_002_create_posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2_001_add_indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10_100_migration.sql     (Priority=10, Sequence=100)
	//
	// Examples (hyphen format):
	//   ✓ 1-001-create-users.sql   (Priority=1, Sequence=1)
	//   ✓ 1-002-create-posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2-001-add-indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10-10-create-newid.pgsql (Priority=10, Sequence=10)
	//
	// Mixed format (both in same directory):
	//   ✓ 1_001_create_users.sql   (underscore format)
	//   ✓ 1-002-create-posts.sql   (hyphen format)
	//   ✓ 2_001_add_indexes.sql    (underscore format)
	//
	// Execution order for mixed examples:
	//   1. 1_001_create_users.sql  (Priority 1, Sequence 1)
	//   2. 1-002-create-posts.sql  (Priority 1, Sequence 2)
	//   3. 2_001_add_indexes.sql   (Priority 2, Sequence 1)
	//
	// Invalid filenames (will be ignored):
	//   ✗ migration.sql            (missing priority/sequence)
	//   ✗ 1_create_users.sql       (missing sequence)
	//   ✗ create_users.sql         (missing priority/sequence)
	//   ✗ 1_001_create_users.txt   (wrong extension)

	fmt.Println("See comments for file naming conventions")
}
171 pkg/readers/sqldir/reader.go Normal file
@@ -0,0 +1,171 @@
package sqldir

import (
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"strconv"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for SQL script directories
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new SQL directory reader
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads all SQL scripts from a directory into a Database
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("directory path is required")
	}

	// Check if directory exists
	info, err := os.Stat(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to access directory: %w", err)
	}
	if !info.IsDir() {
		return nil, fmt.Errorf("path is not a directory: %s", r.options.FilePath)
	}

	// Read scripts from directory
	scripts, err := r.readScripts()
	if err != nil {
		return nil, fmt.Errorf("failed to read scripts: %w", err)
	}

	// Get schema name from metadata or use default
	schemaName := "public"
	if name, ok := r.options.Metadata["schema_name"].(string); ok && name != "" {
		schemaName = name
	}

	// Create schema with scripts
	schema := &models.Schema{
		Name:    schemaName,
		Scripts: scripts,
	}

	// Get database name from metadata or use default
	dbName := "database"
	if name, ok := r.options.Metadata["database_name"].(string); ok && name != "" {
		dbName = name
	}

	// Create database with schema
	database := &models.Database{
		Name:    dbName,
		Schemas: []*models.Schema{schema},
	}

	// Set back-reference
	schema.RefDatabase = database

	return database, nil
}

// ReadSchema reads all SQL scripts from a directory into a Schema
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schema found")
	}
	return db.Schemas[0], nil
}

// ReadTable is not applicable for SQL script directories
func (r *Reader) ReadTable() (*models.Table, error) {
	return nil, fmt.Errorf("ReadTable is not supported for SQL script directories")
}

// readScripts recursively scans the directory for SQL files and parses them into Script models
func (r *Reader) readScripts() ([]*models.Script, error) {
	var scripts []*models.Script

	// Regular expression to parse filename: {priority}{sep}{sequence}{sep}{name}.sql or .pgsql
	// Separator can be underscore (_) or hyphen (-)
	// Example: 1_001_create_users.sql   -> priority=1, sequence=001, name=create_users
	// Example: 2_005_add_indexes.pgsql  -> priority=2, sequence=005, name=add_indexes
	// Example: 10-10-create-newid.pgsql -> priority=10, sequence=10, name=create-newid
	pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)

	err := filepath.WalkDir(r.options.FilePath, func(path string, d os.DirEntry, err error) error {
		if err != nil {
			return err
		}

		// Skip directories
		if d.IsDir() {
			return nil
		}

		// Get filename
		filename := d.Name()

		// Match against pattern
		matches := pattern.FindStringSubmatch(filename)
		if matches == nil {
			// Skip files that don't match the pattern
			return nil
		}

		// Parse priority
		priority, err := strconv.Atoi(matches[1])
		if err != nil {
			return fmt.Errorf("invalid priority in filename %s: %w", filename, err)
		}

		// Parse sequence
		sequence, err := strconv.ParseUint(matches[2], 10, 64)
		if err != nil {
			return fmt.Errorf("invalid sequence in filename %s: %w", filename, err)
		}

		// Extract name
		name := matches[3]

		// Read SQL content
		content, err := os.ReadFile(path)
		if err != nil {
			return fmt.Errorf("failed to read file %s: %w", path, err)
		}

		// Get relative path from base directory
		relPath, err := filepath.Rel(r.options.FilePath, path)
		if err != nil {
			relPath = path
		}

		// Create Script model
		script := &models.Script{
			Name:        name,
			Description: fmt.Sprintf("SQL script from %s", relPath),
			SQL:         string(content),
			Priority:    priority,
			Sequence:    uint(sequence),
		}

		scripts = append(scripts, script)

		return nil
	})

	if err != nil {
		return nil, err
	}

	return scripts, nil
}
375 pkg/readers/sqldir/reader_test.go Normal file
@@ -0,0 +1,375 @@
package sqldir

import (
	"os"
	"path/filepath"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test SQL files with both underscore and hyphen separators
	testFiles := map[string]string{
		"1_001_create_users.sql":   "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT);",
		"1_002_create_posts.sql":   "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT);",
		"2_001_add_indexes.sql":    "CREATE INDEX idx_posts_user_id ON posts(user_id);",
		"1_003_seed_data.pgsql":    "INSERT INTO users (name) VALUES ('Alice'), ('Bob');",
		"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL PRIMARY KEY);",
		"2-005-add-column.sql":     "ALTER TABLE users ADD COLUMN email TEXT;",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create subdirectory with an additional script
	subDir := filepath.Join(tempDir, "migrations")
	if err := os.MkdirAll(subDir, 0755); err != nil {
		t.Fatalf("Failed to create subdirectory: %v", err)
	}
	subFile := filepath.Join(subDir, "3_001_add_column.sql")
	if err := os.WriteFile(subFile, []byte("ALTER TABLE users ADD COLUMN email TEXT;"), 0644); err != nil {
		t.Fatalf("Failed to create subdirectory file: %v", err)
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
		Metadata: map[string]any{
			"schema_name":   "test_schema",
			"database_name": "test_db",
		},
	})

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	// Verify database
	if db.Name != "test_db" {
		t.Errorf("Expected database name 'test_db', got '%s'", db.Name)
	}

	if len(db.Schemas) != 1 {
		t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
	}

	schema := db.Schemas[0]
	if schema.Name != "test_schema" {
		t.Errorf("Expected schema name 'test_schema', got '%s'", schema.Name)
	}

	// Verify scripts (should be 7 total: 4 underscore + 2 hyphen + 1 subdirectory)
	if len(schema.Scripts) != 7 {
		t.Fatalf("Expected 7 scripts, got %d", len(schema.Scripts))
	}

	// Verify script details
	expectedScripts := []struct {
		name     string
		priority int
		sequence uint
	}{
		{"create_users", 1, 1},
		{"create_posts", 1, 2},
		{"seed_data", 1, 3},
		{"add_indexes", 2, 1},
		{"add-column", 2, 5},
		{"add_column", 3, 1},
		{"create-newid", 10, 10},
	}

	scriptMap := make(map[string]*struct {
		priority int
		sequence uint
		sql      string
	})
	for _, script := range schema.Scripts {
		scriptMap[script.Name] = &struct {
			priority int
			sequence uint
			sql      string
		}{
			priority: script.Priority,
			sequence: script.Sequence,
			sql:      script.SQL,
		}
	}

	for _, expected := range expectedScripts {
		script, exists := scriptMap[expected.name]
		if !exists {
			t.Errorf("Expected script '%s' not found", expected.name)
			continue
		}
		if script.priority != expected.priority {
			t.Errorf("Script '%s': expected priority %d, got %d",
				expected.name, expected.priority, script.priority)
		}
		if script.sequence != expected.sequence {
			t.Errorf("Script '%s': expected sequence %d, got %d",
				expected.name, expected.sequence, script.sequence)
		}
		if script.sql == "" {
			t.Errorf("Script '%s': SQL content is empty", expected.name)
		}
	}
}

func TestReader_ReadSchema(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test SQL file
	testFile := filepath.Join(tempDir, "1_001_test.sql")
	if err := os.WriteFile(testFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	// Read schema
	schema, err := reader.ReadSchema()
	if err != nil {
		t.Fatalf("ReadSchema failed: %v", err)
	}

	// Verify schema
	if schema.Name != "public" {
		t.Errorf("Expected default schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Scripts) != 1 {
		t.Fatalf("Expected 1 script, got %d", len(schema.Scripts))
	}
}

func TestReader_InvalidDirectory(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{
		FilePath: "/nonexistent/directory",
	})

	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for nonexistent directory, got nil")
	}
}

func TestReader_EmptyDirectory(t *testing.T) {
	// Create temporary empty directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	if len(db.Schemas[0].Scripts) != 0 {
		t.Errorf("Expected 0 scripts in empty directory, got %d", len(db.Schemas[0].Scripts))
	}
}

func TestReader_InvalidFilename(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create files with various invalid patterns
	invalidFiles := []string{
		"invalid.sql",           // No priority/sequence
		"1_test.sql",            // Missing sequence
		"test_1_2.sql",          // Wrong order
		"a_001_test.sql",        // Non-numeric priority
		"1_abc_test.sql",        // Non-numeric sequence
		"1_001_test.txt",        // Wrong extension
		"1_001_test.sql.backup", // Wrong extension
	}

	for _, filename := range invalidFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte("SELECT 1;"), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create one valid file
	validFile := filepath.Join(tempDir, "1_001_valid.sql")
	if err := os.WriteFile(validFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create valid file: %v", err)
	}

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	// Should only have the valid file
	if len(db.Schemas[0].Scripts) != 1 {
		t.Errorf("Expected 1 script (invalid files should be skipped), got %d", len(db.Schemas[0].Scripts))
	}

	if db.Schemas[0].Scripts[0].Name != "valid" {
		t.Errorf("Expected script name 'valid', got '%s'", db.Schemas[0].Scripts[0].Name)
	}
}

func TestReader_ReadTable(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{})

	_, err := reader.ReadTable()
	if err == nil {
		t.Error("Expected error for ReadTable (not supported), got nil")
	}
}

func TestReader_HyphenFormat(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-hyphen-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test files with hyphen separators
	testFiles := map[string]string{
		"1-001-create-table.sql":   "CREATE TABLE test (id INT);",
		"1-002-insert-data.pgsql":  "INSERT INTO test VALUES (1);",
		"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL);",
		"2-005-add-index.sql":      "CREATE INDEX idx_test ON test(id);",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) != 4 {
		t.Fatalf("Expected 4 scripts, got %d", len(schema.Scripts))
	}

	// Verify specific hyphen-formatted scripts
	expectedScripts := map[string]struct {
		priority int
		sequence uint
	}{
		"create-table": {1, 1},
		"insert-data":  {1, 2},
		"add-index":    {2, 5},
		"create-newid": {10, 10},
	}

	for _, script := range schema.Scripts {
		expected, exists := expectedScripts[script.Name]
		if !exists {
			t.Errorf("Unexpected script: %s", script.Name)
			continue
		}
		if script.Priority != expected.priority {
			t.Errorf("Script '%s': expected priority %d, got %d",
				script.Name, expected.priority, script.Priority)
		}
		if script.Sequence != expected.sequence {
			t.Errorf("Script '%s': expected sequence %d, got %d",
				script.Name, expected.sequence, script.Sequence)
		}
	}
}

func TestReader_MixedFormat(t *testing.T) {
	// Test that both underscore and hyphen formats can be mixed
	tempDir, err := os.MkdirTemp("", "sqldir-test-mixed-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	testFiles := map[string]string{
		"1_001_underscore.sql": "SELECT 1;",
		"1-002-hyphen.sql":     "SELECT 2;",
		"2_003_underscore.sql": "SELECT 3;",
		"2-004-hyphen.sql":     "SELECT 4;",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) != 4 {
		t.Fatalf("Expected 4 scripts (mixed format), got %d", len(schema.Scripts))
	}

	// Verify both formats are parsed correctly. The two underscore files and
	// the two hyphen files share names, so the distinct-name set has two entries.
	names := make(map[string]bool)
	for _, script := range schema.Scripts {
		names[script.Name] = true
	}

	expectedNames := []string{"underscore", "hyphen"}
	for _, name := range expectedNames {
		if !names[name] {
			t.Errorf("Expected script name '%s' not found", name)
		}
	}
}
226 pkg/writers/sqlexec/README.md Normal file
@@ -0,0 +1,226 @@
# SQL Executor Writer

The SQL Executor Writer (`sqlexec`) executes SQL scripts from `models.Script` objects against a PostgreSQL database. Scripts are executed in order of Priority (ascending), then Sequence (ascending).

## Features

- **Ordered Execution**: Scripts execute in Priority→Sequence order
- **PostgreSQL Support**: Uses the `pgx/v5` driver for robust PostgreSQL connectivity
- **Stop on Error**: Execution halts immediately on the first error (default behavior)
- **Progress Reporting**: Prints execution status to stdout
- **Multiple Schemas**: Can execute scripts from multiple schemas in a database

## Usage

### Basic Usage

```go
import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://user:password@localhost:5432/dbname?sslmode=disable",
	},
})

// Execute all scripts from the database
err := writer.WriteDatabase(database)
if err != nil {
	log.Fatalf("Execution failed: %v", err)
}
```

### Execute Single Schema

```go
err := writer.WriteSchema(schema)
if err != nil {
	log.Fatalf("Schema execution failed: %v", err)
}
```

### Complete Example with SQL Directory Reader

```go
import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

func main() {
	// Read SQL scripts from directory
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "./migrations",
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	// Execute scripts against PostgreSQL
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/myapp",
		},
	})

	if err := writer.WriteDatabase(db); err != nil {
		log.Fatal(err)
	}
}
```

## Configuration

### Required Metadata

- **connection_string**: PostgreSQL connection string (required)

### Connection String Format

```
postgres://[user[:password]@][host][:port][/dbname][?param1=value1&...]
```

Examples:
```
postgres://localhost/mydb
postgres://user:pass@localhost:5432/mydb?sslmode=disable
postgres://user@localhost/mydb?sslmode=require
postgresql://user:pass@prod-db.example.com:5432/production
```

## Execution Order

Scripts are sorted and executed based on:

1. **Priority** (ascending): Lower priority values execute first
2. **Sequence** (ascending): Within the same priority, lower sequence values execute first

### Example Execution Order

Given these scripts:
```
Script A: Priority=2, Sequence=1
Script B: Priority=1, Sequence=3
Script C: Priority=1, Sequence=1
Script D: Priority=1, Sequence=2
Script E: Priority=3, Sequence=1
```

Execution order: **C → D → B → A → E**
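For reference, the ordering is a plain two-key comparison; this mirrors the `sort.Slice` call in `executeScripts`:

```go
sort.Slice(scripts, func(i, j int) bool {
	// Lower Priority first; within the same priority, lower Sequence first
	if scripts[i].Priority != scripts[j].Priority {
		return scripts[i].Priority < scripts[j].Priority
	}
	return scripts[i].Sequence < scripts[j].Sequence
})
```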
## Output

The writer prints progress to stdout:

```
Executing script: create_users (Priority=1, Sequence=1)
✓ Successfully executed: create_users
Executing script: create_posts (Priority=1, Sequence=2)
✓ Successfully executed: create_posts
Executing script: add_indexes (Priority=2, Sequence=1)
✓ Successfully executed: add_indexes
```

## Error Handling

### Connection Errors

If the database connection fails, execution stops immediately:

```
Error: failed to connect to database: connection refused
```

### Script Execution Errors

If a script fails, execution stops and the error is returned with context:

```
Error: failed to execute script add_indexes (Priority=2, Sequence=1):
syntax error at or near "IDNEX"
```

**Behavior**: Stop on first error (scripts executed before the error remain committed)
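Because script errors are wrapped with `%w`, callers can unwrap the underlying PostgreSQL error. A minimal sketch, assuming the `pgconn` package that ships with `pgx/v5`:

```go
var pgErr *pgconn.PgError // from github.com/jackc/pgx/v5/pgconn
if err := writer.WriteDatabase(db); err != nil {
	if errors.As(err, &pgErr) {
		// SQLSTATE code and server message from the failing statement
		log.Printf("script failed: %s (SQLSTATE %s)", pgErr.Message, pgErr.Code)
	} else {
		log.Printf("execution failed: %v", err)
	}
}
```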
### Empty Script Handling

Scripts with empty SQL content are skipped silently.

## Database Support

Currently supports:
- ✅ PostgreSQL (via pgx/v5)

Future support planned for:
- MySQL/MariaDB
- SQLite
- Generic SQL via database/sql

## Transaction Behavior

**Current**: Each script executes in its own implicit transaction (PostgreSQL default behavior)

**Future Enhancement**: Option to wrap all scripts in a single transaction for atomic execution with rollback on error.
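A minimal sketch of what that option could look like with `pgx/v5` (illustrative only; `executeScriptsAtomic` is a hypothetical helper, not part of this package):

```go
// executeScriptsAtomic runs every script in one transaction, rolling all of
// them back if any script fails.
func executeScriptsAtomic(ctx context.Context, conn *pgx.Conn, scripts []*models.Script) error {
	tx, err := conn.Begin(ctx)
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %w", err)
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback(ctx)

	for _, script := range scripts {
		if script.SQL == "" {
			continue
		}
		if _, err := tx.Exec(ctx, script.SQL); err != nil {
			return fmt.Errorf("script %s failed, rolling back: %w", script.Name, err)
		}
	}
	return tx.Commit(ctx)
}
```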
## Performance Considerations

- Scripts execute sequentially (not in parallel)
- Each script incurs a database round-trip
- For large migrations, consider:
  - Combining related statements into fewer scripts
  - Using PostgreSQL's COPY command for bulk data (see the sketch after this list)
  - Running during low-traffic periods
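As an example of the COPY suggestion above, `pgx` exposes the COPY protocol directly. A minimal sketch, assuming a hypothetical `users(name)` table and data already in memory:

```go
rows := [][]any{
	{"Alice"},
	{"Bob"},
}

// CopyFrom streams rows over the COPY protocol, which is far cheaper
// than one INSERT round-trip per row for large data sets.
copied, err := conn.CopyFrom(
	ctx,
	pgx.Identifier{"users"},
	[]string{"name"},
	pgx.CopyFromRows(rows),
)
if err != nil {
	log.Fatalf("bulk load failed: %v", err)
}
log.Printf("copied %d rows", copied)
```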
## Testing

Run tests:
```bash
go test ./pkg/writers/sqlexec/
```

Current tests include:
- Validation and error handling
- Script sorting logic
- Configuration validation

### Integration Tests

For integration testing with a real database:

```bash
# Start PostgreSQL (example with Docker)
docker run -d --name postgres-test \
  -e POSTGRES_PASSWORD=test \
  -e POSTGRES_DB=testdb \
  -p 5432:5432 \
  postgres:16

# Run your integration tests
go test -tags=integration ./pkg/writers/sqlexec/

# Cleanup
docker stop postgres-test
docker rm postgres-test
```

## Limitations

- `WriteTable()` is not supported (returns an error)
- Requires a PostgreSQL connection (no offline mode)
- No built-in transaction wrapping (yet)
- No rollback script support (yet, though the `models.Script.Rollback` field exists)

## Related

- **SQL Directory Reader**: `pkg/readers/sqldir/` - Read scripts from the filesystem
- **Script Model**: `pkg/models/models.go` - Script structure definition
- **pgx Documentation**: https://github.com/jackc/pgx - PostgreSQL driver docs
125 pkg/writers/sqlexec/writer.go Normal file
@@ -0,0 +1,125 @@
package sqlexec

import (
	"context"
	"fmt"
	"sort"

	"github.com/jackc/pgx/v5"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

// Writer implements the writers.Writer interface for executing SQL scripts
type Writer struct {
	options *writers.WriterOptions
}

// NewWriter creates a new SQL executor writer
func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

// WriteDatabase executes all scripts from all schemas in the database
func (w *Writer) WriteDatabase(db *models.Database) error {
	if db == nil {
		return fmt.Errorf("database is nil")
	}

	// Get connection string from metadata
	connString, ok := w.options.Metadata["connection_string"].(string)
	if !ok || connString == "" {
		return fmt.Errorf("connection_string is required in writer metadata")
	}

	// Connect to database
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, connString)
	if err != nil {
		return fmt.Errorf("failed to connect to database: %w", err)
	}
	defer conn.Close(ctx)

	// Execute scripts from all schemas
	for _, schema := range db.Schemas {
		if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
			return fmt.Errorf("failed to execute scripts from schema %s: %w", schema.Name, err)
		}
	}

	return nil
}

// WriteSchema executes all scripts from a single schema
func (w *Writer) WriteSchema(schema *models.Schema) error {
	if schema == nil {
		return fmt.Errorf("schema is nil")
	}

	// Get connection string from metadata
	connString, ok := w.options.Metadata["connection_string"].(string)
	if !ok || connString == "" {
		return fmt.Errorf("connection_string is required in writer metadata")
	}

	// Connect to database
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, connString)
	if err != nil {
		return fmt.Errorf("failed to connect to database: %w", err)
	}
	defer conn.Close(ctx)

	// Execute scripts
	if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
		return fmt.Errorf("failed to execute scripts: %w", err)
	}

	return nil
}

// WriteTable is not applicable for SQL script execution
func (w *Writer) WriteTable(table *models.Table) error {
	return fmt.Errorf("WriteTable is not supported for SQL script execution")
}

// executeScripts executes scripts in Priority then Sequence order
func (w *Writer) executeScripts(ctx context.Context, conn *pgx.Conn, scripts []*models.Script) error {
	if len(scripts) == 0 {
		return nil
	}

	// Sort scripts by Priority (ascending) then Sequence (ascending)
	sortedScripts := make([]*models.Script, len(scripts))
	copy(sortedScripts, scripts)
	sort.Slice(sortedScripts, func(i, j int) bool {
		if sortedScripts[i].Priority != sortedScripts[j].Priority {
			return sortedScripts[i].Priority < sortedScripts[j].Priority
		}
		return sortedScripts[i].Sequence < sortedScripts[j].Sequence
	})

	// Execute each script in order
	for _, script := range sortedScripts {
		if script.SQL == "" {
			continue
		}

		fmt.Printf("Executing script: %s (Priority=%d, Sequence=%d)\n",
			script.Name, script.Priority, script.Sequence)

		// Execute the SQL script
		_, err := conn.Exec(ctx, script.SQL)
		if err != nil {
			return fmt.Errorf("failed to execute script %s (Priority=%d, Sequence=%d): %w",
				script.Name, script.Priority, script.Sequence, err)
		}

		fmt.Printf("✓ Successfully executed: %s\n", script.Name)
	}

	return nil
}
201 pkg/writers/sqlexec/writer_test.go Normal file
@@ -0,0 +1,201 @@
package sqlexec

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

func TestNewWriter(t *testing.T) {
	opts := &writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	}

	writer := NewWriter(opts)
	if writer == nil {
		t.Fatal("Expected non-nil writer")
	}
	if writer.options != opts {
		t.Error("Writer options not set correctly")
	}
}

func TestWriter_WriteDatabase_NilDatabase(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	})

	err := writer.WriteDatabase(nil)
	if err == nil {
		t.Error("Expected error for nil database, got nil")
	}
}

func TestWriter_WriteDatabase_MissingConnectionString(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{},
	})

	db := &models.Database{
		Name: "test",
		Schemas: []*models.Schema{
			{
				Name: "public",
				Scripts: []*models.Script{
					{Name: "test", SQL: "SELECT 1;"},
				},
			},
		},
	}

	err := writer.WriteDatabase(db)
	if err == nil {
		t.Error("Expected error for missing connection_string, got nil")
	}
}

func TestWriter_WriteSchema_NilSchema(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/test",
		},
	})

	err := writer.WriteSchema(nil)
	if err == nil {
		t.Error("Expected error for nil schema, got nil")
	}
}

func TestWriter_WriteSchema_MissingConnectionString(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{},
	})

	schema := &models.Schema{
		Name: "public",
		Scripts: []*models.Script{
			{Name: "test", SQL: "SELECT 1;"},
		},
	}

	err := writer.WriteSchema(schema)
	if err == nil {
		t.Error("Expected error for missing connection_string, got nil")
	}
}

func TestWriter_WriteTable(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	err := writer.WriteTable(&models.Table{})
	if err == nil {
		t.Error("Expected error for WriteTable (not supported), got nil")
	}
}

// TestScriptSorting verifies that scripts are sorted correctly by Priority then Sequence
func TestScriptSorting(t *testing.T) {
	scripts := []*models.Script{
		{Name: "script1", Priority: 2, Sequence: 1, SQL: "SELECT 1;"},
		{Name: "script2", Priority: 1, Sequence: 3, SQL: "SELECT 2;"},
		{Name: "script3", Priority: 1, Sequence: 1, SQL: "SELECT 3;"},
		{Name: "script4", Priority: 1, Sequence: 2, SQL: "SELECT 4;"},
		{Name: "script5", Priority: 3, Sequence: 1, SQL: "SELECT 5;"},
		{Name: "script6", Priority: 2, Sequence: 2, SQL: "SELECT 6;"},
	}

	// Create a copy and sort it with the same ordering criteria as executeScripts
	// (Priority ascending, then Sequence ascending)
	sortedScripts := make([]*models.Script, len(scripts))
	copy(sortedScripts, scripts)

	for i := 0; i < len(sortedScripts)-1; i++ {
		for j := i + 1; j < len(sortedScripts); j++ {
			if sortedScripts[i].Priority > sortedScripts[j].Priority ||
				(sortedScripts[i].Priority == sortedScripts[j].Priority &&
					sortedScripts[i].Sequence > sortedScripts[j].Sequence) {
				sortedScripts[i], sortedScripts[j] = sortedScripts[j], sortedScripts[i]
			}
		}
	}

	// Expected order after sorting
	expectedOrder := []string{
		"script3", // Priority 1, Sequence 1
		"script4", // Priority 1, Sequence 2
		"script2", // Priority 1, Sequence 3
		"script1", // Priority 2, Sequence 1
		"script6", // Priority 2, Sequence 2
		"script5", // Priority 3, Sequence 1
	}

	for i, expected := range expectedOrder {
		if sortedScripts[i].Name != expected {
			t.Errorf("Position %d: expected %s, got %s", i, expected, sortedScripts[i].Name)
		}
	}

	// Verify priorities are ascending
	for i := 0; i < len(sortedScripts)-1; i++ {
		if sortedScripts[i].Priority > sortedScripts[i+1].Priority {
			t.Errorf("Priority not ascending at position %d: %d > %d",
				i, sortedScripts[i].Priority, sortedScripts[i+1].Priority)
		}
		// Within the same priority, sequences should be ascending
		if sortedScripts[i].Priority == sortedScripts[i+1].Priority &&
			sortedScripts[i].Sequence > sortedScripts[i+1].Sequence {
			t.Errorf("Sequence not ascending at position %d with same priority %d: %d > %d",
				i, sortedScripts[i].Priority, sortedScripts[i].Sequence, sortedScripts[i+1].Sequence)
		}
	}
}

func TestWriter_WriteSchema_EmptyScripts(t *testing.T) {
	// This test verifies that writing an empty script list doesn't cause errors
	// even without a database connection (should return early)
	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://invalid/test",
		},
	})

	schema := &models.Schema{
		Name:    "public",
		Scripts: []*models.Script{},
	}

	// Note: This will try to connect even with empty scripts.
	// In a real scenario, executeScripts returns early for empty scripts,
	// but the connection is made before that. This test documents the behavior.
	err := writer.WriteSchema(schema)
	// We expect a connection error since we're using an invalid connection string
	if err == nil {
		t.Error("Expected connection error, got nil")
	}
}

// NOTE: Integration tests for actual database execution should be added separately.
// Those tests would require:
//  1. A running PostgreSQL instance
//  2. Test database setup/teardown
//  3. Verification of actual script execution
//  4. Testing error handling during execution
//  5. Testing transaction behavior if added
//
// Example integration test structure:
// func TestWriter_Integration_ExecuteScripts(t *testing.T) {
//     if testing.Short() {
//         t.Skip("Skipping integration test")
//     }
//     // Setup test database
//     // Create test scripts
//     // Execute scripts
//     // Verify results
//     // Cleanup
// }
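A runnable sketch of that structure (not part of this change), assuming the `RELSPEC_TEST_PG_CONN` convention used by the test scripts below, a disposable database, and an added `os` import; the `it_smoke` table name is purely illustrative:

func TestWriter_Integration_ExecuteScripts(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test")
	}
	connString := os.Getenv("RELSPEC_TEST_PG_CONN") // hypothetical wiring via env var
	if connString == "" {
		t.Skip("RELSPEC_TEST_PG_CONN not set")
	}

	writer := NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{"connection_string": connString},
	})

	schema := &models.Schema{
		Name: "public",
		Scripts: []*models.Script{
			{Name: "create", Priority: 1, Sequence: 1, SQL: "CREATE TABLE it_smoke (id INT);"},
			{Name: "drop", Priority: 2, Sequence: 1, SQL: "DROP TABLE it_smoke;"},
		},
	}

	// The DROP only succeeds if the scripts ran in Priority order
	if err := writer.WriteSchema(schema); err != nil {
		t.Fatalf("integration execution failed: %v", err)
	}
}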
@@ -79,7 +79,7 @@ SELECT
 "
 
 # Set environment variable for tests
-export RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
+export RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test"
 
 echo -e "\n${YELLOW}Running PostgreSQL reader tests...${NC}"
 echo "Connection string: $RELSPEC_TEST_PG_CONN"
@@ -21,7 +21,7 @@ CONTAINER_NAME="relspec-test-postgres"
 POSTGRES_USER="relspec"
 POSTGRES_PASSWORD="relspec_test_password"
 POSTGRES_DB="relspec_test"
-POSTGRES_PORT="5433"
+POSTGRES_PORT="5439"
 
 # Check if podman is available
 if ! command -v podman &> /dev/null; then