Compare commits
56 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 4cdccde9cf | |||
| aba22cb574 | |||
| d0630b4899 | |||
| c9eed9b794 | |||
|
|
5fb09b78c3 | ||
| 5d9770b430 | |||
| f2d500f98d | |||
| 2ec9991324 | |||
| a3e45c206d | |||
| 165623bb1d | |||
| 3c20c3c5d9 | |||
| a54594e49b | |||
| cafe6a461f | |||
| abdb9b4c78 | |||
| e7a15c8e4f | |||
| c36b5ede2b | |||
| 51ab29f8e3 | |||
| f532fc110c | |||
| 92dff99725 | |||
| 283b568adb | |||
| 122743ee43 | |||
| 91b6046b9b | |||
| 6f55505444 | |||
| e0e7b64c69 | |||
| 4181cb1fbd | |||
| 120ffc6a5a | |||
| b20ad35485 | |||
| f258f8baeb | |||
| 6388daba56 | |||
| f6c3f2b460 | |||
| 156e655571 | |||
| b57e1ba304 | |||
| 19fba62f1b | |||
| b4ff4334cc | |||
| 5d9b00c8f2 | |||
| debf351c48 | |||
| d87d657275 | |||
| 1795eb64d1 | |||
| 355f0f918f | |||
| 5d3c86119e | |||
| 8c602e3db0 | |||
| 64aeac972a | |||
| 97a57f5dc8 | |||
| adfe126758 | |||
| 1d193c84d7 | |||
| 1d627c74b1 | |||
| 7c6a355458 | |||
| c0ef26b660 | |||
| cb38f95b79 | |||
| 196d87bc29 | |||
| beb1100d86 | |||
| 410b1ee743 | |||
| b5d39aeee4 | |||
| 5fb9a8f231 | |||
| 27da24f575 | |||
| 0fb3469dbd |
@@ -4,10 +4,7 @@
|
|||||||
"description": "Database Relations Specification Tool for Go",
|
"description": "Database Relations Specification Tool for Go",
|
||||||
"language": "go"
|
"language": "go"
|
||||||
},
|
},
|
||||||
"agent": {
|
|
||||||
"preferred": "Explore",
|
|
||||||
"description": "Use Explore agent for fast codebase navigation and Go project exploration"
|
|
||||||
},
|
|
||||||
"codeStyle": {
|
"codeStyle": {
|
||||||
"useGofmt": true,
|
"useGofmt": true,
|
||||||
"lineLength": 100,
|
"lineLength": 100,
|
||||||
|
|||||||
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
name: CI
|
name: CI
|
||||||
|
run-name: "Test on master branch"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ master ]
|
branches: [ master ]
|
||||||
|
|||||||
68
.github/workflows/integration-tests.yml
vendored
68
.github/workflows/integration-tests.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
name: Integration Tests
|
name: Integration Tests
|
||||||
|
run-name: "Integration Tests"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ master ]
|
branches: [ master ]
|
||||||
@@ -11,6 +11,21 @@ jobs:
|
|||||||
name: Integration Tests
|
name: Integration Tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: relspec
|
||||||
|
POSTGRES_PASSWORD: relspec_test_password
|
||||||
|
POSTGRES_DB: relspec_test
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -31,41 +46,24 @@ jobs:
|
|||||||
- name: Download dependencies
|
- name: Download dependencies
|
||||||
run: go mod download
|
run: go mod download
|
||||||
|
|
||||||
- name: Start PostgreSQL container
|
- name: Install PostgreSQL client
|
||||||
run: |
|
run: |
|
||||||
docker run -d \
|
sudo apt-get update
|
||||||
--name relspec-test-postgres \
|
sudo apt-get install -y postgresql-client
|
||||||
--network host \
|
|
||||||
-e POSTGRES_USER=relspec \
|
|
||||||
-e POSTGRES_PASSWORD=relspec_test_password \
|
|
||||||
-e POSTGRES_DB=relspec_test \
|
|
||||||
postgres:16-alpine
|
|
||||||
|
|
||||||
- name: Wait for PostgreSQL to be ready
|
|
||||||
run: |
|
|
||||||
echo "Waiting for PostgreSQL to start..."
|
|
||||||
for i in {1..30}; do
|
|
||||||
if docker exec relspec-test-postgres pg_isready -U relspec -d relspec_test > /dev/null 2>&1; then
|
|
||||||
echo "PostgreSQL is ready!"
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
echo "Waiting... ($i/30)"
|
|
||||||
sleep 1
|
|
||||||
done
|
|
||||||
sleep 2
|
|
||||||
|
|
||||||
- name: Copy init script into container
|
|
||||||
run: |
|
|
||||||
docker cp tests/postgres/init.sql relspec-test-postgres:/tmp/init.sql
|
|
||||||
|
|
||||||
- name: Initialize test database
|
- name: Initialize test database
|
||||||
|
env:
|
||||||
|
PGPASSWORD: relspec_test_password
|
||||||
run: |
|
run: |
|
||||||
docker exec relspec-test-postgres psql -U relspec -d relspec_test -f /tmp/init.sql
|
# Services are accessible via hostname matching the service name
|
||||||
|
psql -h postgres -U relspec -d relspec_test -f tests/postgres/init.sql
|
||||||
|
|
||||||
- name: Verify database setup
|
- name: Verify database setup
|
||||||
|
env:
|
||||||
|
PGPASSWORD: relspec_test_password
|
||||||
run: |
|
run: |
|
||||||
echo "Verifying database initialization..."
|
echo "Verifying database initialization..."
|
||||||
docker exec relspec-test-postgres psql -U relspec -d relspec_test -c "
|
psql -h postgres -U relspec -d relspec_test -c "
|
||||||
SELECT
|
SELECT
|
||||||
(SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
|
(SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
|
||||||
(SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
|
(SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
|
||||||
@@ -75,17 +73,5 @@ jobs:
|
|||||||
|
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
env:
|
env:
|
||||||
RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@localhost:5432/relspec_test
|
RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@postgres:5432/relspec_test
|
||||||
run: make test-integration
|
run: make test-integration
|
||||||
|
|
||||||
- name: Stop PostgreSQL container
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
docker stop relspec-test-postgres || true
|
|
||||||
docker rm relspec-test-postgres || true
|
|
||||||
|
|
||||||
- name: Summary
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
echo "Integration tests completed."
|
|
||||||
echo "PostgreSQL container has been cleaned up."
|
|
||||||
|
|||||||
2
.github/workflows/release.yml
vendored
2
.github/workflows/release.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
name: Release
|
name: Release
|
||||||
|
run-name: "Making Release"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -47,3 +47,4 @@ dist/
|
|||||||
build/
|
build/
|
||||||
bin/
|
bin/
|
||||||
tests/integration/failed_statements_example.txt
|
tests/integration/failed_statements_example.txt
|
||||||
|
test_output.log
|
||||||
|
|||||||
30
CLAUDE.md
30
CLAUDE.md
@@ -4,7 +4,11 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
|
|||||||
|
|
||||||
## Project Overview
|
## Project Overview
|
||||||
|
|
||||||
RelSpec is a database relations specification tool that provides bidirectional conversion between various database schema formats. It reads database schemas from multiple sources (live databases, DBML, DCTX, DrawDB, etc.) and writes them to various formats (GORM, Bun, JSON, YAML, SQL, etc.).
|
RelSpec is a database relations specification tool that provides bidirectional conversion between various database schema formats. It reads database schemas from multiple sources and writes them to various formats.
|
||||||
|
|
||||||
|
**Supported Readers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, PostgreSQL, Prisma, SQL Directory, SQLite, TypeORM, YAML
|
||||||
|
|
||||||
|
**Supported Writers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, PostgreSQL, Prisma, SQL Exec, SQLite, Template, TypeORM, YAML
|
||||||
|
|
||||||
## Build Commands
|
## Build Commands
|
||||||
|
|
||||||
@@ -50,8 +54,9 @@ Database
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Important patterns:**
|
**Important patterns:**
|
||||||
- Each format (dbml, dctx, drawdb, etc.) has its own `pkg/readers/<format>/` and `pkg/writers/<format>/` subdirectories
|
- Each format has its own `pkg/readers/<format>/` and `pkg/writers/<format>/` subdirectories
|
||||||
- Use `ReaderOptions` and `WriterOptions` structs for configuration (file paths, connection strings, metadata)
|
- Use `ReaderOptions` and `WriterOptions` structs for configuration (file paths, connection strings, metadata, flatten option)
|
||||||
|
- FlattenSchema option collapses multi-schema databases into a single schema for simplified output
|
||||||
- Schema reading typically returns the first schema when reading from Database
|
- Schema reading typically returns the first schema when reading from Database
|
||||||
- Table reading typically returns the first table when reading from Schema
|
- Table reading typically returns the first table when reading from Schema
|
||||||
|
|
||||||
@@ -65,8 +70,22 @@ Contains PostgreSQL-specific helpers:
|
|||||||
- `keywords.go`: SQL reserved keywords validation
|
- `keywords.go`: SQL reserved keywords validation
|
||||||
- `datatypes.go`: PostgreSQL data type mappings and conversions
|
- `datatypes.go`: PostgreSQL data type mappings and conversions
|
||||||
|
|
||||||
|
### Additional Utilities
|
||||||
|
|
||||||
|
- **pkg/diff/**: Schema difference detection and comparison
|
||||||
|
- **pkg/inspector/**: Schema inspection and analysis tools
|
||||||
|
- **pkg/merge/**: Schema merging capabilities
|
||||||
|
- **pkg/reflectutil/**: Reflection utilities for dynamic type handling
|
||||||
|
- **pkg/ui/**: Terminal UI components for interactive schema editing
|
||||||
|
- **pkg/commontypes/**: Shared type definitions
|
||||||
|
|
||||||
## Development Patterns
|
## Development Patterns
|
||||||
|
|
||||||
|
- Each reader/writer is self-contained in its own subdirectory
|
||||||
|
- Options structs control behavior (file paths, connection strings, flatten schema, etc.)
|
||||||
|
- Live database connections supported for PostgreSQL and SQLite
|
||||||
|
- Template writer allows custom output formats
|
||||||
|
|
||||||
## Testing
|
## Testing
|
||||||
|
|
||||||
- Test files should be in the same package as the code they test
|
- Test files should be in the same package as the code they test
|
||||||
@@ -77,5 +96,6 @@ Contains PostgreSQL-specific helpers:
|
|||||||
## Module Information
|
## Module Information
|
||||||
|
|
||||||
- Module path: `git.warky.dev/wdevs/relspecgo`
|
- Module path: `git.warky.dev/wdevs/relspecgo`
|
||||||
- Go version: 1.25.5
|
- Go version: 1.24.0
|
||||||
- Uses Cobra for CLI, Viper for configuration
|
- Uses Cobra for CLI
|
||||||
|
- Key dependencies: pgx/v5 (PostgreSQL), modernc.org/sqlite (SQLite), tview (TUI), Bun ORM
|
||||||
|
|||||||
196
GODOC.md
Normal file
196
GODOC.md
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
# RelSpec API Documentation (godoc)
|
||||||
|
|
||||||
|
This document explains how to access and use the RelSpec API documentation.
|
||||||
|
|
||||||
|
## Viewing Documentation Locally
|
||||||
|
|
||||||
|
### Using `go doc` Command Line
|
||||||
|
|
||||||
|
View package documentation:
|
||||||
|
```bash
|
||||||
|
# Main package overview
|
||||||
|
go doc
|
||||||
|
|
||||||
|
# Specific package
|
||||||
|
go doc ./pkg/models
|
||||||
|
go doc ./pkg/readers
|
||||||
|
go doc ./pkg/writers
|
||||||
|
go doc ./pkg/ui
|
||||||
|
|
||||||
|
# Specific type or function
|
||||||
|
go doc ./pkg/models Database
|
||||||
|
go doc ./pkg/readers Reader
|
||||||
|
go doc ./pkg/writers Writer
|
||||||
|
```
|
||||||
|
|
||||||
|
View all documentation for a package:
|
||||||
|
```bash
|
||||||
|
go doc -all ./pkg/models
|
||||||
|
go doc -all ./pkg/readers
|
||||||
|
go doc -all ./pkg/writers
|
||||||
|
```
|
||||||
|
|
||||||
|
### Using `godoc` Web Server
|
||||||
|
|
||||||
|
**Quick Start (Recommended):**
|
||||||
|
```bash
|
||||||
|
make godoc
|
||||||
|
```
|
||||||
|
|
||||||
|
This will automatically install godoc if needed and start the server on port 6060.
|
||||||
|
|
||||||
|
**Manual Installation:**
|
||||||
|
```bash
|
||||||
|
go install golang.org/x/tools/cmd/godoc@latest
|
||||||
|
godoc -http=:6060
|
||||||
|
```
|
||||||
|
|
||||||
|
Then open your browser to:
|
||||||
|
```
|
||||||
|
http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Package Documentation
|
||||||
|
|
||||||
|
### Core Packages
|
||||||
|
|
||||||
|
- **`pkg/models`** - Core data structures (Database, Schema, Table, Column, etc.)
|
||||||
|
- **`pkg/readers`** - Input format readers (dbml, pgsql, gorm, prisma, etc.)
|
||||||
|
- **`pkg/writers`** - Output format writers (dbml, pgsql, gorm, prisma, etc.)
|
||||||
|
|
||||||
|
### Utility Packages
|
||||||
|
|
||||||
|
- **`pkg/diff`** - Schema comparison and difference detection
|
||||||
|
- **`pkg/merge`** - Schema merging utilities
|
||||||
|
- **`pkg/transform`** - Validation and normalization
|
||||||
|
- **`pkg/ui`** - Interactive terminal UI for schema editing
|
||||||
|
|
||||||
|
### Support Packages
|
||||||
|
|
||||||
|
- **`pkg/pgsql`** - PostgreSQL-specific utilities
|
||||||
|
- **`pkg/inspector`** - Database introspection capabilities
|
||||||
|
- **`pkg/reflectutil`** - Reflection utilities for Go code analysis
|
||||||
|
- **`pkg/commontypes`** - Shared type definitions
|
||||||
|
|
||||||
|
### Reader Implementations
|
||||||
|
|
||||||
|
Each reader is in its own subpackage under `pkg/readers/`:
|
||||||
|
|
||||||
|
- `pkg/readers/dbml` - DBML format reader
|
||||||
|
- `pkg/readers/dctx` - DCTX format reader
|
||||||
|
- `pkg/readers/drawdb` - DrawDB JSON reader
|
||||||
|
- `pkg/readers/graphql` - GraphQL schema reader
|
||||||
|
- `pkg/readers/json` - JSON schema reader
|
||||||
|
- `pkg/readers/yaml` - YAML schema reader
|
||||||
|
- `pkg/readers/gorm` - Go GORM models reader
|
||||||
|
- `pkg/readers/bun` - Go Bun models reader
|
||||||
|
- `pkg/readers/drizzle` - TypeScript Drizzle ORM reader
|
||||||
|
- `pkg/readers/prisma` - Prisma schema reader
|
||||||
|
- `pkg/readers/typeorm` - TypeScript TypeORM reader
|
||||||
|
- `pkg/readers/pgsql` - PostgreSQL database reader
|
||||||
|
- `pkg/readers/sqlite` - SQLite database reader
|
||||||
|
|
||||||
|
### Writer Implementations
|
||||||
|
|
||||||
|
Each writer is in its own subpackage under `pkg/writers/`:
|
||||||
|
|
||||||
|
- `pkg/writers/dbml` - DBML format writer
|
||||||
|
- `pkg/writers/dctx` - DCTX format writer
|
||||||
|
- `pkg/writers/drawdb` - DrawDB JSON writer
|
||||||
|
- `pkg/writers/graphql` - GraphQL schema writer
|
||||||
|
- `pkg/writers/json` - JSON schema writer
|
||||||
|
- `pkg/writers/yaml` - YAML schema writer
|
||||||
|
- `pkg/writers/gorm` - Go GORM models writer
|
||||||
|
- `pkg/writers/bun` - Go Bun models writer
|
||||||
|
- `pkg/writers/drizzle` - TypeScript Drizzle ORM writer
|
||||||
|
- `pkg/writers/prisma` - Prisma schema writer
|
||||||
|
- `pkg/writers/typeorm` - TypeScript TypeORM writer
|
||||||
|
- `pkg/writers/pgsql` - PostgreSQL SQL writer
|
||||||
|
- `pkg/writers/sqlite` - SQLite SQL writer
|
||||||
|
|
||||||
|
## Usage Examples
|
||||||
|
|
||||||
|
### Reading a Schema
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
|
)
|
||||||
|
|
||||||
|
reader := dbml.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "schema.dbml",
|
||||||
|
})
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Writing a Schema
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
writer := gorm.NewWriter(&writers.WriterOptions{
|
||||||
|
OutputPath: "./models",
|
||||||
|
PackageName: "models",
|
||||||
|
})
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Comparing Schemas
|
||||||
|
|
||||||
|
```go
|
||||||
|
import "git.warky.dev/wdevs/relspecgo/pkg/diff"
|
||||||
|
|
||||||
|
result := diff.CompareDatabases(sourceDB, targetDB)
|
||||||
|
err := diff.FormatDiff(result, diff.OutputFormatText, os.Stdout)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Merging Schemas
|
||||||
|
|
||||||
|
```go
|
||||||
|
import "git.warky.dev/wdevs/relspecgo/pkg/merge"
|
||||||
|
|
||||||
|
result := merge.MergeDatabases(targetDB, sourceDB, nil)
|
||||||
|
fmt.Printf("Added %d tables\n", result.TablesAdded)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Documentation Standards
|
||||||
|
|
||||||
|
All public APIs follow Go documentation conventions:
|
||||||
|
|
||||||
|
- Package documentation in `doc.go` files
|
||||||
|
- Type, function, and method comments start with the item name
|
||||||
|
- Examples where applicable
|
||||||
|
- Clear description of parameters and return values
|
||||||
|
- Usage notes and caveats where relevant
|
||||||
|
|
||||||
|
## Generating Documentation
|
||||||
|
|
||||||
|
To regenerate documentation after code changes:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Verify documentation builds correctly
|
||||||
|
go doc -all ./pkg/... > /dev/null
|
||||||
|
|
||||||
|
# Check for undocumented exports
|
||||||
|
go vet ./...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing Documentation
|
||||||
|
|
||||||
|
When adding new packages or exported items:
|
||||||
|
|
||||||
|
1. Add package documentation in a `doc.go` file
|
||||||
|
2. Document all exported types, functions, and methods
|
||||||
|
3. Include usage examples for complex APIs
|
||||||
|
4. Follow Go documentation style guide
|
||||||
|
5. Verify with `go doc` before committing
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- [Go Documentation Guide](https://go.dev/doc/comment)
|
||||||
|
- [Effective Go - Commentary](https://go.dev/doc/effective_go#commentary)
|
||||||
|
- [godoc Documentation](https://pkg.go.dev/golang.org/x/tools/cmd/godoc)
|
||||||
96
Makefile
96
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration release release-version
|
.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version godoc
|
||||||
|
|
||||||
# Binary name
|
# Binary name
|
||||||
BINARY_NAME=relspec
|
BINARY_NAME=relspec
|
||||||
@@ -14,9 +14,29 @@ GOGET=$(GOCMD) get
|
|||||||
GOMOD=$(GOCMD) mod
|
GOMOD=$(GOCMD) mod
|
||||||
GOCLEAN=$(GOCMD) clean
|
GOCLEAN=$(GOCMD) clean
|
||||||
|
|
||||||
|
# Auto-detect container runtime (Docker or Podman)
|
||||||
|
CONTAINER_RUNTIME := $(shell \
|
||||||
|
if command -v podman > /dev/null 2>&1; then \
|
||||||
|
echo "podman"; \
|
||||||
|
elif command -v docker > /dev/null 2>&1; then \
|
||||||
|
echo "docker"; \
|
||||||
|
else \
|
||||||
|
echo "none"; \
|
||||||
|
fi)
|
||||||
|
|
||||||
|
# Detect compose command
|
||||||
|
COMPOSE_CMD := $(shell \
|
||||||
|
if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
echo "podman-compose"; \
|
||||||
|
elif command -v docker-compose > /dev/null 2>&1; then \
|
||||||
|
echo "docker-compose"; \
|
||||||
|
else \
|
||||||
|
echo "docker compose"; \
|
||||||
|
fi)
|
||||||
|
|
||||||
all: lint test build ## Run linting, tests, and build
|
all: lint test build ## Run linting, tests, and build
|
||||||
|
|
||||||
build: ## Build the binary
|
build: deps ## Build the binary
|
||||||
@echo "Building $(BINARY_NAME)..."
|
@echo "Building $(BINARY_NAME)..."
|
||||||
@mkdir -p $(BUILD_DIR)
|
@mkdir -p $(BUILD_DIR)
|
||||||
$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
|
$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
|
||||||
@@ -32,7 +52,7 @@ test-integration: ## Run integration tests (requires RELSPEC_TEST_PG_CONN enviro
|
|||||||
@echo "Running integration tests..."
|
@echo "Running integration tests..."
|
||||||
@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
|
@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
|
||||||
echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
|
echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
|
||||||
echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5432/relspec_test'"; \
|
echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5439/relspec_test'"; \
|
||||||
exit 1; \
|
exit 1; \
|
||||||
fi
|
fi
|
||||||
@echo "Running PostgreSQL reader tests..."
|
@echo "Running PostgreSQL reader tests..."
|
||||||
@@ -81,34 +101,76 @@ deps: ## Download dependencies
|
|||||||
$(GOMOD) tidy
|
$(GOMOD) tidy
|
||||||
@echo "Dependencies updated"
|
@echo "Dependencies updated"
|
||||||
|
|
||||||
docker-up: ## Start PostgreSQL test database
|
godoc: ## Start godoc server on http://localhost:6060
|
||||||
@echo "Starting PostgreSQL test database..."
|
@echo "Starting godoc server..."
|
||||||
@if command -v docker-compose > /dev/null 2>&1; then \
|
@GOBIN=$$(go env GOPATH)/bin; \
|
||||||
docker-compose up -d postgres; \
|
if command -v godoc > /dev/null 2>&1; then \
|
||||||
|
echo "godoc server running on http://localhost:6060"; \
|
||||||
|
echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
|
||||||
|
echo "Press Ctrl+C to stop"; \
|
||||||
|
godoc -http=:6060; \
|
||||||
|
elif [ -f "$$GOBIN/godoc" ]; then \
|
||||||
|
echo "godoc server running on http://localhost:6060"; \
|
||||||
|
echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
|
||||||
|
echo "Press Ctrl+C to stop"; \
|
||||||
|
$$GOBIN/godoc -http=:6060; \
|
||||||
else \
|
else \
|
||||||
docker compose up -d postgres; \
|
echo "godoc not installed. Installing..."; \
|
||||||
|
go install golang.org/x/tools/cmd/godoc@latest; \
|
||||||
|
echo "godoc installed. Starting server..."; \
|
||||||
|
echo "godoc server running on http://localhost:6060"; \
|
||||||
|
echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
|
||||||
|
echo "Press Ctrl+C to stop"; \
|
||||||
|
$$GOBIN/godoc -http=:6060; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
start: docker-up ## Alias for docker-up (start PostgreSQL test database)
|
||||||
|
|
||||||
|
stop: docker-down ## Alias for docker-down (stop PostgreSQL test database)
|
||||||
|
|
||||||
|
docker-up: ## Start PostgreSQL test database
|
||||||
|
@echo "Starting PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
|
||||||
|
@if [ "$(CONTAINER_RUNTIME)" = "none" ]; then \
|
||||||
|
echo "Error: Neither Docker nor Podman is installed"; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
podman run -d --name relspec-test-postgres \
|
||||||
|
-e POSTGRES_USER=relspec \
|
||||||
|
-e POSTGRES_PASSWORD=relspec_test_password \
|
||||||
|
-e POSTGRES_DB=relspec_test \
|
||||||
|
-p 5439:5432 \
|
||||||
|
-v ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:Z \
|
||||||
|
postgres:16-alpine 2>/dev/null || echo "Container already running"; \
|
||||||
|
else \
|
||||||
|
$(COMPOSE_CMD) up -d postgres; \
|
||||||
fi
|
fi
|
||||||
@echo "Waiting for PostgreSQL to be ready..."
|
@echo "Waiting for PostgreSQL to be ready..."
|
||||||
@sleep 3
|
@sleep 3
|
||||||
@echo "PostgreSQL is running on port 5433"
|
@echo "PostgreSQL is running on port 5439"
|
||||||
@echo "Connection: postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
|
@echo "Connection: postgres://relspec:relspec_test_password@localhost:5439/relspec_test"
|
||||||
|
|
||||||
docker-down: ## Stop PostgreSQL test database
|
docker-down: ## Stop PostgreSQL test database
|
||||||
@echo "Stopping PostgreSQL test database..."
|
@echo "Stopping PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
|
||||||
@if command -v docker-compose > /dev/null 2>&1; then \
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
docker-compose down; \
|
podman stop relspec-test-postgres 2>/dev/null || true; \
|
||||||
|
podman rm relspec-test-postgres 2>/dev/null || true; \
|
||||||
else \
|
else \
|
||||||
docker compose down; \
|
$(COMPOSE_CMD) down; \
|
||||||
fi
|
fi
|
||||||
@echo "PostgreSQL stopped"
|
@echo "PostgreSQL stopped"
|
||||||
|
|
||||||
docker-test: ## Run PostgreSQL integration tests with Docker
|
docker-test: ## Run PostgreSQL integration tests with Docker/Podman
|
||||||
@./tests/postgres/run_tests.sh
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
./tests/postgres/run_tests_podman.sh; \
|
||||||
|
else \
|
||||||
|
./tests/postgres/run_tests.sh; \
|
||||||
|
fi
|
||||||
|
|
||||||
docker-test-integration: docker-up ## Start DB and run integration tests
|
docker-test-integration: docker-up ## Start DB and run integration tests
|
||||||
@echo "Running integration tests..."
|
@echo "Running integration tests..."
|
||||||
@sleep 2
|
@sleep 2
|
||||||
@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test" \
|
@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test" \
|
||||||
$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
|
$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
|
||||||
@make docker-down
|
@make docker-down
|
||||||
|
|
||||||
|
|||||||
156
README.md
156
README.md
@@ -1,16 +1,24 @@
|
|||||||
# RelSpec
|
# RelSpec
|
||||||
|
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/releases/latest)
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/ci.yml)
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/integration-tests.yml)
|
||||||
|
[](https://go.dev/dl/)
|
||||||
|
[](LICENSE)
|
||||||
|
|
||||||
> Database Relations Specification Tool for Go
|
> Database Relations Specification Tool for Go
|
||||||
|
|
||||||
RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
|
RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
|
RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
|
||||||
- Inspect live databases and extract their structure
|
- Inspect live databases and extract their structure
|
||||||
|
- Validate schemas against configurable rules and naming conventions
|
||||||
- Convert between different ORM models (GORM, Bun, etc.)
|
- Convert between different ORM models (GORM, Bun, etc.)
|
||||||
- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
|
- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
|
||||||
- Generate standardized specification files (JSON, YAML, etc.)
|
- Generate standardized specification files (JSON, YAML, etc.)
|
||||||
|
- Compare database schemas and track changes
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -29,6 +37,7 @@ RelSpec can read database schemas from multiple sources:
|
|||||||
|
|
||||||
#### Database Inspection
|
#### Database Inspection
|
||||||
- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
|
- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
|
||||||
|
- [SQLite](pkg/readers/sqlite/README.md) - Direct SQLite database introspection
|
||||||
|
|
||||||
#### Schema Formats
|
#### Schema Formats
|
||||||
- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
|
- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
|
||||||
@@ -51,6 +60,7 @@ RelSpec can write database schemas to multiple formats:
|
|||||||
|
|
||||||
#### Database DDL
|
#### Database DDL
|
||||||
- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
|
- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
|
||||||
|
- [SQLite](pkg/writers/sqlite/README.md) - SQLite DDL with automatic schema flattening
|
||||||
|
|
||||||
#### Schema Formats
|
#### Schema Formats
|
||||||
- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
|
- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
|
||||||
@@ -60,10 +70,46 @@ RelSpec can write database schemas to multiple formats:
|
|||||||
- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
|
- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
|
||||||
- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
|
- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
|
||||||
|
|
||||||
|
### Inspector (Schema Validation)
|
||||||
|
|
||||||
|
RelSpec includes a powerful schema validation and linting tool:
|
||||||
|
|
||||||
|
- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
|
||||||
|
- Enforce naming conventions (snake_case, camelCase, custom patterns)
|
||||||
|
- Check primary key and foreign key standards
|
||||||
|
- Detect missing indexes on foreign keys
|
||||||
|
- Prevent use of SQL reserved keywords
|
||||||
|
- Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
|
||||||
|
- Support for custom validation rules
|
||||||
|
- Multiple output formats (Markdown with colors, JSON)
|
||||||
|
- CI/CD integration ready
|
||||||
|
|
||||||
## Use of AI
|
## Use of AI
|
||||||
[Rules and use of AI](./AI_USE.md)
|
[Rules and use of AI](./AI_USE.md)
|
||||||
|
|
||||||
|
## User Interface
|
||||||
|
|
||||||
|
RelSpec provides an interactive terminal-based user interface for managing and editing database schemas. The UI allows you to:
|
||||||
|
|
||||||
|
- **Browse Databases** - Navigate through your database structure with an intuitive menu system
|
||||||
|
- **Edit Schemas** - Create, modify, and organize database schemas
|
||||||
|
- **Manage Tables** - Add, update, or delete tables with full control over structure
|
||||||
|
- **Configure Columns** - Define column properties, data types, constraints, and relationships
|
||||||
|
- **Interactive Editing** - Real-time validation and feedback as you make changes
|
||||||
|
|
||||||
|
The interface supports multiple input formats, making it easy to load, edit, and save your database definitions in various formats.
|
||||||
|
|
||||||
|
<p align="center" width="100%">
|
||||||
|
<img src="./assets/image/screenshots/main_screen.jpg">
|
||||||
|
</p>
|
||||||
|
<p align="center" width="100%">
|
||||||
|
<img src="./assets/image/screenshots/table_view.jpg">
|
||||||
|
</p>
|
||||||
|
<p align="center" width="100%">
|
||||||
|
<img src="./assets/image/screenshots/edit_column.jpg">
|
||||||
|
</p>
|
||||||
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -74,30 +120,118 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
|
|||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
|
### Interactive Schema Editor
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Inspect database and generate GORM models
|
# Launch interactive editor with a DBML schema
|
||||||
relspec --input db --conn "postgres://..." --output gorm --out-file models.go
|
relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
|
||||||
|
|
||||||
|
# Edit PostgreSQL database in place
|
||||||
|
relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
|
||||||
|
--to pgsql --to-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Edit JSON schema and save as GORM models
|
||||||
|
relspec edit --from json --from-path db.json --to gorm --to-path models/
|
||||||
|
```
|
||||||
|
|
||||||
|
The `edit` command launches an interactive terminal user interface where you can:
|
||||||
|
- Browse and navigate your database structure
|
||||||
|
- Create, modify, and delete schemas, tables, and columns
|
||||||
|
- Configure column properties, constraints, and relationships
|
||||||
|
- Save changes to various formats
|
||||||
|
- Import and merge schemas from other databases
|
||||||
|
|
||||||
|
### Schema Merging
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Merge two JSON schemas (additive merge - adds missing items only)
|
||||||
|
relspec merge --target json --target-path base.json \
|
||||||
|
--source json --source-path additions.json \
|
||||||
|
--output json --output-path merged.json
|
||||||
|
|
||||||
|
# Merge PostgreSQL database into JSON, skipping specific tables
|
||||||
|
relspec merge --target json --target-path current.json \
|
||||||
|
--source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
|
||||||
|
--output json --output-path updated.json \
|
||||||
|
--skip-tables "audit_log,temp_tables"
|
||||||
|
|
||||||
|
# Cross-format merge (DBML + YAML → JSON)
|
||||||
|
relspec merge --target dbml --target-path base.dbml \
|
||||||
|
--source yaml --source-path additions.yaml \
|
||||||
|
--output json --output-path result.json \
|
||||||
|
--skip-relations --skip-views
|
||||||
|
```
|
||||||
|
|
||||||
|
The `merge` command combines two database schemas additively:
|
||||||
|
- Adds missing schemas, tables, columns, and other objects
|
||||||
|
- Never modifies or deletes existing items (safe operation)
|
||||||
|
- Supports selective merging with skip options (domains, relations, enums, views, sequences, specific tables)
|
||||||
|
- Works across any combination of supported formats
|
||||||
|
- Perfect for integrating multiple schema definitions or applying patches
|
||||||
|
|
||||||
|
### Schema Conversion
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Convert PostgreSQL database to GORM models
|
||||||
|
relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
|
||||||
|
--to gorm --to-path models/ --package models
|
||||||
|
|
||||||
# Convert GORM models to Bun
|
# Convert GORM models to Bun
|
||||||
relspec --input gorm --in-file existing.go --output bun --out-file bun_models.go
|
relspec convert --from gorm --from-path models.go \
|
||||||
|
--to bun --to-path bun_models.go --package models
|
||||||
|
|
||||||
# Export database schema to JSON
|
# Export database schema to JSON
|
||||||
relspec --input db --conn "mysql://..." --output json --out-file schema.json
|
relspec convert --from pgsql --from-conn "postgres://..." \
|
||||||
|
--to json --to-path schema.json
|
||||||
|
|
||||||
# Convert Clarion DCTX to YAML
|
# Convert DBML to PostgreSQL SQL
|
||||||
relspec --input dctx --in-file legacy.dctx --output yaml --out-file schema.yaml
|
relspec convert --from dbml --from-path schema.dbml \
|
||||||
|
--to pgsql --to-path schema.sql
|
||||||
|
|
||||||
|
# Convert PostgreSQL database to SQLite (with automatic schema flattening)
|
||||||
|
relspec convert --from pgsql --from-conn "postgres://..." \
|
||||||
|
--to sqlite --to-path sqlite_schema.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Validation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Validate a PostgreSQL database with default rules
|
||||||
|
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Validate DBML file with custom rules
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
|
||||||
|
|
||||||
|
# Generate JSON validation report
|
||||||
|
relspec inspect --from json --from-path db.json \
|
||||||
|
--output-format json --output report.json
|
||||||
|
|
||||||
|
# Validate specific schema only
|
||||||
|
relspec inspect --from pgsql --from-conn "..." --schema public
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Comparison
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Compare two database schemas
|
||||||
|
relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
|
||||||
|
--to pgsql --to-conn "postgres://localhost/db2"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Project Structure
|
## Project Structure
|
||||||
|
|
||||||
```
|
```
|
||||||
relspecgo/
|
relspecgo/
|
||||||
├── cmd/ # CLI application
|
├── cmd/
|
||||||
|
│ └── relspec/ # CLI application (convert, inspect, diff, scripts)
|
||||||
├── pkg/
|
├── pkg/
|
||||||
│ ├── readers/ # Input format readers
|
│ ├── readers/ # Input format readers (DBML, GORM, PostgreSQL, etc.)
|
||||||
│ ├── writers/ # Output format writers
|
│ ├── writers/ # Output format writers (GORM, Bun, SQL, etc.)
|
||||||
|
│ ├── inspector/ # Schema validation and linting
|
||||||
|
│ ├── diff/ # Schema comparison
|
||||||
│ ├── models/ # Internal data models
|
│ ├── models/ # Internal data models
|
||||||
│ └── transform/ # Transformation logic
|
│ ├── transform/ # Transformation logic
|
||||||
|
│ └── pgsql/ # PostgreSQL utilities (keywords, data types)
|
||||||
├── examples/ # Usage examples
|
├── examples/ # Usage examples
|
||||||
└── tests/ # Test files
|
└── tests/ # Test files
|
||||||
```
|
```
|
||||||
|
|||||||
26
TODO.md
26
TODO.md
@@ -1,15 +1,15 @@
|
|||||||
# RelSpec - TODO List
|
# RelSpec - TODO List
|
||||||
|
|
||||||
|
|
||||||
## Input Readers / Writers
|
## Input Readers / Writers
|
||||||
|
|
||||||
- [✔️] **Database Inspector**
|
- [✔️] **Database Inspector**
|
||||||
- [✔️] PostgreSQL driver
|
- [✔️] PostgreSQL driver (reader + writer)
|
||||||
- [ ] MySQL driver
|
- [ ] MySQL driver
|
||||||
- [ ] SQLite driver
|
- [✔️] SQLite driver (reader + writer with automatic schema flattening)
|
||||||
- [ ] MSSQL driver
|
- [ ] MSSQL driver
|
||||||
- [✔️] Foreign key detection
|
- [✔️] Foreign key detection
|
||||||
- [✔️] Index extraction
|
- [✔️] Index extraction
|
||||||
- [*] .sql file generation with sequence and priority
|
- [✔️] .sql file generation (PostgreSQL, SQLite)
|
||||||
- [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
|
- [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
|
||||||
- [✔️] Prisma schema support (PSL format) .prisma
|
- [✔️] Prisma schema support (PSL format) .prisma
|
||||||
- [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you are bugs, you are welcome to do pull requests or issues)
|
- [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you are bugs, you are welcome to do pull requests or issues)
|
||||||
@@ -20,12 +20,25 @@
|
|||||||
- [] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
|
- [] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
|
||||||
- [✔️] GraphQL schema generation
|
- [✔️] GraphQL schema generation
|
||||||
|
|
||||||
|
## UI
|
||||||
|
|
||||||
|
- [✔️] Basic UI (I went with tview)
|
||||||
|
- [✔️] Save / Load Database
|
||||||
|
- [✔️] Schemas / Domains / Tables
|
||||||
|
- [✔️] Add Relations
|
||||||
|
- [ ] Add Indexes
|
||||||
|
- [ ] Add Views
|
||||||
|
- [ ] Add Sequences
|
||||||
|
- [ ] Add Scripts
|
||||||
|
- [ ] Domain / Table Assignment
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
- [ ] API documentation (godoc)
|
|
||||||
|
- [✔️] API documentation (godoc)
|
||||||
- [ ] Usage examples for each format combination
|
- [ ] Usage examples for each format combination
|
||||||
|
|
||||||
## Advanced Features
|
## Advanced Features
|
||||||
|
|
||||||
- [ ] Dry-run mode for validation
|
- [ ] Dry-run mode for validation
|
||||||
- [x] Diff tool for comparing specifications
|
- [x] Diff tool for comparing specifications
|
||||||
- [ ] Migration script generation
|
- [ ] Migration script generation
|
||||||
@@ -34,12 +47,13 @@
|
|||||||
- [ ] Watch mode for auto-regeneration
|
- [ ] Watch mode for auto-regeneration
|
||||||
|
|
||||||
## Future Considerations
|
## Future Considerations
|
||||||
|
|
||||||
- [ ] Web UI for visual editing
|
- [ ] Web UI for visual editing
|
||||||
- [ ] REST API server mode
|
- [ ] REST API server mode
|
||||||
- [ ] Support for NoSQL databases
|
- [ ] Support for NoSQL databases
|
||||||
|
|
||||||
|
|
||||||
## Performance
|
## Performance
|
||||||
|
|
||||||
- [ ] Concurrent processing for multiple tables
|
- [ ] Concurrent processing for multiple tables
|
||||||
- [ ] Streaming for large databases
|
- [ ] Streaming for large databases
|
||||||
- [ ] Memory optimization
|
- [ ] Memory optimization
|
||||||
|
|||||||
BIN
assets/image/screenshots/edit_column.jpg
Normal file
BIN
assets/image/screenshots/edit_column.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 42 KiB |
BIN
assets/image/screenshots/main_screen.jpg
Normal file
BIN
assets/image/screenshots/main_screen.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 50 KiB |
BIN
assets/image/screenshots/table_view.jpg
Normal file
BIN
assets/image/screenshots/table_view.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 67 KiB |
@@ -20,6 +20,7 @@ import (
|
|||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
@@ -33,6 +34,7 @@ import (
|
|||||||
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
||||||
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||||
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
||||||
|
wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
|
||||||
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
||||||
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||||
)
|
)
|
||||||
@@ -45,6 +47,7 @@ var (
|
|||||||
convertTargetPath string
|
convertTargetPath string
|
||||||
convertPackageName string
|
convertPackageName string
|
||||||
convertSchemaFilter string
|
convertSchemaFilter string
|
||||||
|
convertFlattenSchema bool
|
||||||
)
|
)
|
||||||
|
|
||||||
var convertCmd = &cobra.Command{
|
var convertCmd = &cobra.Command{
|
||||||
@@ -69,6 +72,7 @@ Input formats:
|
|||||||
- prisma: Prisma schema files (.prisma)
|
- prisma: Prisma schema files (.prisma)
|
||||||
- typeorm: TypeORM entity files (TypeScript)
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
- pgsql: PostgreSQL database (live connection)
|
- pgsql: PostgreSQL database (live connection)
|
||||||
|
- sqlite: SQLite database file
|
||||||
|
|
||||||
Output formats:
|
Output formats:
|
||||||
- dbml: DBML schema files
|
- dbml: DBML schema files
|
||||||
@@ -83,14 +87,21 @@ Output formats:
|
|||||||
- prisma: Prisma schema files (.prisma)
|
- prisma: Prisma schema files (.prisma)
|
||||||
- typeorm: TypeORM entity files (TypeScript)
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
- pgsql: PostgreSQL SQL schema
|
- pgsql: PostgreSQL SQL schema
|
||||||
|
- sqlite: SQLite SQL schema (with automatic schema flattening)
|
||||||
|
|
||||||
PostgreSQL Connection String Examples:
|
Connection String Examples:
|
||||||
|
PostgreSQL:
|
||||||
postgres://username:password@localhost:5432/database_name
|
postgres://username:password@localhost:5432/database_name
|
||||||
postgres://username:password@localhost/database_name
|
postgres://username:password@localhost/database_name
|
||||||
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||||
postgresql://user:pass@host/dbname?sslmode=require
|
postgresql://user:pass@host/dbname?sslmode=require
|
||||||
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
||||||
|
|
||||||
|
SQLite:
|
||||||
|
/path/to/database.db
|
||||||
|
./relative/path/database.sqlite
|
||||||
|
database.db
|
||||||
|
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
# Convert DBML to GORM models
|
# Convert DBML to GORM models
|
||||||
@@ -135,19 +146,28 @@ Examples:
|
|||||||
|
|
||||||
# Convert Bun models directory to JSON
|
# Convert Bun models directory to JSON
|
||||||
relspec convert --from bun --from-path ./models \
|
relspec convert --from bun --from-path ./models \
|
||||||
--to json --to-path schema.json`,
|
--to json --to-path schema.json
|
||||||
|
|
||||||
|
# Convert SQLite database to JSON
|
||||||
|
relspec convert --from sqlite --from-path database.db \
|
||||||
|
--to json --to-path schema.json
|
||||||
|
|
||||||
|
# Convert SQLite to PostgreSQL SQL
|
||||||
|
relspec convert --from sqlite --from-path database.db \
|
||||||
|
--to pgsql --to-path schema.sql`,
|
||||||
RunE: runConvert,
|
RunE: runConvert,
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
|
||||||
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
||||||
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
|
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for pgsql) or file path (for sqlite)")
|
||||||
|
|
||||||
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
||||||
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
|
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
|
||||||
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
|
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
|
||||||
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
|
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
|
||||||
|
convertCmd.Flags().BoolVar(&convertFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
|
||||||
|
|
||||||
err := convertCmd.MarkFlagRequired("from")
|
err := convertCmd.MarkFlagRequired("from")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -202,7 +222,7 @@ func runConvert(cmd *cobra.Command, args []string) error {
|
|||||||
fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
|
fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter); err != nil {
|
if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter, convertFlattenSchema); err != nil {
|
||||||
return fmt.Errorf("failed to write target: %w", err)
|
return fmt.Errorf("failed to write target: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -289,6 +309,17 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
|
|||||||
}
|
}
|
||||||
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "sqlite", "sqlite3":
|
||||||
|
// SQLite can use either file path or connection string
|
||||||
|
dbPath := filePath
|
||||||
|
if dbPath == "" {
|
||||||
|
dbPath = connString
|
||||||
|
}
|
||||||
|
if dbPath == "" {
|
||||||
|
return nil, fmt.Errorf("file path or connection string is required for SQLite format")
|
||||||
|
}
|
||||||
|
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("unsupported source format: %s", dbType)
|
return nil, fmt.Errorf("unsupported source format: %s", dbType)
|
||||||
}
|
}
|
||||||
@@ -301,12 +332,13 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
|
|||||||
return db, nil
|
return db, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string) error {
|
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool) error {
|
||||||
var writer writers.Writer
|
var writer writers.Writer
|
||||||
|
|
||||||
writerOpts := &writers.WriterOptions{
|
writerOpts := &writers.WriterOptions{
|
||||||
OutputPath: outputPath,
|
OutputPath: outputPath,
|
||||||
PackageName: packageName,
|
PackageName: packageName,
|
||||||
|
FlattenSchema: flattenSchema,
|
||||||
}
|
}
|
||||||
|
|
||||||
switch strings.ToLower(dbType) {
|
switch strings.ToLower(dbType) {
|
||||||
@@ -343,6 +375,9 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
|
|||||||
case "pgsql", "postgres", "postgresql", "sql":
|
case "pgsql", "postgres", "postgresql", "sql":
|
||||||
writer = wpgsql.NewWriter(writerOpts)
|
writer = wpgsql.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
case "sqlite", "sqlite3":
|
||||||
|
writer = wsqlite.NewWriter(writerOpts)
|
||||||
|
|
||||||
case "prisma":
|
case "prisma":
|
||||||
writer = wprisma.NewWriter(writerOpts)
|
writer = wprisma.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ import (
|
|||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -254,6 +255,17 @@ func readDatabase(dbType, filePath, connString, label string) (*models.Database,
|
|||||||
}
|
}
|
||||||
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
|
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
|
||||||
|
|
||||||
|
case "sqlite", "sqlite3":
|
||||||
|
// SQLite can use either file path or connection string
|
||||||
|
dbPath := filePath
|
||||||
|
if dbPath == "" {
|
||||||
|
dbPath = connString
|
||||||
|
}
|
||||||
|
if dbPath == "" {
|
||||||
|
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
|
||||||
|
}
|
||||||
|
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("%s: unsupported database format: %s", label, dbType)
|
return nil, fmt.Errorf("%s: unsupported database format: %s", label, dbType)
|
||||||
}
|
}
|
||||||
|
|||||||
361
cmd/relspec/edit.go
Normal file
361
cmd/relspec/edit.go
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/ui"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
||||||
|
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
||||||
|
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
||||||
|
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
||||||
|
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
|
||||||
|
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||||
|
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
|
||||||
|
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
||||||
|
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||||
|
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
||||||
|
wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
|
||||||
|
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
||||||
|
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
editSourceType string
|
||||||
|
editSourcePath string
|
||||||
|
editSourceConn string
|
||||||
|
editTargetType string
|
||||||
|
editTargetPath string
|
||||||
|
editSchemaFilter string
|
||||||
|
)
|
||||||
|
|
||||||
|
var editCmd = &cobra.Command{
|
||||||
|
Use: "edit",
|
||||||
|
Short: "Edit database schema interactively with TUI",
|
||||||
|
Long: `Edit database schemas from various formats using an interactive terminal UI.
|
||||||
|
|
||||||
|
Allows you to:
|
||||||
|
- List and navigate schemas and tables
|
||||||
|
- Create, edit, and delete schemas
|
||||||
|
- Create, edit, and delete tables
|
||||||
|
- Add, edit, and delete columns
|
||||||
|
- Set table and column properties
|
||||||
|
- Add constraints, indexes, and relationships
|
||||||
|
|
||||||
|
Supports reading from and writing to all supported formats:
|
||||||
|
Input formats:
|
||||||
|
- dbml: DBML schema files
|
||||||
|
- dctx: DCTX schema files
|
||||||
|
- drawdb: DrawDB JSON files
|
||||||
|
- graphql: GraphQL schema files (.graphql, SDL)
|
||||||
|
- json: JSON database schema
|
||||||
|
- yaml: YAML database schema
|
||||||
|
- gorm: GORM model files (Go, file or directory)
|
||||||
|
- bun: Bun model files (Go, file or directory)
|
||||||
|
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
|
||||||
|
- prisma: Prisma schema files (.prisma)
|
||||||
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
|
- pgsql: PostgreSQL database (live connection)
|
||||||
|
- sqlite: SQLite database file
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- dbml: DBML schema files
|
||||||
|
- dctx: DCTX schema files
|
||||||
|
- drawdb: DrawDB JSON files
|
||||||
|
- graphql: GraphQL schema files (.graphql, SDL)
|
||||||
|
- json: JSON database schema
|
||||||
|
- yaml: YAML database schema
|
||||||
|
- gorm: GORM model files (Go)
|
||||||
|
- bun: Bun model files (Go)
|
||||||
|
- drizzle: Drizzle ORM schema files (TypeScript)
|
||||||
|
- prisma: Prisma schema files (.prisma)
|
||||||
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
|
- pgsql: PostgreSQL SQL schema
|
||||||
|
- sqlite: SQLite SQL schema (with automatic schema flattening)
|
||||||
|
|
||||||
|
Connection String Examples:
|
||||||
|
PostgreSQL:
|
||||||
|
postgres://username:password@localhost:5432/database_name
|
||||||
|
postgres://username:password@localhost/database_name
|
||||||
|
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||||
|
postgresql://user:pass@host/dbname?sslmode=require
|
||||||
|
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
||||||
|
SQLite:
|
||||||
|
/path/to/database.db
|
||||||
|
./relative/path/database.sqlite
|
||||||
|
database.db
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
# Edit a DBML schema file
|
||||||
|
relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
|
||||||
|
|
||||||
|
# Edit a PostgreSQL database
|
||||||
|
relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
|
||||||
|
--to pgsql --to-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Edit JSON schema and output to GORM
|
||||||
|
relspec edit --from json --from-path db.json --to gorm --to-path models/
|
||||||
|
|
||||||
|
# Edit GORM models in place
|
||||||
|
relspec edit --from gorm --from-path ./models --to gorm --to-path ./models
|
||||||
|
|
||||||
|
# Edit SQLite database
|
||||||
|
relspec edit --from sqlite --from-path database.db --to sqlite --to-path database.db
|
||||||
|
|
||||||
|
# Convert SQLite to DBML
|
||||||
|
relspec edit --from sqlite --from-path database.db --to dbml --to-path schema.dbml`,
|
||||||
|
RunE: runEdit,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
editCmd.Flags().StringVar(&editSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
|
||||||
|
editCmd.Flags().StringVar(&editSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
||||||
|
editCmd.Flags().StringVar(&editSourceConn, "from-conn", "", "Source connection string (for pgsql) or file path (for sqlite)")
|
||||||
|
editCmd.Flags().StringVar(&editTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
|
||||||
|
editCmd.Flags().StringVar(&editTargetPath, "to-path", "", "Target file path (for file-based formats)")
|
||||||
|
editCmd.Flags().StringVar(&editSchemaFilter, "schema", "", "Filter to a specific schema by name")
|
||||||
|
|
||||||
|
// Flags are now optional - if not provided, UI will prompt for load/save options
|
||||||
|
}
|
||||||
|
|
||||||
|
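// runEdit loads the source schema (when --from is given), launches the
// interactive TUI editor, and, if --to was supplied on the command line,
// writes the edited schema back out when the editor exits.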
func runEdit(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Editor ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	var db *models.Database
	var loadConfig *ui.LoadConfig
	var saveConfig *ui.SaveConfig
	var err error

	// Check if source parameters are provided
	if editSourceType != "" {
		// Read source database
		fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
		fmt.Fprintf(os.Stderr, " Format: %s\n", editSourceType)
		if editSourcePath != "" {
			fmt.Fprintf(os.Stderr, " Path: %s\n", editSourcePath)
		}
		if editSourceConn != "" {
			fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(editSourceConn))
		}

		db, err = readDatabaseForEdit(editSourceType, editSourcePath, editSourceConn, "Source")
		if err != nil {
			return fmt.Errorf("failed to read source: %w", err)
		}

		// Apply schema filter if specified
		if editSchemaFilter != "" {
			db = filterDatabaseBySchema(db, editSchemaFilter)
		}

		fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
		fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))

		totalTables := 0
		for _, schema := range db.Schemas {
			totalTables += len(schema.Tables)
		}
		fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)

		// Store load config
		loadConfig = &ui.LoadConfig{
			SourceType: editSourceType,
			FilePath:   editSourcePath,
			ConnString: editSourceConn,
		}
	} else {
		// No source parameters provided, UI will show load screen
		fmt.Fprintf(os.Stderr, "[1/2] No source specified, editor will prompt for database\n\n")
	}

	// Store save config if target parameters are provided
	if editTargetType != "" {
		saveConfig = &ui.SaveConfig{
			TargetType: editTargetType,
			FilePath:   editTargetPath,
		}
	}

	// Launch interactive TUI
	if editSourceType != "" {
		fmt.Fprintf(os.Stderr, "[2/3] Launching interactive editor...\n")
	} else {
		fmt.Fprintf(os.Stderr, "[2/2] Launching interactive editor...\n")
	}
	fmt.Fprintf(os.Stderr, " Use arrow keys and shortcuts to navigate\n")
	fmt.Fprintf(os.Stderr, " Press ? for help\n\n")

	editor := ui.NewSchemaEditorWithConfigs(db, loadConfig, saveConfig)
	if err := editor.Run(); err != nil {
		return fmt.Errorf("editor failed: %w", err)
	}

	// Only write to output if target parameters were provided and the database was loaded from the command line
	if editTargetType != "" && editSourceType != "" && db != nil {
		fmt.Fprintf(os.Stderr, "[3/3] Writing changes to output...\n")
		fmt.Fprintf(os.Stderr, " Format: %s\n", editTargetType)
		if editTargetPath != "" {
			fmt.Fprintf(os.Stderr, " Path: %s\n", editTargetPath)
		}

		// Get the potentially modified database from the editor
		err = writeDatabaseForEdit(editTargetType, editTargetPath, "", editor.GetDatabase(), "Target")
		if err != nil {
			return fmt.Errorf("failed to write output: %w", err)
		}

		fmt.Fprintf(os.Stderr, " ✓ Successfully written database\n")
	}

	fmt.Fprintf(os.Stderr, "\n=== Edit complete ===\n")

	return nil
}

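// readDatabaseForEdit constructs the reader matching the given format and
// reads the database model from a file path or connection string. The label
// ("Source"/"Target") is used to prefix error messages.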
func readDatabaseForEdit(dbType, filePath, connString, label string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DBML format", label)
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for JSON format", label)
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for YAML format", label)
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GORM format", label)
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Bun format", label)
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		if connString == "" {
			return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	case "sqlite", "sqlite3":
		// SQLite can use either file path or connection string
		dbPath := filePath
		if dbPath == "" {
			dbPath = connString
		}
		if dbPath == "" {
			return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
		}
		reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
	default:
		return nil, fmt.Errorf("%s: unsupported format: %s", label, dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, fmt.Errorf("%s: %w", label, err)
	}

	return db, nil
}

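// writeDatabaseForEdit constructs the writer matching the given format and
// writes the (possibly modified) database model to the output path.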
func writeDatabaseForEdit(dbType, filePath, connString string, db *models.Database, label string) error {
	var writer writers.Writer

	switch strings.ToLower(dbType) {
	case "dbml":
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "dctx":
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drawdb":
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "graphql":
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "json":
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "yaml":
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "gorm":
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "bun":
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drizzle":
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "prisma":
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "typeorm":
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "sqlite", "sqlite3":
		writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "pgsql":
		writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	default:
		return fmt.Errorf("%s: unsupported format: %s", label, dbType)
	}

	err := writer.WriteDatabase(db)
	if err != nil {
		return fmt.Errorf("%s: %w", label, err)
	}

	return nil
}
333 cmd/relspec/inspect.go Normal file
@@ -0,0 +1,333 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

var (
	inspectSourceType   string
	inspectSourcePath   string
	inspectSourceConn   string
	inspectRulesPath    string
	inspectOutputFormat string
	inspectOutputPath   string
	inspectSchemaFilter string
)

var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long: `Inspect database schemas from various formats and validate against configurable rules.

Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and generates validation reports.

Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection)
- sqlite: SQLite database file

Output formats:
- markdown: Human-readable markdown report (default, with ANSI colors for terminal)
- json: JSON report for tooling integration

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require
  host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable

Examples:
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

# Inspect and output JSON report to file
relspec inspect --from json --from-path db.json \
  --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public`,
	RunE: runInspect,
}

func init() {
	inspectCmd.Flags().StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
	inspectCmd.Flags().StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
	inspectCmd.Flags().StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
	inspectCmd.Flags().StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
	inspectCmd.Flags().StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
	inspectCmd.Flags().StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
	inspectCmd.Flags().StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")

	err := inspectCmd.MarkFlagRequired("from")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
	}
}

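// runInspect reads the source schema, loads the validation rules, runs the
// inspector, and emits the report in the requested output format. It returns
// an error (and therefore a non-zero exit code) if the report contains errors.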
func runInspect(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read source database
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
	if inspectSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
	}
	if inspectSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
	}

	db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	// Apply schema filter if specified
	if inspectSchemaFilter != "" {
		db = filterDatabaseBySchema(db, inspectSchemaFilter)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))

	totalTables := 0
	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)

	// Load rules configuration
	fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
	fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)

	config, err := inspector.LoadConfig(inspectRulesPath)
	if err != nil {
		return fmt.Errorf("failed to load rules config: %w", err)
	}

	enabledCount := 0
	for _, rule := range config.Rules {
		if rule.IsEnabled() {
			enabledCount++
		}
	}
	fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)

	// Run inspection
	fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")

	insp := inspector.NewInspector(db, config)
	report, err := insp.Inspect()
	if err != nil {
		return fmt.Errorf("inspection failed: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
	fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
	fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)

	// Format and output report
	var formattedReport string
	switch strings.ToLower(inspectOutputFormat) {
	case "json":
		formatter := inspector.NewJSONFormatter()
		formattedReport, err = formatter.Format(report)
	case "markdown", "md":
		// Determine output writer for terminal detection
		var output *os.File
		if inspectOutputPath != "" {
			output, err = os.Create(inspectOutputPath)
			if err != nil {
				return fmt.Errorf("failed to create output file: %w", err)
			}
			defer output.Close()
		} else {
			output = os.Stdout
		}

		formatter := inspector.NewMarkdownFormatter(output)
		formattedReport, err = formatter.Format(report)
	default:
		return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
	}

	if err != nil {
		return fmt.Errorf("failed to format report: %w", err)
	}

	// Write output
	if inspectOutputPath != "" {
		err = os.WriteFile(inspectOutputPath, []byte(formattedReport), 0644)
		if err != nil {
			return fmt.Errorf("failed to write output file: %w", err)
		}
		fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
	} else {
		fmt.Println(formattedReport)
	}

	fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

	// Exit with appropriate code
	if report.HasErrors() {
		return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
	}

	return nil
}

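// readDatabaseForInspect constructs the reader matching the given format and
// reads the database model to be inspected.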
func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DBML format")
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DCTX format")
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DrawDB format")
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for GraphQL format")
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for JSON format")
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "yaml", "yml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for YAML format")
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for GORM format")
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Bun format")
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Drizzle format")
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for Prisma format")
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for TypeORM format")
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})

	case "pgsql", "postgres", "postgresql":
		if connString == "" {
			return nil, fmt.Errorf("connection string is required for PostgreSQL format")
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})

	case "sqlite", "sqlite3":
		// SQLite can use either file path or connection string
		dbPath := filePath
		if dbPath == "" {
			dbPath = connString
		}
		if dbPath == "" {
			return nil, fmt.Errorf("file path or connection string is required for SQLite format")
		}
		reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})

	default:
		return nil, fmt.Errorf("unsupported database type: %s", dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, err
	}

	return db, nil
}

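// filterDatabaseBySchema returns a shallow copy of db containing only the
// schema whose name matches schemaName (or no schemas if none matches).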
func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
	filtered := &models.Database{
		Name:            db.Name,
		Description:     db.Description,
		DatabaseType:    db.DatabaseType,
		DatabaseVersion: db.DatabaseVersion,
		SourceFormat:    db.SourceFormat,
		Schemas:         []*models.Schema{},
	}

	for _, schema := range db.Schemas {
		if schema.Name == schemaName {
			filtered.Schemas = append(filtered.Schemas, schema)
			break
		}
	}

	return filtered
}
467 cmd/relspec/merge.go Normal file
@@ -0,0 +1,467 @@
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/merge"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
	wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)

var (
	mergeTargetType    string
	mergeTargetPath    string
	mergeTargetConn    string
	mergeSourceType    string
	mergeSourcePath    string
	mergeSourceConn    string
	mergeOutputType    string
	mergeOutputPath    string
	mergeOutputConn    string
	mergeSkipDomains   bool
	mergeSkipRelations bool
	mergeSkipEnums     bool
	mergeSkipViews     bool
	mergeSkipSequences bool
	mergeSkipTables    string // Comma-separated table names to skip
	mergeVerbose       bool
	mergeReportPath    string // Path to write merge report
	mergeFlattenSchema bool
)

var mergeCmd = &cobra.Command{
	Use:   "merge",
	Short: "Merge database schemas (additive only - adds missing items)",
	Long: `Merge one database schema into another. Performs additive merging only:
adds missing schemas, tables, columns, and other objects without modifying
or deleting existing items.

The target database is loaded first, then the source database is merged into it.
The result can be saved to a new format or updated in place.

Examples:
# Merge two JSON schemas
relspec merge --target json --target-path base.json \
  --source json --source-path additional.json \
  --output json --output-path merged.json

# Merge from PostgreSQL into JSON
relspec merge --target json --target-path mydb.json \
  --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
  --output json --output-path combined.json

# Merge and execute on PostgreSQL database with report
relspec merge --target json --target-path base.json \
  --source json --source-path additional.json \
  --output pgsql --output-conn "postgres://user:pass@localhost/target_db" \
  --merge-report merge-report.json

# Merge DBML and YAML, skip relations
relspec merge --target dbml --target-path schema.dbml \
  --source yaml --source-path tables.yaml \
  --output dbml --output-path merged.dbml \
  --skip-relations

# Merge and save back to target format
relspec merge --target json --target-path base.json \
  --source json --source-path patch.json \
  --output json --output-path base.json`,
	RunE: runMerge,
}

func init() {
	// Target database flags
	mergeCmd.Flags().StringVar(&mergeTargetType, "target", "", "Target format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite")
	mergeCmd.Flags().StringVar(&mergeTargetPath, "target-path", "", "Target file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeTargetConn, "target-conn", "", "Target connection string (required for pgsql)")

	// Source database flags
	mergeCmd.Flags().StringVar(&mergeSourceType, "source", "", "Source format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite")
	mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeSourceConn, "source-conn", "", "Source connection string (required for pgsql)")

	// Output flags
	mergeCmd.Flags().StringVar(&mergeOutputType, "output", "", "Output format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite")
	mergeCmd.Flags().StringVar(&mergeOutputPath, "output-path", "", "Output file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeOutputConn, "output-conn", "", "Output connection string (for pgsql)")

	// Merge options
	mergeCmd.Flags().BoolVar(&mergeSkipDomains, "skip-domains", false, "Skip domains during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipRelations, "skip-relations", false, "Skip relations during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipEnums, "skip-enums", false, "Skip enums during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipViews, "skip-views", false, "Skip views during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipSequences, "skip-sequences", false, "Skip sequences during merge")
	mergeCmd.Flags().StringVar(&mergeSkipTables, "skip-tables", "", "Comma-separated list of table names to skip during merge")
	mergeCmd.Flags().BoolVar(&mergeVerbose, "verbose", false, "Show verbose output")
	mergeCmd.Flags().StringVar(&mergeReportPath, "merge-report", "", "Path to write merge report (JSON format)")
	mergeCmd.Flags().BoolVar(&mergeFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
}

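// runMerge validates the flags, reads the target and source databases, merges
// the source into the target additively, and writes the merged result to the
// output format.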
func runMerge(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Merge ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Validate required flags
	if mergeTargetType == "" {
		return fmt.Errorf("--target format is required")
	}
	if mergeSourceType == "" {
		return fmt.Errorf("--source format is required")
	}
	if mergeOutputType == "" {
		return fmt.Errorf("--output format is required")
	}

	// Validate and expand file paths
	if mergeTargetType != "pgsql" {
		if mergeTargetPath == "" {
			return fmt.Errorf("--target-path is required for %s format", mergeTargetType)
		}
		mergeTargetPath = expandPath(mergeTargetPath)
	} else if mergeTargetConn == "" {
		return fmt.Errorf("--target-conn is required for pgsql format")
	}

	if mergeSourceType != "pgsql" {
		if mergeSourcePath == "" {
			return fmt.Errorf("--source-path is required for %s format", mergeSourceType)
		}
		mergeSourcePath = expandPath(mergeSourcePath)
	} else if mergeSourceConn == "" {
		return fmt.Errorf("--source-conn is required for pgsql format")
	}

	if mergeOutputType != "pgsql" {
		if mergeOutputPath == "" {
			return fmt.Errorf("--output-path is required for %s format", mergeOutputType)
		}
		mergeOutputPath = expandPath(mergeOutputPath)
	}

	// Step 1: Read target database
	fmt.Fprintf(os.Stderr, "[1/4] Reading target database...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeTargetType)
	if mergeTargetPath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeTargetPath)
	}
	if mergeTargetConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeTargetConn))
	}

	targetDB, err := readDatabaseForMerge(mergeTargetType, mergeTargetPath, mergeTargetConn, "Target")
	if err != nil {
		return fmt.Errorf("failed to read target database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read target database '%s'\n", targetDB.Name)
	printDatabaseStats(targetDB)

	// Step 2: Read source database
	fmt.Fprintf(os.Stderr, "\n[2/4] Reading source database...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeSourceType)
	if mergeSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
	}
	if mergeSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
	}

	sourceDB, err := readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
	if err != nil {
		return fmt.Errorf("failed to read source database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read source database '%s'\n", sourceDB.Name)
	printDatabaseStats(sourceDB)

	// Step 3: Merge databases
	fmt.Fprintf(os.Stderr, "\n[3/4] Merging databases...\n")

	opts := &merge.MergeOptions{
		SkipDomains:   mergeSkipDomains,
		SkipRelations: mergeSkipRelations,
		SkipEnums:     mergeSkipEnums,
		SkipViews:     mergeSkipViews,
		SkipSequences: mergeSkipSequences,
	}

	// Parse skip-tables flag
	if mergeSkipTables != "" {
		opts.SkipTableNames = parseSkipTables(mergeSkipTables)
		if len(opts.SkipTableNames) > 0 {
			fmt.Fprintf(os.Stderr, " Skipping tables: %s\n", mergeSkipTables)
		}
	}

	result := merge.MergeDatabases(targetDB, sourceDB, opts)

	// Update timestamp
	targetDB.UpdateDate()

	// Print merge summary
	fmt.Fprintf(os.Stderr, " ✓ Merge complete\n\n")
	fmt.Fprintf(os.Stderr, "%s\n", merge.GetMergeSummary(result))

	// Step 4: Write output
	fmt.Fprintf(os.Stderr, "\n[4/4] Writing output...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeOutputType)
	if mergeOutputPath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeOutputPath)
	}

	err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, mergeOutputConn, targetDB, "Output", mergeFlattenSchema)
	if err != nil {
		return fmt.Errorf("failed to write output: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully written merged database\n")
	fmt.Fprintf(os.Stderr, "\n=== Merge complete ===\n")

	return nil
}

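// readDatabaseForMerge constructs the reader matching the given format and
// reads one side of the merge; label ("Target"/"Source") prefixes errors.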
func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DBML format", label)
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for JSON format", label)
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for YAML format", label)
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GORM format", label)
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Bun format", label)
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		if connString == "" {
			return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	case "sqlite", "sqlite3":
		// SQLite can use either file path or connection string
		dbPath := filePath
		if dbPath == "" {
			dbPath = connString
		}
		if dbPath == "" {
			return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
		}
		reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
	default:
		return nil, fmt.Errorf("%s: unsupported format '%s'", label, dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, err
	}

	return db, nil
}

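// writeDatabaseForMerge constructs the writer matching the given format and
// writes the merged database, optionally flattening schema-qualified names.
// For pgsql it passes the connection string (and merge report path, if set)
// through the writer metadata.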
func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string, flattenSchema bool) error {
	var writer writers.Writer

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DBML format", label)
		}
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "dctx":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "drawdb":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "graphql":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "json":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for JSON format", label)
		}
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "yaml":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for YAML format", label)
		}
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "gorm":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for GORM format", label)
		}
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "bun":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Bun format", label)
		}
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "drizzle":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "prisma":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "typeorm":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "sqlite", "sqlite3":
		writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
	case "pgsql":
		writerOpts := &writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema}
		if connString != "" {
			writerOpts.Metadata = map[string]interface{}{
				"connection_string": connString,
			}
			// Add report path if merge report is enabled
			if mergeReportPath != "" {
				writerOpts.Metadata["report_path"] = mergeReportPath
			}
		}
		writer = wpgsql.NewWriter(writerOpts)
	default:
		return fmt.Errorf("%s: unsupported format '%s'", label, dbType)
	}

	return writer.WriteDatabase(db)
}

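// expandPath expands a leading ~ to the current user's home directory, e.g.
// expandPath("~/schemas/db.json") yields "/home/user/schemas/db.json" on a
// system whose home directory is /home/user; other paths pass through as-is.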
func expandPath(path string) string {
	if len(path) > 0 && path[0] == '~' {
		home, err := os.UserHomeDir()
		if err == nil {
			return filepath.Join(home, path[1:])
		}
	}
	return path
}

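// printDatabaseStats prints schema, table, column, constraint, and index
// counts for a database to stderr.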
func printDatabaseStats(db *models.Database) {
	totalTables := 0
	totalColumns := 0
	totalConstraints := 0
	totalIndexes := 0

	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
		for _, table := range schema.Tables {
			totalColumns += len(table.Columns)
			totalConstraints += len(table.Constraints)
			totalIndexes += len(table.Indexes)
		}
	}

	fmt.Fprintf(os.Stderr, " Schemas: %d, Tables: %d, Columns: %d, Constraints: %d, Indexes: %d\n",
		len(db.Schemas), totalTables, totalColumns, totalConstraints, totalIndexes)
}

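// parseSkipTables splits a comma-separated list of table names into a lookup
// set, lowercased for case-insensitive matching; for example,
// parseSkipTables("Users, ORDERS") returns {"users": true, "orders": true}.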
func parseSkipTables(skipTablesStr string) map[string]bool {
	skipTables := make(map[string]bool)
	if skipTablesStr == "" {
		return skipTables
	}

	// Split by comma and trim whitespace
	parts := strings.Split(skipTablesStr, ",")
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		if trimmed != "" {
			// Store in lowercase for case-insensitive matching
			skipTables[strings.ToLower(trimmed)] = true
		}
	}

	return skipTables
}
@@ -18,4 +18,10 @@ JSON, YAML, SQL, etc.).`,
 func init() {
 	rootCmd.AddCommand(convertCmd)
 	rootCmd.AddCommand(diffCmd)
+	rootCmd.AddCommand(inspectCmd)
+	rootCmd.AddCommand(scriptsCmd)
+	rootCmd.AddCommand(templCmd)
+	rootCmd.AddCommand(editCmd)
+	rootCmd.AddCommand(mergeCmd)
+	rootCmd.AddCommand(splitCmd)
 }
295 cmd/relspec/scripts.go Normal file
@@ -0,0 +1,295 @@
package main

import (
	"fmt"
	"os"
	"sort"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

var (
	scriptsDir          string
	scriptsConn         string
	scriptsSchemaName   string
	scriptsDBName       string
	scriptsIgnoreErrors bool
)

var scriptsCmd = &cobra.Command{
	Use:   "scripts",
	Short: "Manage and execute SQL migration scripts",
	Long: `Manage and execute SQL migration scripts from a directory.

Scripts must follow the naming pattern (both separators supported):
  {priority}_{sequence}_{name}.sql or .pgsql
  {priority}-{sequence}-{name}.sql or .pgsql

Example filenames (underscore format):
  1_001_create_users.sql   # Priority 1, Sequence 1
  1_002_create_posts.sql   # Priority 1, Sequence 2
  2_001_add_indexes.pgsql  # Priority 2, Sequence 1

Example filenames (hyphen format):
  1-001-create-users.sql   # Priority 1, Sequence 1
  1-002-create-posts.sql   # Priority 1, Sequence 2
  10-10-create-newid.pgsql # Priority 10, Sequence 10

Both formats can be mixed in the same directory and subdirectories.
Scripts are executed in order: Priority (ascending), Sequence (ascending), Name (alphabetical).`,
}

var scriptsListCmd = &cobra.Command{
	Use:   "list",
	Short: "List SQL scripts from a directory",
	Long: `List SQL scripts from a directory and show their execution order.

The scripts are read recursively from the specified directory and displayed in the order
they would be executed: Priority (ascending), then Sequence (ascending), then Name (alphabetical).

Example:
  relspec scripts list --dir ./migrations`,
	RunE: runScriptsList,
}

var scriptsExecuteCmd = &cobra.Command{
	Use:   "execute",
	Short: "Execute SQL scripts against a database",
	Long: `Execute SQL scripts from a directory against a PostgreSQL database.

Scripts are executed in order: Priority (ascending), Sequence (ascending), Name (alphabetical).
By default, execution stops immediately on the first error. Use --ignore-errors to continue execution.

The directory is scanned recursively for all subdirectories and files matching the patterns:
  {priority}_{sequence}_{name}.sql or .pgsql (underscore format)
  {priority}-{sequence}-{name}.sql or .pgsql (hyphen format)

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require

Examples:
# Execute migration scripts from a directory (including subdirectories)
relspec scripts execute --dir ./migrations \
  --conn "postgres://user:pass@localhost:5432/mydb"

# Execute with custom schema name
relspec scripts execute --dir ./migrations \
  --conn "postgres://localhost/mydb" \
  --schema public

# Execute with SSL disabled
relspec scripts execute --dir ./sql \
  --conn "postgres://user:pass@localhost/db?sslmode=disable"

# Continue executing even if errors occur
relspec scripts execute --dir ./migrations \
  --conn "postgres://localhost/mydb" \
  --ignore-errors`,
	RunE: runScriptsExecute,
}

func init() {
	// List command flags
	scriptsListCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsListCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsListCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
	err := scriptsListCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}

	// Execute command flags
	scriptsExecuteCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
	scriptsExecuteCmd.Flags().BoolVar(&scriptsIgnoreErrors, "ignore-errors", false, "Continue executing scripts even if errors occur")

	err = scriptsExecuteCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}
	err = scriptsExecuteCmd.MarkFlagRequired("conn")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking conn flag as required: %v\n", err)
	}

	// Add subcommands to scripts command
	scriptsCmd.AddCommand(scriptsListCmd)
	scriptsCmd.AddCommand(scriptsExecuteCmd)
}

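// runScriptsList reads the scripts from the directory and prints them in
// execution order (priority, sequence, name) with a line count per script.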
func runScriptsList(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== SQL Scripts List ===\n")
	fmt.Fprintf(os.Stderr, "Directory: %s\n\n", scriptsDir)

	// Read scripts from directory
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: scriptsDir,
		Metadata: map[string]any{
			"schema_name":   scriptsSchemaName,
			"database_name": scriptsDBName,
		},
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		return fmt.Errorf("failed to read scripts: %w", err)
	}

	if len(db.Schemas) == 0 {
		fmt.Fprintf(os.Stderr, "No schemas found\n")
		return nil
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) == 0 {
		fmt.Fprintf(os.Stderr, "No SQL scripts found matching pattern {priority}_{sequence}_{name}.sql\n")
		return nil
	}

	// Sort scripts by Priority, Sequence, then Name
	sortedScripts := make([]*struct {
		name     string
		priority int
		sequence uint
		sqlLines int
	}, len(schema.Scripts))

	for i, script := range schema.Scripts {
		// Count the lines in the SQL
		sqlLines := 0
		for _, b := range []byte(script.SQL) {
			if b == '\n' {
				sqlLines++
			}
		}
		if len(script.SQL) > 0 && script.SQL[len(script.SQL)-1] != '\n' {
			sqlLines++ // Count the last line if there is no trailing newline
		}

		sortedScripts[i] = &struct {
			name     string
			priority int
			sequence uint
			sqlLines int
		}{
			name:     script.Name,
			priority: script.Priority,
			sequence: script.Sequence,
			sqlLines: sqlLines,
		}
	}

	sort.Slice(sortedScripts, func(i, j int) bool {
		if sortedScripts[i].priority != sortedScripts[j].priority {
			return sortedScripts[i].priority < sortedScripts[j].priority
		}
		if sortedScripts[i].sequence != sortedScripts[j].sequence {
			return sortedScripts[i].sequence < sortedScripts[j].sequence
		}
		return sortedScripts[i].name < sortedScripts[j].name
	})

	fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "No.", "Priority", "Sequence", "Name", "Lines")
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "----", "--------", "--------", "------------------------------", "-----")

	for i, script := range sortedScripts {
		fmt.Fprintf(os.Stderr, "%-4d %-10d %-8d %-30s %d\n",
			i+1,
			script.priority,
			script.sequence,
			script.name,
			script.sqlLines,
		)
	}

	fmt.Fprintf(os.Stderr, "\n")
	return nil
}

func runScriptsExecute(cmd *cobra.Command, args []string) error {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== SQL Scripts Execution ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Started at: %s\n", getCurrentTimestamp())
|
||||||
|
fmt.Fprintf(os.Stderr, "Directory: %s\n", scriptsDir)
|
||||||
|
fmt.Fprintf(os.Stderr, "Database: %s\n\n", maskPassword(scriptsConn))
|
||||||
|
|
||||||
|
// Step 1: Read scripts from directory
|
||||||
|
fmt.Fprintf(os.Stderr, "[1/2] Reading SQL scripts...\n")
|
||||||
|
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: scriptsDir,
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": scriptsSchemaName,
|
||||||
|
"database_name": scriptsDBName,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read scripts: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return fmt.Errorf("no schemas found")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, " No scripts found. Nothing to execute.\n\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))
|
||||||
|
|
||||||
|
// Step 2: Execute scripts
|
||||||
|
fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence → Name)...\n\n")
|
||||||
|
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": scriptsConn,
|
||||||
|
"ignore_errors": scriptsIgnoreErrors,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := writer.WriteSchema(schema); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n")
|
||||||
|
return fmt.Errorf("script execution failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get execution results from writer metadata
|
||||||
|
totalCount := len(schema.Scripts)
|
||||||
|
successCount := totalCount
|
||||||
|
failedCount := 0
|
||||||
|
|
||||||
|
opts := writer.Options()
|
||||||
|
if total, exists := opts.Metadata["execution_total"].(int); exists {
|
||||||
|
totalCount = total
|
||||||
|
}
|
||||||
|
if success, exists := opts.Metadata["execution_success"].(int); exists {
|
||||||
|
successCount = success
|
||||||
|
}
|
||||||
|
if failed, exists := opts.Metadata["execution_failed"].(int); exists {
|
||||||
|
failedCount = failed
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
|
||||||
|
fmt.Fprintf(os.Stderr, "Total scripts: %d\n", totalCount)
|
||||||
|
fmt.Fprintf(os.Stderr, "Successful: %d\n", successCount)
|
||||||
|
if failedCount > 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "Failed: %d\n", failedCount)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, "\n")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
319 cmd/relspec/split.go Normal file
@@ -0,0 +1,319 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

var (
	splitSourceType    string
	splitSourcePath    string
	splitSourceConn    string
	splitTargetType    string
	splitTargetPath    string
	splitSchemas       string
	splitTables        string
	splitPackageName   string
	splitDatabaseName  string
	splitExcludeSchema string
	splitExcludeTables string
)

var splitCmd = &cobra.Command{
	Use:   "split",
	Short: "Split database schemas to extract selected tables into a separate database",
	Long: `Extract selected schemas and tables from a database and write them to a separate output.

The split command allows you to:
- Select specific schemas to include in the output
- Select specific tables within schemas
- Exclude specific schemas or tables if preferred
- Export the selected subset to any supported format

Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection)

Output formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go)
- bun: Bun model files (Go)
- drizzle: Drizzle ORM schema files (TypeScript)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL SQL schema

Examples:
  # Split specific schemas from DBML
  relspec split --from dbml --from-path schema.dbml \
    --schemas public,auth \
    --to json --to-path subset.json

  # Extract specific tables from PostgreSQL
  relspec split --from pgsql \
    --from-conn "postgres://user:pass@localhost:5432/mydb" \
    --schemas public \
    --tables users,orders,products \
    --to dbml --to-path subset.dbml

  # Exclude specific tables
  relspec split --from json --from-path schema.json \
    --exclude-tables "audit_log,system_config,temp_data" \
    --to json --to-path public_schema.json

  # Split and convert to GORM
  relspec split --from json --from-path schema.json \
    --tables "users,posts,comments" \
    --to gorm --to-path models/ --package models \
    --database-name MyAppDB

  # Exclude specific schema and tables
  relspec split --from pgsql \
    --from-conn "postgres://user:pass@localhost/db" \
    --exclude-schema pg_catalog,information_schema \
    --exclude-tables "temp_users,debug_logs" \
    --to json --to-path public_schema.json`,
	RunE: runSplit,
}

func init() {
	splitCmd.Flags().StringVar(&splitSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	splitCmd.Flags().StringVar(&splitSourcePath, "from-path", "", "Source file path (for file-based formats)")
	splitCmd.Flags().StringVar(&splitSourceConn, "from-conn", "", "Source connection string (for database formats)")

	splitCmd.Flags().StringVar(&splitTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	splitCmd.Flags().StringVar(&splitTargetPath, "to-path", "", "Target output path (file or directory)")
	splitCmd.Flags().StringVar(&splitPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
	splitCmd.Flags().StringVar(&splitDatabaseName, "database-name", "", "Override database name in output")

	splitCmd.Flags().StringVar(&splitSchemas, "schemas", "", "Comma-separated list of schema names to include")
	splitCmd.Flags().StringVar(&splitTables, "tables", "", "Comma-separated list of table names to include (case-insensitive)")
	splitCmd.Flags().StringVar(&splitExcludeSchema, "exclude-schema", "", "Comma-separated list of schema names to exclude")
	splitCmd.Flags().StringVar(&splitExcludeTables, "exclude-tables", "", "Comma-separated list of table names to exclude (case-insensitive)")

	err := splitCmd.MarkFlagRequired("from")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
	}
	err = splitCmd.MarkFlagRequired("to")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
	}
	err = splitCmd.MarkFlagRequired("to-path")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
	}
}

func runSplit(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Split ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read source database
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, "  Format: %s\n", splitSourceType)
	if splitSourcePath != "" {
		fmt.Fprintf(os.Stderr, "  Path: %s\n", splitSourcePath)
	}
	if splitSourceConn != "" {
		fmt.Fprintf(os.Stderr, "  Conn: %s\n", maskPassword(splitSourceConn))
	}

	db, err := readDatabaseForConvert(splitSourceType, splitSourcePath, splitSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	fmt.Fprintf(os.Stderr, "  ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, "  Found: %d schema(s)\n", len(db.Schemas))

	totalTables := 0
	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, "  Found: %d table(s)\n\n", totalTables)

	// Filter the database
	fmt.Fprintf(os.Stderr, "[2/3] Filtering schemas and tables...\n")
	filteredDB, err := filterDatabase(db)
	if err != nil {
		return fmt.Errorf("failed to filter database: %w", err)
	}

	if splitDatabaseName != "" {
		filteredDB.Name = splitDatabaseName
	}

	filteredTables := 0
	for _, schema := range filteredDB.Schemas {
		filteredTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, "  ✓ Filtered to: %d schema(s), %d table(s)\n\n", len(filteredDB.Schemas), filteredTables)

	// Write to target format
	fmt.Fprintf(os.Stderr, "[3/3] Writing to target format...\n")
	fmt.Fprintf(os.Stderr, "  Format: %s\n", splitTargetType)
	fmt.Fprintf(os.Stderr, "  Output: %s\n", splitTargetPath)
	if splitPackageName != "" {
		fmt.Fprintf(os.Stderr, "  Package: %s\n", splitPackageName)
	}

	err = writeDatabase(
		filteredDB,
		splitTargetType,
		splitTargetPath,
		splitPackageName,
		"",    // no schema filter for split
		false, // no flatten-schema for split
	)
	if err != nil {
		return fmt.Errorf("failed to write output: %w", err)
	}

	fmt.Fprintf(os.Stderr, "  ✓ Successfully written to '%s'\n\n", splitTargetPath)
	fmt.Fprintf(os.Stderr, "=== Split Completed Successfully ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

	return nil
}

// filterDatabase filters the database based on provided criteria
func filterDatabase(db *models.Database) (*models.Database, error) {
	filteredDB := &models.Database{
		Name:            db.Name,
		Description:     db.Description,
		Comment:         db.Comment,
		DatabaseType:    db.DatabaseType,
		DatabaseVersion: db.DatabaseVersion,
		SourceFormat:    db.SourceFormat,
		UpdatedAt:       db.UpdatedAt,
		GUID:            db.GUID,
		Schemas:         []*models.Schema{},
		Domains:         db.Domains, // Keep domains for now
	}

	// Parse filter flags
	includeSchemas := parseCommaSeparated(splitSchemas)
	includeTables := parseCommaSeparated(splitTables)
	excludeSchemas := parseCommaSeparated(splitExcludeSchema)
	excludeTables := parseCommaSeparated(splitExcludeTables)

	// Convert table names to lowercase for case-insensitive matching
	includeTablesLower := make(map[string]bool)
	for _, t := range includeTables {
		includeTablesLower[strings.ToLower(t)] = true
	}

	excludeTablesLower := make(map[string]bool)
	for _, t := range excludeTables {
		excludeTablesLower[strings.ToLower(t)] = true
	}

	// Iterate through schemas
	for _, schema := range db.Schemas {
		// Check if schema should be excluded (schema names match case-sensitively)
		if contains(excludeSchemas, schema.Name) {
			continue
		}

		// Check if schema should be included
		if len(includeSchemas) > 0 && !contains(includeSchemas, schema.Name) {
			continue
		}

		// Create a copy of the schema with filtered tables
		filteredSchema := &models.Schema{
			Name:        schema.Name,
			Description: schema.Description,
			Owner:       schema.Owner,
			Permissions: schema.Permissions,
			Comment:     schema.Comment,
			Metadata:    schema.Metadata,
			Scripts:     schema.Scripts,
			Sequence:    schema.Sequence,
			Relations:   schema.Relations,
			Enums:       schema.Enums,
			UpdatedAt:   schema.UpdatedAt,
			GUID:        schema.GUID,
			Tables:      []*models.Table{},
			Views:       schema.Views,
			Sequences:   schema.Sequences,
		}

		// Filter tables within the schema
		for _, table := range schema.Tables {
			tableLower := strings.ToLower(table.Name)

			// Check if table should be excluded
			if excludeTablesLower[tableLower] {
				continue
			}

			// If specific tables are requested, only include those
			if len(includeTablesLower) > 0 {
				if !includeTablesLower[tableLower] {
					continue
				}
			}
			filteredSchema.Tables = append(filteredSchema.Tables, table)
		}

		// Only add schema if it has tables (unless no table filter was specified)
		if len(filteredSchema.Tables) > 0 || (len(includeTablesLower) == 0 && len(excludeTablesLower) == 0) {
			filteredDB.Schemas = append(filteredDB.Schemas, filteredSchema)
		}
	}

	if len(filteredDB.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas matched the filter criteria")
	}

	return filteredDB, nil
}

// parseCommaSeparated parses a comma-separated string into a slice, trimming whitespace
func parseCommaSeparated(s string) []string {
	if s == "" {
		return []string{}
	}

	parts := strings.Split(s, ",")
	result := make([]string, 0, len(parts))
	for _, p := range parts {
		trimmed := strings.TrimSpace(p)
		if trimmed != "" {
			result = append(result, trimmed)
		}
	}
	return result
}

// contains checks if a string is in a slice
func contains(slice []string, item string) bool {
	for _, s := range slice {
		if s == item {
			return true
		}
	}
	return false
}
167 cmd/relspec/templ.go Normal file
@@ -0,0 +1,167 @@
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wtemplate "git.warky.dev/wdevs/relspecgo/pkg/writers/template"
)

var (
	templSourceType      string
	templSourcePath      string
	templSourceConn      string
	templTemplatePath    string
	templOutputPath      string
	templSchemaFilter    string
	templMode            string
	templFilenamePattern string
)

var templCmd = &cobra.Command{
	Use:   "templ",
	Short: "Apply custom templates to database schemas",
	Long: `Apply custom Go text templates to database schemas with flexible execution modes.

The templ command allows you to transform database schemas using custom Go text
templates. It supports multiple execution modes for different use cases:

Execution Modes:
  database   Execute template once for entire database (single output file)
  schema     Execute template once per schema (one file per schema)
  script     Execute template once per script (one file per script)
  table      Execute template once per table (one file per table)

Supported Input Formats:
  dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql

Template Functions:
  String utilities: toUpper, toLower, toCamelCase, toPascalCase, toSnakeCase, toKebabCase,
                    pluralize, singularize, title, trim, split, join, replace

  Type conversion:  sqlToGo, sqlToTypeScript, sqlToJava, sqlToPython, sqlToRust,
                    sqlToCSharp, sqlToPhp

  Filtering:        filterTables, filterColumns, filterPrimaryKeys, filterForeignKeys,
                    filterNullable, filterNotNull, filterColumnsByType

  Formatting:       toJSON, toJSONPretty, toYAML, indent, escape, comment

  Loop helpers:     enumerate, batch, reverse, first, last, skip, take, concat,
                    unique, sortBy, groupBy

  Safe access:      get, getOr, getPath, has, keys, values, merge, pick, omit,
                    sliceContains, indexOf, pluck

Examples:
  # Generate documentation from PostgreSQL database
  relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
    --template docs.tmpl --output schema-docs.md

  # Generate one TypeScript model file per table
  relspec templ --from dbml --from-path schema.dbml \
    --template ts-model.tmpl --mode table \
    --output ./models/ \
    --filename-pattern "{{.Name | toCamelCase}}.ts"

  # Generate schema documentation files
  relspec templ --from json --from-path db.json \
    --template schema.tmpl --mode schema \
    --output ./docs/ \
    --filename-pattern "{{.Name}}_schema.md"`,
	RunE: runTempl,
}

func init() {
	templCmd.Flags().StringVar(&templSourceType, "from", "", "Source format (dbml, pgsql, json, etc.)")
	templCmd.Flags().StringVar(&templSourcePath, "from-path", "", "Source file path (for file-based sources)")
	templCmd.Flags().StringVar(&templSourceConn, "from-conn", "", "Source connection string (for database sources)")
	templCmd.Flags().StringVar(&templTemplatePath, "template", "", "Template file path (required)")
	templCmd.Flags().StringVar(&templOutputPath, "output", "", "Output path (file or directory, empty for stdout)")
	templCmd.Flags().StringVar(&templSchemaFilter, "schema", "", "Filter to specific schema")
	templCmd.Flags().StringVar(&templMode, "mode", "database", "Execution mode: database, schema, script, or table")
	templCmd.Flags().StringVar(&templFilenamePattern, "filename-pattern", "{{.Name}}.txt", "Filename pattern for multi-output modes")

	_ = templCmd.MarkFlagRequired("from")
	_ = templCmd.MarkFlagRequired("template")
}

func runTempl(cmd *cobra.Command, args []string) error {
	// Print header
	fmt.Fprintf(os.Stderr, "=== RelSpec Template Execution ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read database using the same function as convert
	fmt.Fprintf(os.Stderr, "Reading from %s...\n", templSourceType)
	db, err := readDatabaseForConvert(templSourceType, templSourcePath, templSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	// Print database stats
	schemaCount := len(db.Schemas)
	tableCount := 0
	for _, schema := range db.Schemas {
		tableCount += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, "✓ Successfully read database: %s\n", db.Name)
	fmt.Fprintf(os.Stderr, "  Schemas: %d\n", schemaCount)
	fmt.Fprintf(os.Stderr, "  Tables: %d\n\n", tableCount)

	// Apply schema filter if specified
	if templSchemaFilter != "" {
		fmt.Fprintf(os.Stderr, "Filtering to schema: %s\n", templSchemaFilter)
		found := false
		for _, schema := range db.Schemas {
			if schema.Name == templSchemaFilter {
				db.Schemas = []*models.Schema{schema}
				found = true
				break
			}
		}
		if !found {
			return fmt.Errorf("schema not found: %s", templSchemaFilter)
		}
	}

	// Create template writer
	fmt.Fprintf(os.Stderr, "Loading template: %s\n", templTemplatePath)
	fmt.Fprintf(os.Stderr, "Execution mode: %s\n", templMode)

	metadata := map[string]interface{}{
		"template_path":    templTemplatePath,
		"mode":             templMode,
		"filename_pattern": templFilenamePattern,
	}

	writerOpts := &writers.WriterOptions{
		OutputPath: templOutputPath,
		Metadata:   metadata,
	}

	writer, err := wtemplate.NewWriter(writerOpts)
	if err != nil {
		return fmt.Errorf("failed to create template writer: %w", err)
	}

	// Execute template
	fmt.Fprintf(os.Stderr, "\nExecuting template...\n")
	if err := writer.WriteDatabase(db); err != nil {
		return fmt.Errorf("failed to execute template: %w", err)
	}

	// Print success message
	fmt.Fprintf(os.Stderr, "\n✓ Template executed successfully\n")
	if templOutputPath != "" {
		fmt.Fprintf(os.Stderr, "Output written to: %s\n", templOutputPath)
	} else {
		fmt.Fprintf(os.Stderr, "Output written to stdout\n")
	}
	fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())

	return nil
}
108 doc.go Normal file
@@ -0,0 +1,108 @@
// Package relspecgo provides bidirectional conversion between database schema formats.
//
// RelSpec is a comprehensive database schema tool that reads, writes, and transforms
// database schemas across multiple formats including live databases, ORM models,
// schema definition languages, and data interchange formats.
//
// # Features
//
//   - Read from 15+ formats: PostgreSQL, SQLite, DBML, GORM, Prisma, Drizzle, and more
//   - Write to 15+ formats: SQL, ORM models, schema definitions, JSON/YAML
//   - Interactive TUI editor for visual schema management
//   - Schema diff and merge capabilities
//   - Format-agnostic intermediate representation
//
// # Architecture
//
// RelSpec uses a hub-and-spoke architecture with models.Database as the central type:
//
//	Input Format → Reader → models.Database → Writer → Output Format
//
// This allows any supported input format to be converted to any supported output format
// without requiring N² conversion implementations.
//
// # Key Packages
//
//   - pkg/models: Core data structures (Database, Schema, Table, Column, etc.)
//   - pkg/readers: Input format readers (dbml, pgsql, gorm, etc.)
//   - pkg/writers: Output format writers (dbml, pgsql, gorm, etc.)
//   - pkg/ui: Interactive terminal UI for schema editing
//   - pkg/diff: Schema comparison and difference detection
//   - pkg/merge: Schema merging utilities
//   - pkg/transform: Validation and normalization
//
// # Installation
//
//	go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
//
// # Usage
//
// Command-line conversion:
//
//	relspec convert --from dbml --from-path schema.dbml \
//	  --to gorm --to-path ./models
//
// Interactive editor:
//
//	relspec edit --from pgsql --from-conn "postgres://..." \
//	  --to dbml --to-path schema.dbml
//
// Schema comparison:
//
//	relspec diff --source-type pgsql --source-conn "postgres://..." \
//	  --target-type dbml --target-path schema.dbml
//
// Merge schemas:
//
//	relspec merge --target schema1.dbml --sources schema2.dbml,schema3.dbml
//
// # Supported Formats
//
// Input/Output Formats:
//   - dbml: Database Markup Language
//   - dctx: DCTX schema files
//   - drawdb: DrawDB JSON format
//   - graphql: GraphQL schema definition
//   - json: JSON schema representation
//   - yaml: YAML schema representation
//   - gorm: Go GORM models
//   - bun: Go Bun models
//   - drizzle: TypeScript Drizzle ORM
//   - prisma: Prisma schema language
//   - typeorm: TypeScript TypeORM entities
//   - pgsql: PostgreSQL (live DB or SQL)
//   - sqlite: SQLite (database file or SQL)
//
// # Library Usage
//
// RelSpec can be used as a Go library:
//
//	import (
//		"git.warky.dev/wdevs/relspecgo/pkg/models"
//		"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
//		"git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
//	)
//
//	// Read DBML
//	reader := dbml.NewReader(&readers.ReaderOptions{
//		FilePath: "schema.dbml",
//	})
//	db, err := reader.ReadDatabase()
//
//	// Write GORM models
//	writer := gorm.NewWriter(&writers.WriterOptions{
//		OutputPath:  "./models",
//		PackageName: "models",
//	})
//	err = writer.WriteDatabase(db)
//
// # Documentation
//
// Full documentation available at: https://git.warky.dev/wdevs/relspecgo
//
// API documentation: go doc git.warky.dev/wdevs/relspecgo/...
//
// # License
//
// See LICENSE file in the repository root.
package relspecgo
@@ -9,7 +9,7 @@ services:
       POSTGRES_PASSWORD: relspec_test_password
       POSTGRES_DB: relspec_test
     ports:
-      - "5433:5432" # Using 5433 to avoid conflicts with local PostgreSQL
+      - "5439:5432" # Using 5439 to avoid conflicts with local PostgreSQL
     volumes:
       - ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql
       - postgres_data:/var/lib/postgresql/data
149 docs/DOMAINS_DRAWDB.md Normal file
@@ -0,0 +1,149 @@
# Domains and DrawDB Areas Integration

## Overview

Domains provide a way to organize tables from potentially multiple schemas into logical business groupings. When working with DrawDB format, domains are automatically imported/exported as **Subject Areas** - a native DrawDB feature for visually grouping tables.

## How It Works

### Writing Domains to DrawDB (Export)

When you export a database with domains to DrawDB format:

1. **Schema Areas** are created automatically for each schema (existing behavior)
2. **Domain Areas** are created for each domain, calculated based on the positions of the tables they contain
3. The domain area bounds are automatically calculated to encompass all its tables with a small padding (see the sketch after the example below)

```go
// Example: Creating a domain and exporting to DrawDB
db := models.InitDatabase("mydb")

// Create an "authentication" domain
authDomain := models.InitDomain("authentication")
authDomain.Tables = append(authDomain.Tables,
	models.InitDomainTable("users", "public"),
	models.InitDomainTable("roles", "public"),
	models.InitDomainTable("permissions", "public"),
)
db.Domains = append(db.Domains, authDomain)

// Create a "financial" domain spanning multiple schemas
finDomain := models.InitDomain("financial")
finDomain.Tables = append(finDomain.Tables,
	models.InitDomainTable("accounts", "public"),
	models.InitDomainTable("transactions", "public"),
	models.InitDomainTable("ledger", "finance"), // Different schema!
)
db.Domains = append(db.Domains, finDomain)

// Write to DrawDB - domains become subject areas
writer := drawdb.NewWriter(&writers.WriterOptions{
	OutputPath: "schema.json",
})
writer.WriteDatabase(db)
```

The resulting DrawDB JSON will have Subject Areas for both:
- "authentication" area containing the auth tables
- "financial" area containing the financial tables from both schemas

### Reading Domains from DrawDB (Import)

When you import a DrawDB file with Subject Areas:

1. **Subject Areas** are automatically converted to **Domains**
2. Tables are assigned to a domain if they fall within the area's visual bounds (see the containment sketch below)
3. Table references include both the table name and schema name

```go
// Example: Reading DrawDB with areas
reader := drawdb.NewReader(&readers.ReaderOptions{
	FilePath: "schema.json",
})

db, err := reader.ReadDatabase()
if err != nil {
	log.Fatal(err)
}

// Access domains
for _, domain := range db.Domains {
	fmt.Printf("Domain: %s\n", domain.Name)
	for _, domainTable := range domain.Tables {
		fmt.Printf("  - %s.%s\n", domainTable.SchemaName, domainTable.TableName)

		// Access the actual table reference if loaded
		if domainTable.RefTable != nil {
			fmt.Printf("    Description: %s\n", domainTable.RefTable.Description)
		}
	}
}
```
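
The assignment rule in step 2 reduces to a rectangle-containment test. A minimal sketch, assuming full containment of the table's box in the area's box (the `box` type is hypothetical, and whether the real check requires full containment or mere overlap is an implementation detail of the reader):

```go
package main

import "fmt"

// box is a hypothetical rectangle; the real reader works from DrawDB's
// own table/area coordinates rather than this type.
type box struct{ x, y, w, h float64 }

// inside reports whether inner lies fully within outer.
func inside(inner, outer box) bool {
	return inner.x >= outer.x && inner.y >= outer.y &&
		inner.x+inner.w <= outer.x+outer.w &&
		inner.y+inner.h <= outer.y+outer.h
}

func main() {
	area := box{0, 0, 500, 400}
	users := box{40, 40, 200, 120}
	fmt.Println(inside(users, area)) // true: "users" would join this domain
}
```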

## Domain Structure

```go
type Domain struct {
	Name        string         // Domain name (e.g., "authentication", "user_data")
	Description string         // Optional human-readable description
	Tables      []*DomainTable // Tables belonging to this domain
	Comment     string         // Optional comment
	Metadata    map[string]any // Extensible metadata
	Sequence    uint           // Ordering hint
}

type DomainTable struct {
	TableName  string // Table name
	SchemaName string // Schema containing the table
	Sequence   uint   // Ordering hint
	RefTable   *Table // Pointer to actual table (in-memory only, not serialized)
}
```

## Multi-Schema Domains

One of the key features of domains is that they can span multiple schemas:

```
Domain: "user_data"
├── public.users
├── public.profiles
├── public.user_preferences
├── auth.user_sessions
└── auth.mfa_devices
```

This allows you to organize related tables even when they're stored in different schemas.

## Visual Organization in DrawDB

When viewing the exported DrawDB file in DrawDB Editor:

1. **Schema areas** appear in one color (original behavior)
2. **Domain areas** appear in a different color
3. Domain area bounds are calculated to fit all contained tables
4. Areas can overlap - a table can visually belong to multiple areas

## Integration with Other Formats

Currently, domain/area integration is implemented for DrawDB format.

To implement similar functionality for other formats:

1. Identify if the format has a native grouping/area feature
2. Add conversion logic in the reader to map format areas → Domain model
3. Add conversion logic in the writer to map Domain model → format areas

Example formats that could support domains:
- **DBML**: Could use DBML's `TableGroup` feature
- **DrawDB**: ✅ Already implemented (Subject Areas)
- **GraphQL**: Could use schema directives
- **Custom formats**: Implement as needed

## Tips and Best Practices

1. **Keep domains focused**: Each domain should represent a distinct business area
2. **Document purposes**: Use Description and Comment fields to explain each domain
3. **Use meaningful names**: Domain names should clearly reflect their purpose
4. **Maintain schema consistency**: Keep related tables together in the same schema when possible
5. **Use metadata**: Store tool-specific information in the Metadata field
360 docs/SCRIPTS_COMMAND.md Normal file
@@ -0,0 +1,360 @@
# RelSpec Scripts Command

The `relspec scripts` command provides tools for managing and executing SQL migration scripts from a directory structure.

## Overview

The scripts command supports two main operations:
- **list**: List SQL scripts from a directory in execution order
- **execute**: Execute SQL scripts against a PostgreSQL database

Scripts are read from a directory (recursively) and executed in a deterministic order based on **Priority** (ascending) and **Sequence** (ascending).

## File Naming Convention

SQL scripts must follow this naming pattern (both separators are supported):

```
{priority}_{sequence}_{name}.{sql|pgsql}   (underscore format)
{priority}-{sequence}-{name}.{sql|pgsql}   (hyphen format)
```

### Components

- **priority**: Integer (0-9999) - Execution priority level (lower executes first)
- **sequence**: Integer (0-9999) - Order within priority level (lower executes first)
- **separator**: Underscore `_` or hyphen `-` (both formats can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens)
- **extension**: `.sql` or `.pgsql`

### Valid Examples

**Underscore format:**
```
1_001_create_users.sql         # Priority 1, Sequence 1
1_002_create_posts.sql         # Priority 1, Sequence 2
1_003_create_comments.pgsql    # Priority 1, Sequence 3
2_001_add_indexes.sql          # Priority 2, Sequence 1
2_002_add_constraints.sql      # Priority 2, Sequence 2
3_001_seed_users.sql           # Priority 3, Sequence 1
```

**Hyphen format:**
```
1-001-create-users.sql         # Priority 1, Sequence 1
1-002-create-posts.sql         # Priority 1, Sequence 2
1-003-create-comments.pgsql    # Priority 1, Sequence 3
10-10-create-newid.pgsql       # Priority 10, Sequence 10
```

**Mixed format (both in same directory):**
```
1_001_create_users.sql         # Priority 1, Sequence 1 (underscore)
1-002-create-posts.sql         # Priority 1, Sequence 2 (hyphen)
2_001_add_indexes.sql          # Priority 2, Sequence 1 (underscore)
```

**Execution Order**: the six underscore-format files above run 1→2→3→4→5→6 (sorted by Priority, then Sequence)

### Invalid Examples (Will be ignored)

```
migration.sql           # Missing priority/sequence
create_users.sql        # Missing priority/sequence
1_create_users.sql      # Missing sequence
1_001_test.txt          # Wrong extension
README.md               # Not a SQL file
```
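
If you want to pre-validate filenames in your own tooling, the convention above can be checked with a single regular expression. A minimal sketch; this pattern approximates the documented convention and is not necessarily the exact expression the sqldir reader uses internally:

```go
package main

import (
	"fmt"
	"regexp"
)

// scriptName approximates {priority}{sep}{sequence}{sep}{name}.{sql|pgsql},
// where sep is "_" or "-". Illustrative only.
var scriptName = regexp.MustCompile(`^(\d{1,4})[_-](\d{1,4})[_-]([A-Za-z0-9_-]+)\.(sql|pgsql)$`)

func main() {
	files := []string{"1_001_create_users.sql", "10-10-create-newid.pgsql", "migration.sql"}
	for _, f := range files {
		if m := scriptName.FindStringSubmatch(f); m != nil {
			fmt.Printf("%-28s -> priority=%s sequence=%s name=%s\n", f, m[1], m[2], m[3])
		} else {
			fmt.Printf("%-28s -> ignored\n", f)
		}
	}
}
```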

## Directory Structure

Scripts can be organized in subdirectories. The scanner recursively finds all matching SQL files:

```
migrations/
├── 1_001_create_schema.sql
├── 1_002_create_users.sql
├── tables/
│   ├── 1_003_create_posts.sql
│   └── 1_004_create_comments.pgsql
├── indexes/
│   └── 2_001_add_indexes.sql
└── data/
    └── 3_001_seed_data.sql
```

All files will be found and executed in Priority→Sequence order regardless of directory structure.
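
Recursive discovery of this kind is what a `filepath.WalkDir` sweep gives you; a minimal sketch of the idea (the sqldir reader's internals may differ):

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
	"strings"
)

func main() {
	var found []string
	err := filepath.WalkDir("./migrations", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		// Keep only .sql / .pgsql files; name-pattern validation would
		// happen on top of this (see the regexp sketch earlier).
		ext := strings.ToLower(filepath.Ext(path))
		if ext == ".sql" || ext == ".pgsql" {
			found = append(found, path)
		}
		return nil
	})
	if err != nil {
		fmt.Println("walk failed:", err)
		return
	}
	fmt.Println(found)
}
```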

## Commands

### relspec scripts list

List all SQL scripts in a directory and show their execution order.

**Usage:**
```bash
relspec scripts list --dir <directory> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")

**Example:**
```bash
relspec scripts list --dir ./migrations
```

**Output:**
```
=== SQL Scripts List ===
Directory: ./migrations

Found 5 script(s) in execution order:

No.  Priority   Sequence Name                           Lines
---- --------   -------- ------------------------------ -----
1    1          1        create_users                   7
2    1          2        create_posts                   8
3    2          1        add_indexes                    4
4    2          2        add_constraints                6
5    3          1        seed_data                      4
```

### relspec scripts execute

Execute SQL scripts from a directory against a PostgreSQL database.

**Usage:**
```bash
relspec scripts execute --dir <directory> --conn <connection-string> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--conn <string>` (required): PostgreSQL connection string
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")
- `--ignore-errors`: Continue executing scripts even if errors occur (default: false)

**Connection String Formats:**

```bash
# Standard PostgreSQL URLs
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require

# Key-value format
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
```

**Examples:**

```bash
# Execute migration scripts
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://user:pass@localhost:5432/mydb"

# Execute with custom schema
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://localhost/mydb" \
  --schema public

# Execute with SSL disabled
relspec scripts execute \
  --dir ./sql \
  --conn "postgres://user:pass@localhost/db?sslmode=disable"

# Execute using key-value connection string
relspec scripts execute \
  --dir ./migrations \
  --conn "host=localhost port=5432 user=admin password=secret dbname=prod"
```

**Output:**
```
=== SQL Scripts Execution ===
Started at: 2025-12-30 22:30:15
Directory: ./migrations
Database: postgres://user:***@localhost:5432/mydb

[1/2] Reading SQL scripts...
  ✓ Found 4 script(s)

[2/2] Executing scripts in order (Priority → Sequence)...

Executing script: create_users (Priority=1, Sequence=1)
✓ Successfully executed: create_users
Executing script: create_posts (Priority=1, Sequence=2)
✓ Successfully executed: create_posts
Executing script: add_indexes (Priority=2, Sequence=1)
✓ Successfully executed: add_indexes
Executing script: seed_data (Priority=2, Sequence=2)
✓ Successfully executed: seed_data

=== Execution Complete ===
Completed at: 2025-12-30 22:30:16
Successfully executed 4 script(s)
```

## Execution Behavior

### Execution Order

Scripts are **always** executed in this order:
1. Sort by **Priority** (ascending)
2. Within same priority, sort by **Sequence** (ascending)
3. Within the same priority and sequence, sort by **Name** (ascending) as a final tiebreak (see the sketch after the example below)

Example:
```
Priority 1, Sequence 1   → Executes 1st
Priority 1, Sequence 2   → Executes 2nd
Priority 1, Sequence 10  → Executes 3rd
Priority 2, Sequence 1   → Executes 4th
Priority 2, Sequence 5   → Executes 5th
Priority 10, Sequence 1  → Executes 6th
```
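
This ordering mirrors the comparator in cmd/relspec/scripts.go shown earlier in this change: priority first, then sequence, then name. As a standalone, runnable sketch:

```go
package main

import (
	"fmt"
	"sort"
)

type script struct {
	priority int
	sequence uint
	name     string
}

func main() {
	scripts := []script{
		{10, 1, "late"}, {1, 10, "c"}, {1, 2, "b"}, {2, 1, "d"}, {1, 1, "a"},
	}
	// Same three-level comparison the CLI applies before executing.
	sort.Slice(scripts, func(i, j int) bool {
		if scripts[i].priority != scripts[j].priority {
			return scripts[i].priority < scripts[j].priority
		}
		if scripts[i].sequence != scripts[j].sequence {
			return scripts[i].sequence < scripts[j].sequence
		}
		return scripts[i].name < scripts[j].name
	})
	for i, s := range scripts {
		fmt.Printf("%d: priority=%d sequence=%d %s\n", i+1, s.priority, s.sequence, s.name)
	}
}
```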

### Error Handling

- **Stop on First Error**: By default, execution stops immediately when any script fails (pass `--ignore-errors` to continue)
- **No Automatic Rollback**: Scripts executed before the failure remain committed
- **Error Details**: Full error message with script name, priority, and sequence

Example error output:
```
Executing script: add_indexes (Priority=2, Sequence=1)
Error: execution failed: failed to execute script add_indexes (Priority=2, Sequence=1):
ERROR: syntax error at or near "IDNEX" (SQLSTATE 42601)
```

### Transaction Behavior

- Each script executes in its own implicit transaction (PostgreSQL default)
- No automatic transaction wrapping across multiple scripts
- For atomic migrations, manually wrap SQL in `BEGIN`/`COMMIT` blocks

### Empty Scripts

Scripts with empty SQL content are silently skipped.

## Use Cases

### Development Migrations

Organize database changes by priority levels:

```
migrations/
├── 1_xxx_schema.sql        # Priority 1: Core schema
├── 1_xxx_tables.sql
├── 2_xxx_indexes.sql       # Priority 2: Performance
├── 2_xxx_constraints.sql
└── 3_xxx_seed.sql          # Priority 3: Data
```

### Multi-Environment Deployments

Use priority levels for environment-specific scripts:

```
deploy/
├── 1_xxx_core_schema.sql   # Priority 1: All environments
├── 2_xxx_dev_data.sql      # Priority 2: Dev only
├── 2_xxx_staging_data.sql  # Priority 2: Staging only
└── 3_xxx_prod_data.sql     # Priority 3: Production only
```

### Incremental Rollouts

Use sequence for ordered feature rollouts:

```
features/
├── 1_001_feature_a_schema.sql
├── 1_002_feature_a_data.sql
├── 1_003_feature_b_schema.sql
└── 1_004_feature_b_data.sql
```

## Integration with RelSpec

The scripts command uses:
- **Reader**: `pkg/readers/sqldir/` - Reads SQL files into `models.Schema.Scripts`
- **Writer**: `pkg/writers/sqlexec/` - Executes scripts from `models.Schema.Scripts`

You can use these packages programmatically:

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

// Read scripts
reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "./migrations",
})
db, _ := reader.ReadDatabase()

// Execute scripts
writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://localhost/mydb",
	},
})
writer.WriteDatabase(db)
```

## Best Practices

### Naming

- Use zero-padded sequences: `001`, `002`, `010` (not `1`, `2`, `10`)
- Use descriptive names: `create_users_table`, not `table1`
- Group related changes: same priority for related DDL

### Organization

- Keep scripts small and focused (one logical change per file)
- Use priority levels to organize phases (schema → indexes → data)
- Document complex migrations with SQL comments

### Safety

- Always test migrations in development first
- Use `scripts list` to verify execution order before running
- Back up production databases before executing
- Consider using transactions for critical changes
- Review generated SQL before execution

### Version Control

- Commit scripts to version control
- Never modify executed scripts (create new ones instead)
- Use meaningful commit messages
- Tag releases with migration checkpoints

## Limitations

- PostgreSQL only (currently)
- No built-in rollback support
- No migration state tracking (no "already executed" detection)
- No dry-run mode
- Stops on first error (no partial execution tracking)

## Future Enhancements

Potential future features:
- Migration state tracking (executed scripts table)
- Rollback script support (using `models.Script.Rollback` field)
- Dry-run mode (validate without executing)
- Transaction wrapping (all-or-nothing execution)
- Multi-database support (MySQL, SQLite, etc.)
- Parallel execution for independent scripts
393 docs/SCRIPTS_EXAMPLES.md Normal file
@@ -0,0 +1,393 @@
# RelSpec Scripts Command - Quick Examples

## Basic Workflow

### 1. Create migration directory structure

```bash
mkdir -p migrations
```

### 2. Create migration scripts

Both underscore and hyphen formats are supported. Examples below use underscore format,
but you can also use: `1-001-create-users-table.sql`

```bash
# Priority 1: Core schema
cat > migrations/1_001_create_users_table.sql << 'EOF'
CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    username VARCHAR(100) NOT NULL UNIQUE,
    email VARCHAR(255) NOT NULL UNIQUE,
    password_hash VARCHAR(255) NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_users_username ON users(username);
CREATE INDEX idx_users_email ON users(email);
EOF

cat > migrations/1_002_create_posts_table.sql << 'EOF'
CREATE TABLE posts (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    title VARCHAR(200) NOT NULL,
    content TEXT,
    published BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
EOF

# Priority 2: Additional indexes
cat > migrations/2_001_add_post_indexes.sql << 'EOF'
CREATE INDEX idx_posts_user_id ON posts(user_id);
CREATE INDEX idx_posts_published ON posts(published);
CREATE INDEX idx_posts_created_at ON posts(created_at);
EOF

# Priority 3: Seed data
cat > migrations/3_001_seed_admin_user.sql << 'EOF'
INSERT INTO users (username, email, password_hash)
VALUES ('admin', 'admin@example.com', 'hashed_password_here')
ON CONFLICT (username) DO NOTHING;
EOF
```

### 3. List scripts to verify order

```bash
relspec scripts list --dir migrations
```

Output:
```
=== SQL Scripts List ===
Directory: migrations

Found 4 script(s) in execution order:

No.  Priority   Sequence Name                           Lines
---- --------   -------- ------------------------------ -----
1    1          1        create_users_table             13
2    1          2        create_posts_table             11
3    2          1        add_post_indexes               4
4    3          1        seed_admin_user                4
```

### 4. Execute against database

```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://myuser:mypass@localhost:5432/myapp"
```

## Real-World Examples

### Example 1: E-commerce Database Setup

```bash
# Directory structure
migrations/
├── 1_001_create_users.sql
├── 1_002_create_products.sql
├── 1_003_create_orders.sql
├── 1_004_create_order_items.sql
├── 2_001_add_indexes.sql
├── 2_002_add_constraints.sql
├── 3_001_seed_categories.sql
└── 3_002_seed_sample_products.sql

# Execute
relspec scripts execute \
  --dir migrations \
  --conn "postgres://ecommerce_user:pass@db.example.com:5432/ecommerce_prod?sslmode=require"
```

### Example 2: Multi-Schema Database

```bash
# Organize by schema using subdirectories
migrations/
├── public/
│   ├── 1_001_create_users.sql
│   └── 1_002_create_sessions.sql
├── analytics/
│   ├── 1_001_create_events.sql
│   └── 2_001_create_views.sql
└── reporting/
    └── 1_001_create_reports.sql

# Execute (all schemas processed together)
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/multi_schema_db" \
  --schema public
```

### Example 3: Development Environment Setup

```bash
# Create local development database
createdb myapp_dev

# Run migrations
relspec scripts execute \
  --dir ./db/migrations \
  --conn "postgres://localhost/myapp_dev?sslmode=disable"

# Verify
psql myapp_dev -c "\dt"
```

### Example 4: CI/CD Pipeline

```yaml
# .github/workflows/deploy.yml
- name: Run database migrations
  run: |
    relspec scripts list --dir migrations
    relspec scripts execute \
      --dir migrations \
      --conn "${{ secrets.DATABASE_URL }}"
```

### Example 5: Docker Compose Integration

```yaml
# docker-compose.yml
services:
  postgres:
    image: postgres:16
    environment:
      POSTGRES_DB: myapp
      POSTGRES_USER: myuser
      POSTGRES_PASSWORD: mypass
    ports:
      - "5432:5432"

  migrate:
    image: relspec:latest
    depends_on:
      - postgres
    volumes:
      - ./migrations:/migrations
    command: >
      scripts execute
      --dir /migrations
      --conn "postgres://myuser:mypass@postgres:5432/myapp"
```

```bash
# Run migrations with docker-compose
docker-compose up -d postgres
sleep 5  # Wait for postgres to be ready
docker-compose run --rm migrate
```

### Example 6: Incremental Feature Rollout

```bash
# Feature branch structure
migrations/
├── 1_100_user_profiles_schema.sql        # Feature: User profiles
├── 1_101_user_profiles_constraints.sql
├── 1_102_user_profiles_indexes.sql
├── 2_100_notifications_schema.sql        # Feature: Notifications
├── 2_101_notifications_constraints.sql
└── 2_102_notifications_indexes.sql

# Deploy just user profiles (Priority 1)
# Then later deploy notifications (Priority 2)
```

### Example 7: Rollback Strategy (Manual)

```bash
# Forward migration
cat > migrations/1_001_add_column.sql << 'EOF'
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
EOF

# Create manual rollback script (not auto-executed)
cat > rollbacks/1_001_remove_column.sql << 'EOF'
ALTER TABLE users DROP COLUMN phone;
EOF

# If needed, manually execute rollback
psql myapp -f rollbacks/1_001_remove_column.sql
```

### Example 8: Complex Schema Changes

```sql
-- migrations/1_001_alter_users_table.sql
BEGIN;

-- Add new column
ALTER TABLE users ADD COLUMN full_name VARCHAR(200);

-- Populate from existing data
UPDATE users SET full_name = username WHERE full_name IS NULL;

-- Make it required
ALTER TABLE users ALTER COLUMN full_name SET NOT NULL;

-- Add index
CREATE INDEX idx_users_full_name ON users(full_name);

COMMIT;
```

Execute:
```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/myapp"
```

## File Naming Format Examples

### Underscore Format (Traditional)
```
migrations/
├── 1_001_create_users.sql
├── 1_002_create_posts.sql
├── 2_001_add_indexes.sql
└── 3_001_seed_data.sql
```

### Hyphen Format (Alternative)
```
migrations/
├── 1-001-create-users.sql
├── 1-002-create-posts.sql
├── 10-10-create-newid.pgsql
└── 2-001-add-indexes.sql
```

### Mixed Format (Both in Same Directory)
```
migrations/
├── 1_001_create_users.sql          # Underscore format
├── 1-002-create-posts.sql          # Hyphen format
├── 2_001_add_indexes.sql           # Underscore format
└── 10-10-special-migration.pgsql   # Hyphen format
```

**Note:** All three approaches work identically - use whichever naming style you prefer!

## Common Patterns

### Pattern 1: Schema → Indexes → Constraints → Data

```
|
1_xxx_*.sql # Tables and basic structure
|
||||||
|
2_xxx_*.sql # Indexes for performance
|
||||||
|
3_xxx_*.sql # Foreign keys and constraints
|
||||||
|
4_xxx_*.sql # Seed/reference data
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 2: Feature-Based Organization
|
||||||
|
|
||||||
|
```
|
||||||
|
1_001_feature_auth_users.sql
|
||||||
|
1_002_feature_auth_sessions.sql
|
||||||
|
1_003_feature_auth_permissions.sql
|
||||||
|
2_001_feature_blog_posts.sql
|
||||||
|
2_002_feature_blog_comments.sql
|
||||||
|
3_001_feature_payments_transactions.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 3: Date-Based Versioning
|
||||||
|
|
||||||
|
```
|
||||||
|
1_20250130_create_users.sql
|
||||||
|
2_20250131_add_user_indexes.sql
|
||||||
|
3_20250201_create_posts.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 4: Environment-Specific Scripts
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Base migrations (all environments)
|
||||||
|
migrations/base/
|
||||||
|
├── 1_001_create_users.sql
|
||||||
|
├── 1_002_create_products.sql
|
||||||
|
|
||||||
|
# Development-specific
|
||||||
|
migrations/dev/
|
||||||
|
└── 9_001_seed_test_data.sql
|
||||||
|
|
||||||
|
# Production-specific
|
||||||
|
migrations/prod/
|
||||||
|
└── 9_001_seed_production_config.sql
|
||||||
|
|
||||||
|
# Execute different paths based on environment
|
||||||
|
ENV=dev
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations/base \
|
||||||
|
--conn "postgres://localhost/myapp_${ENV}"
|
||||||
|
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations/${ENV} \
|
||||||
|
--conn "postgres://localhost/myapp_${ENV}"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Check script order before execution
|
||||||
|
```bash
|
||||||
|
relspec scripts list --dir migrations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test against local database first
|
||||||
|
```bash
|
||||||
|
# Create test database
|
||||||
|
createdb myapp_test
|
||||||
|
|
||||||
|
# Test migrations
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://localhost/myapp_test"
|
||||||
|
|
||||||
|
# Inspect results
|
||||||
|
psql myapp_test
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
dropdb myapp_test
|
||||||
|
```
|
||||||
|
|
||||||
|

### Validate SQL syntax

```bash
# Run each script inside a transaction and roll it back,
# so errors surface without keeping any changes
for f in migrations/*.sql; do
  echo "Checking $f..."
  (echo "BEGIN;"; cat "$f"; echo "ROLLBACK;") | psql myapp -v ON_ERROR_STOP=1
done
```

### Debug connection issues

```bash
# Test connection string
psql "postgres://user:pass@localhost:5432/myapp"

# If that works, use the same string for relspec
relspec scripts execute \
  --dir migrations \
  --conn "postgres://user:pass@localhost:5432/myapp"
```

## Tips

1. **Always review execution order** with `list` before running `execute`
2. **Test in development** before running against production
3. **Use zero-padded sequences** (001, 002, not 1, 2) for consistent sorting
4. **Keep scripts idempotent** when possible (use IF NOT EXISTS, ON CONFLICT, etc.)
5. **Back up production** before running migrations
6. **Use transactions** for complex multi-statement migrations
7. **Document breaking changes** with SQL comments in the migration files
8. **Version control everything** - commit migrations with code changes

572 docs/TEMPLATE_MODE.md (Normal file)
@@ -0,0 +1,572 @@

# RelSpec Template Mode

The `templ` command lets you transform database schemas using custom Go text templates. It provides a rich set of template functions and flexible execution modes for generating any type of output from your database schema.

## Table of Contents

- [Quick Start](#quick-start)
- [Execution Modes](#execution-modes)
- [Template Functions](#template-functions)
  - [String Utilities](#string-utilities)
  - [Type Conversion](#type-conversion)
  - [Filtering](#filtering)
  - [Formatting](#formatting)
  - [Loop Helpers](#loop-helpers)
  - [Sorting Helpers](#sorting-helpers)
  - [Safe Access](#safe-access)
  - [Utility Functions](#utility-functions)
- [Data Model](#data-model)
- [Examples](#examples)

## Quick Start

```bash
# Generate documentation from a database
relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
  --template docs.tmpl --output schema-docs.md

# Generate TypeScript models (one file per table)
relspec templ --from dbml --from-path schema.dbml \
  --template model.tmpl --mode table \
  --output ./models/ \
  --filename-pattern "{{.Name | toCamelCase}}.ts"

# Output to stdout
relspec templ --from json --from-path schema.json \
  --template report.tmpl
```

## Execution Modes

The `--mode` flag controls how the template is executed:

| Mode | Description | Output | When to Use |
|------|-------------|--------|-------------|
| `database` | Execute once for entire database | Single file | Documentation, reports, overview files |
| `schema` | Execute once per schema | One file per schema | Schema-specific documentation |
| `domain` | Execute once per domain | One file per domain | Domain-based documentation, domain exports |
| `script` | Execute once per script | One file per script | Script processing |
| `table` | Execute once per table | One file per table | Model generation, table docs |

### Filename Patterns

For multi-file modes (`schema`, `domain`, `script`, `table`), use `--filename-pattern` to control output filenames:

```bash
# Default pattern
--filename-pattern "{{.Name}}.txt"

# With transformations
--filename-pattern "{{.Name | toCamelCase}}.ts"

# Nested directories
--filename-pattern "{{.Schema}}/{{.Name}}.md"

# Complex patterns
--filename-pattern "{{.ParentSchema.Name}}/models/{{.Name | toPascalCase}}Model.java"
```

## Template Functions

### String Utilities

Transform and manipulate strings in your templates.

| Function | Description | Example | Output |
|----------|-------------|---------|--------|
| `toUpper` | Convert to uppercase | `{{ "hello" \| toUpper }}` | `HELLO` |
| `toLower` | Convert to lowercase | `{{ "HELLO" \| toLower }}` | `hello` |
| `toCamelCase` | Convert to camelCase | `{{ "user_name" \| toCamelCase }}` | `userName` |
| `toPascalCase` | Convert to PascalCase | `{{ "user_name" \| toPascalCase }}` | `UserName` |
| `toSnakeCase` | Convert to snake_case | `{{ "UserName" \| toSnakeCase }}` | `user_name` |
| `toKebabCase` | Convert to kebab-case | `{{ "UserName" \| toKebabCase }}` | `user-name` |
| `pluralize` | Convert to plural | `{{ "user" \| pluralize }}` | `users` |
| `singularize` | Convert to singular | `{{ "users" \| singularize }}` | `user` |
| `title` | Convert to Title Case | `{{ "hello world" \| title }}` | `Hello World` |
| `trim` | Trim whitespace | `{{ " hello " \| trim }}` | `hello` |
| `trimPrefix` | Remove prefix | `{{ trimPrefix "tbl_users" "tbl_" }}` | `users` |
| `trimSuffix` | Remove suffix | `{{ trimSuffix "users_old" "_old" }}` | `users` |
| `replace` | Replace occurrences | `{{ replace "hello" "l" "L" -1 }}` | `heLLo` |
| `stringContains` | Check if contains substring | `{{ stringContains "hello" "ell" }}` | `true` |
| `hasPrefix` | Check if starts with | `{{ hasPrefix "hello" "hel" }}` | `true` |
| `hasSuffix` | Check if ends with | `{{ hasSuffix "hello" "llo" }}` | `true` |
| `split` | Split by separator | `{{ split "a,b,c" "," }}` | `[a b c]` |
| `join` | Join with separator | `{{ join (list "a" "b") "," }}` | `a,b` |
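
These compose with pipes, so several transformations can run in one expression. A minimal sketch (the `tbl_` prefix is a hypothetical naming convention, not something RelSpec requires):

```
{{/* "tbl_user_accounts" -> "UserAccount" */}}
{{ trimPrefix "tbl_user_accounts" "tbl_" | singularize | toPascalCase }}
```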

### Type Conversion

Convert SQL types to various programming language types.

| Function | Parameters | Description | Example |
|----------|------------|-------------|---------|
| `sqlToGo` | `sqlType`, `nullable` | SQL to Go | `{{ sqlToGo "varchar" true }}` → `string` |
| `sqlToTypeScript` | `sqlType`, `nullable` | SQL to TypeScript | `{{ sqlToTypeScript "integer" false }}` → `number \| null` |
| `sqlToJava` | `sqlType`, `nullable` | SQL to Java | `{{ sqlToJava "varchar" true }}` → `String` |
| `sqlToPython` | `sqlType` | SQL to Python | `{{ sqlToPython "integer" }}` → `int` |
| `sqlToRust` | `sqlType`, `nullable` | SQL to Rust | `{{ sqlToRust "varchar" false }}` → `Option<String>` |
| `sqlToCSharp` | `sqlType`, `nullable` | SQL to C# | `{{ sqlToCSharp "integer" false }}` → `int?` |
| `sqlToPhp` | `sqlType`, `nullable` | SQL to PHP | `{{ sqlToPhp "varchar" false }}` → `?string` |

**Supported SQL Types:**

- Integer: `integer`, `int`, `smallint`, `bigint`, `serial`, `bigserial`
- String: `text`, `varchar`, `char`, `character`, `citext`
- Boolean: `boolean`, `bool`
- Float: `real`, `float`, `double precision`, `numeric`, `decimal`
- Date/Time: `timestamp`, `date`, `time`, `timestamptz`
- Binary: `bytea`
- Special: `uuid`, `json`, `jsonb`, `array`
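
In table mode these converters pair naturally with column data. A hedged sketch of a Go struct body, following the argument order used in the examples below (the column's `.NotNull` flag is passed as the second argument):

```
type {{ .Table.Name | toPascalCase }} struct {
{{- range .Table.Columns | values }}
    {{ .Name | toPascalCase }} {{ sqlToGo .Type .NotNull }}
{{- end }}
}
```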

### Filtering

Filter and select specific database objects.

| Function | Description | Example |
|----------|-------------|---------|
| `filterTables` | Filter tables by pattern | `{{ filterTables .Schema.Tables "user_*" }}` |
| `filterTablesByPattern` | Alias for `filterTables` | `{{ filterTablesByPattern .Schema.Tables "temp_*" }}` |
| `filterColumns` | Filter columns by pattern | `{{ filterColumns .Table.Columns "*_id" }}` |
| `filterColumnsByType` | Filter by SQL type | `{{ filterColumnsByType .Table.Columns "varchar" }}` |
| `filterPrimaryKeys` | Get primary key columns | `{{ filterPrimaryKeys .Table.Columns }}` |
| `filterForeignKeys` | Get foreign key constraints | `{{ filterForeignKeys .Table.Constraints }}` |
| `filterUniqueConstraints` | Get unique constraints | `{{ filterUniqueConstraints .Table.Constraints }}` |
| `filterCheckConstraints` | Get check constraints | `{{ filterCheckConstraints .Table.Constraints }}` |
| `filterNullable` | Get nullable columns | `{{ filterNullable .Table.Columns }}` |
| `filterNotNull` | Get non-nullable columns | `{{ filterNotNull .Table.Columns }}` |

**Pattern Matching:**

- `*` - Match any characters
- `?` - Match a single character
- Example: `user_*` matches `user_profile`, `user_settings`
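
Filter results are ordinary slices, so they can be stored in a variable, tested, and ranged over. A sketch that flags leftover temporary tables in schema mode (the `temp_*` convention is illustrative):

```
{{ $temp := filterTables .Schema.Tables "temp_*" }}
{{- if $temp }}
Warning: {{ len $temp }} temporary table(s) found:
{{- range $temp }}
- {{ .Name }}
{{- end }}
{{- end }}
```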

### Formatting

Format output and add structure to generated code.

| Function | Description | Example |
|----------|-------------|---------|
| `toJSON` | Convert to JSON | `{{ .Database \| toJSON }}` |
| `toJSONPretty` | Pretty-print JSON | `{{ toJSONPretty .Table " " }}` |
| `toYAML` | Convert to YAML | `{{ .Schema \| toYAML }}` |
| `indent` | Indent by spaces | `{{ indent .Column.Description 4 }}` |
| `indentWith` | Indent with prefix | `{{ indentWith .Comment " " }}` |
| `escape` | Escape special chars | `{{ escape .Column.Default }}` |
| `escapeQuotes` | Escape quotes only | `{{ escapeQuotes .String }}` |
| `comment` | Add comment prefix | `{{ comment .Description "//" }}` |
| `quoteString` | Add quotes | `{{ quoteString "value" }}` → `"value"` |
| `unquoteString` | Remove quotes | `{{ unquoteString "\"value\"" }}` → `value` |

**Comment Styles:**

- `//` - C/Go/JavaScript style
- `#` - Python/Shell style
- `--` - SQL style
- `/* */` - Block comment style
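
Formatting helpers nest with parentheses like ordinary template calls. A small sketch that emits a table's description and its column metadata as Go-style comments (assuming table mode):

```
{{- if .Table.Description }}
{{ comment .Table.Description "//" }}
{{- end }}
{{ comment (toJSONPretty .Table.Columns "  ") "//" }}
```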

### Loop Helpers

Iterate and manipulate collections.

| Function | Description | Example |
|----------|-------------|---------|
| `enumerate` | Add index to items | `{{ range enumerate .Tables }}{{ .Index }}: {{ .Value.Name }}{{ end }}` |
| `batch` | Split into chunks | `{{ range batch .Columns 3 }}...{{ end }}` |
| `chunk` | Alias for `batch` | `{{ range chunk .Columns 5 }}...{{ end }}` |
| `reverse` | Reverse order | `{{ range reverse .Tables }}...{{ end }}` |
| `first` | Get first N items | `{{ range first .Tables 5 }}...{{ end }}` |
| `last` | Get last N items | `{{ range last .Tables 3 }}...{{ end }}` |
| `skip` | Skip first N items | `{{ range skip .Tables 2 }}...{{ end }}` |
| `take` | Take first N (alias) | `{{ range take .Tables 10 }}...{{ end }}` |
| `concat` | Concatenate slices | `{{ $all := concat .Schema1.Tables .Schema2.Tables }}` |
| `unique` | Remove duplicates | `{{ $unique := unique .Items }}` |
| `sortBy` | Sort by field | `{{ $sorted := sortBy .Tables "Name" }}` |
| `groupBy` | Group by field | `{{ $grouped := groupBy .Tables "Schema" }}` |
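
`enumerate` is useful wherever output needs positions or separators; `{{ if .Index }}` is false for the first item because index 0 is a zero value in Go templates. A sketch producing a parenthesized column list:

```
({{ range enumerate (values .Table.Columns) }}{{ if .Index }}, {{ end }}{{ .Value.Name }}{{ end }})
```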

### Sorting Helpers

Sort database objects by name or sequence number. All sort functions modify the slice in-place.

**Schema Sorting:**

| Function | Description | Example |
|----------|-------------|---------|
| `sortSchemasByName` | Sort schemas by name | `{{ sortSchemasByName .Database.Schemas false }}` |
| `sortSchemasBySequence` | Sort schemas by sequence | `{{ sortSchemasBySequence .Database.Schemas false }}` |

**Table Sorting:**

| Function | Description | Example |
|----------|-------------|---------|
| `sortTablesByName` | Sort tables by name | `{{ sortTablesByName .Schema.Tables false }}` |
| `sortTablesBySequence` | Sort tables by sequence | `{{ sortTablesBySequence .Schema.Tables true }}` |

**Column Sorting:**

| Function | Description | Example |
|----------|-------------|---------|
| `sortColumnsMapByName` | Convert column map to sorted slice by name | `{{ $cols := sortColumnsMapByName .Table.Columns false }}` |
| `sortColumnsMapBySequence` | Convert column map to sorted slice by sequence | `{{ $cols := sortColumnsMapBySequence .Table.Columns false }}` |
| `sortColumnsByName` | Sort column slice by name | `{{ sortColumnsByName $columns false }}` |
| `sortColumnsBySequence` | Sort column slice by sequence | `{{ sortColumnsBySequence $columns true }}` |

**Other Object Sorting:**

| Function | Description | Example |
|----------|-------------|---------|
| `sortViewsByName` | Sort views by name | `{{ sortViewsByName .Schema.Views false }}` |
| `sortViewsBySequence` | Sort views by sequence | `{{ sortViewsBySequence .Schema.Views false }}` |
| `sortSequencesByName` | Sort sequences by name | `{{ sortSequencesByName .Schema.Sequences false }}` |
| `sortSequencesBySequence` | Sort sequences by sequence | `{{ sortSequencesBySequence .Schema.Sequences false }}` |
| `sortIndexesMapByName` | Convert index map to sorted slice by name | `{{ $idx := sortIndexesMapByName .Table.Indexes false }}` |
| `sortIndexesMapBySequence` | Convert index map to sorted slice by sequence | `{{ $idx := sortIndexesMapBySequence .Table.Indexes false }}` |
| `sortIndexesByName` | Sort index slice by name | `{{ sortIndexesByName $indexes false }}` |
| `sortIndexesBySequence` | Sort index slice by sequence | `{{ sortIndexesBySequence $indexes false }}` |
| `sortConstraintsMapByName` | Convert constraint map to sorted slice by name | `{{ $cons := sortConstraintsMapByName .Table.Constraints false }}` |
| `sortConstraintsByName` | Sort constraint slice by name | `{{ sortConstraintsByName $constraints false }}` |
| `sortRelationshipsMapByName` | Convert relationship map to sorted slice by name | `{{ $rels := sortRelationshipsMapByName .Table.Relationships false }}` |
| `sortRelationshipsByName` | Sort relationship slice by name | `{{ sortRelationshipsByName $relationships false }}` |
| `sortScriptsByName` | Sort scripts by name | `{{ sortScriptsByName .Schema.Scripts false }}` |
| `sortEnumsByName` | Sort enums by name | `{{ sortEnumsByName .Schema.Enums false }}` |

**Sort Parameters:**

- Second parameter: `false` = ascending, `true` = descending
- Example: `{{ sortTablesByName .Schema.Tables true }}` sorts descending (Z-A)
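
Plain `range` over a column map walks keys alphabetically; converting the map to a slice sorted by sequence reproduces the columns' declared order instead. A minimal sketch:

```
{{ $cols := sortColumnsMapBySequence .Table.Columns false }}
{{- range $cols }}
{{ .Name }}: {{ .Type }}
{{- end }}
```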

### Safe Access

Safely access nested data without panicking.

| Function | Description | Example |
|----------|-------------|---------|
| `get` | Get map value | `{{ get .Metadata "key" }}` |
| `getOr` | Get with default | `{{ getOr .Metadata "key" "default" }}` |
| `getPath` | Nested access | `{{ getPath .Config "database.host" }}` |
| `getPathOr` | Nested with default | `{{ getPathOr .Config "db.port" 5432 }}` |
| `safeIndex` | Safe array access | `{{ safeIndex .Tables 0 }}` |
| `safeIndexOr` | Safe with default | `{{ safeIndexOr .Tables 0 nil }}` |
| `has` | Check key exists | `{{ if has .Metadata "key" }}...{{ end }}` |
| `hasPath` | Check nested path | `{{ if hasPath .Config "db.host" }}...{{ end }}` |
| `keys` | Get map keys | `{{ range keys .Metadata }}...{{ end }}` |
| `values` | Get map values | `{{ range values .Table.Columns }}...{{ end }}` |
| `merge` | Merge maps | `{{ $merged := merge .Map1 .Map2 }}` |
| `pick` | Select keys | `{{ $subset := pick .Metadata "name" "desc" }}` |
| `omit` | Exclude keys | `{{ $filtered := omit .Metadata "internal" }}` |
| `sliceContains` | Check contains | `{{ if sliceContains .Names "admin" }}...{{ end }}` |
| `indexOf` | Find index | `{{ $idx := indexOf .Names "admin" }}` |
| `pluck` | Extract field | `{{ $names := pluck .Tables "Name" }}` |
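
These helpers are most useful for optional metadata supplied from outside the schema. A sketch (the `owner` and `deploy.target` keys are hypothetical):

```
Owner: {{ getOr .Metadata "owner" "unassigned" }}
{{- if hasPath .Metadata "deploy.target" }}
Deploy target: {{ getPath .Metadata "deploy.target" }}
{{- end }}
```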

### Utility Functions

General-purpose template helpers.

| Function | Description | Example |
|----------|-------------|---------|
| `add` | Add numbers | `{{ add 5 3 }}` → `8` |
| `sub` | Subtract | `{{ sub 10 3 }}` → `7` |
| `mul` | Multiply | `{{ mul 4 5 }}` → `20` |
| `div` | Divide | `{{ div 10 2 }}` → `5` |
| `mod` | Modulo | `{{ mod 10 3 }}` → `1` |
| `default` | Default value | `{{ default "unknown" .Name }}` |
| `dict` | Create map | `{{ $m := dict "key1" "val1" "key2" "val2" }}` |
| `list` | Create list | `{{ $l := list "a" "b" "c" }}` |
| `seq` | Number sequence | `{{ range seq 1 5 }}{{ . }}{{ end }}` → `12345` |
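
Combined with `enumerate`, the arithmetic helpers cover simple layout chores such as one-based numbering. A sketch listing a schema's tables:

```
{{ range enumerate .Schema.Tables }}{{ add .Index 1 }}. {{ .Value.Name }}
{{ end }}
```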

## Data Model

The data available in templates depends on the execution mode:

### Database Mode

```go
.Database           // *models.Database - Full database
.ParentDatabase     // *models.Database - Same as .Database
.FlatColumns        // []*models.FlatColumn - All columns flattened
.FlatTables         // []*models.FlatTable - All tables flattened
.FlatConstraints    // []*models.FlatConstraint - All constraints
.FlatRelationships  // []*models.FlatRelationship - All relationships
.Summary            // *models.DatabaseSummary - Statistics
.Metadata           // map[string]interface{} - User metadata
```
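
A one-line database-mode sketch using only the fields listed above:

```
{{ .Database.Name }}: {{ len .Database.Schemas }} schema(s), {{ .Summary.TotalTables }} table(s), {{ .Summary.TotalColumns }} column(s)
```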

### Schema Mode

```go
.Schema             // *models.Schema - Current schema
.ParentDatabase     // *models.Database - Parent database context
.FlatColumns        // []*models.FlatColumn - Schema's columns flattened
.FlatTables         // []*models.FlatTable - Schema's tables flattened
.FlatConstraints    // []*models.FlatConstraint - Schema's constraints
.FlatRelationships  // []*models.FlatRelationship - Schema's relationships
.Summary            // *models.DatabaseSummary - Statistics
.Metadata           // map[string]interface{} - User metadata
```

### Domain Mode

```go
.Domain          // *models.Domain - Current domain
.ParentDatabase  // *models.Database - Parent database context
.Metadata        // map[string]interface{} - User metadata
```
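
A minimal domain-mode sketch, using the `Domain` and `DomainTable` fields described under Model Structures below (`.RefTable` may be nil when the referenced table was not loaded, so it is checked first):

```
# Domain: {{ .Domain.Name }}
{{- range .Domain.Tables }}
- {{ .SchemaName }}.{{ .TableName }}{{ if .RefTable }} ({{ len .RefTable.Columns }} columns){{ end }}
{{- end }}
```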

### Table Mode

```go
.Table           // *models.Table - Current table
.ParentSchema    // *models.Schema - Parent schema
.ParentDatabase  // *models.Database - Parent database context
.Metadata        // map[string]interface{} - User metadata
```

### Script Mode

```go
.Script          // *models.Script - Current script
.ParentSchema    // *models.Schema - Parent schema
.ParentDatabase  // *models.Database - Parent database context
.Metadata        // map[string]interface{} - User metadata
```

### Model Structures

**Database:**

- `.Name` - Database name
- `.Schemas` - List of schemas
- `.Domains` - List of domains (business domain groupings)
- `.Description`, `.Comment` - Documentation

**Schema:**

- `.Name` - Schema name
- `.Tables` - List of tables
- `.Views`, `.Sequences`, `.Scripts` - Other objects
- `.Enums` - Enum types

**Domain:**

- `.Name` - Domain name
- `.Tables` - List of DomainTable references
- `.Description`, `.Comment` - Documentation
- `.Metadata` - Custom metadata map

**DomainTable:**

- `.TableName` - Name of the table
- `.SchemaName` - Schema containing the table
- `.RefTable` - Pointer to the actual Table object (if loaded)

**Table:**

- `.Name` - Table name
- `.Schema` - Schema name
- `.Columns` - Map of columns (use the `values` function to iterate)
- `.Constraints` - Map of constraints
- `.Indexes` - Map of indexes
- `.Relationships` - Map of relationships
- `.Description`, `.Comment` - Documentation

**Column:**

- `.Name` - Column name
- `.Type` - SQL type
- `.NotNull` - Is NOT NULL
- `.IsPrimaryKey` - Is primary key
- `.Default` - Default value
- `.Description`, `.Comment` - Documentation

## Examples

### Example 1: TypeScript Interfaces (Table Mode)

**Template:** `typescript-interface.tmpl`

```typescript
// Generated from {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}.{{ .Table.Name }}

export interface {{ .Table.Name | toPascalCase }} {
{{- range .Table.Columns | values }}
  {{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull }};
{{- end }}
}

{{- $fks := filterForeignKeys .Table.Constraints }}
{{- if $fks }}

// Foreign Keys:
{{- range $fks }}
// - {{ .Name }}: references {{ .ReferencedTable }}
{{- end }}
{{- end }}
```

**Command:**

```bash
relspec templ --from pgsql --from-conn "..." \
  --template typescript-interface.tmpl \
  --mode table \
  --output ./src/types/ \
  --filename-pattern "{{.Name | toCamelCase}}.ts"
```

### Example 2: Markdown Documentation (Database Mode)

**Template:** `database-docs.tmpl`

```markdown
# Database: {{ .Database.Name }}

{{ if .Database.Description }}{{ .Database.Description }}{{ end }}

**Statistics:**
- Schemas: {{ len .Database.Schemas }}
- Tables: {{ .Summary.TotalTables }}
- Columns: {{ .Summary.TotalColumns }}

{{ range .Database.Schemas }}
## Schema: {{ .Name }}

{{ range .Tables }}
### {{ .Name }}

{{ if .Description }}{{ .Description }}{{ end }}

**Columns:**

| Column | Type | Nullable | PK | Description |
|--------|------|----------|----|----|
{{- range .Columns | values }}
| {{ .Name }} | `{{ .Type }}` | {{ if .NotNull }}No{{ else }}Yes{{ end }} | {{ if .IsPrimaryKey }}✓{{ end }} | {{ .Description }} |
{{- end }}

{{- $fks := filterForeignKeys .Constraints }}
{{- if $fks }}

**Foreign Keys:**

{{ range $fks }}
- `{{ .Name }}`: {{ join .Columns ", " }} → {{ .ReferencedTable }}({{ join .ReferencedColumns ", " }})
{{- end }}
{{- end }}

{{ end }}
{{ end }}
```

### Example 3: Python SQLAlchemy Models (Table Mode)

**Template:** `python-model.tmpl`

```python
"""{{ .Table.Name | toPascalCase }} model for {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}"""

from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class {{ .Table.Name | toPascalCase }}(Base):
    """{{ if .Table.Description }}{{ .Table.Description }}{{ else }}{{ .Table.Name }} table{{ end }}"""

    __tablename__ = "{{ .Table.Name }}"
    __table_args__ = {"schema": "{{ .ParentSchema.Name }}"}

{{- range .Table.Columns | values }}
    {{ .Name }} = Column({{ sqlToPython .Type }}{{ if .IsPrimaryKey }}, primary_key=True{{ end }}{{ if .NotNull }}, nullable=False{{ end }})
{{- end }}
```

### Example 4: GraphQL Schema (Schema Mode)

**Template:** `graphql-schema.tmpl`

```graphql
"""{{ .Schema.Name }} schema"""

{{ range .Schema.Tables }}
type {{ .Name | toPascalCase }} {
{{- range .Columns | values }}
  {{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull | replace " | null" "" }}{{ if .NotNull }}!{{ end }}
{{- end }}
}

input {{ .Name | toPascalCase }}Input {
{{- $cols := filterNotNull .Columns | filterPrimaryKeys }}
{{- range $cols }}
  {{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type true | replace " | null" "" }}!
{{- end }}
}

{{ end }}
```

### Example 5: SQL Migration (Database Mode)

**Template:** `migration.tmpl`

```sql
-- Migration for {{ .Database.Name }}
-- Generated: {{ .Metadata.timestamp }}

BEGIN;

{{ range .Database.Schemas }}
-- Schema: {{ .Name }}
CREATE SCHEMA IF NOT EXISTS {{ .Name }};

{{ range .Tables }}
CREATE TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }} (
{{- range $i, $col := .Columns | values }}
  {{- if $i }},{{ end }}
  {{ $col.Name }} {{ $col.Type }}{{ if $col.NotNull }} NOT NULL{{ end }}{{ if $col.Default }} DEFAULT {{ $col.Default }}{{ end }}
{{- end }}
);

{{- $pks := filterPrimaryKeys .Columns }}
{{- if $pks }}

ALTER TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }}
  ADD PRIMARY KEY ({{ range $i, $pk := $pks }}{{ if $i }}, {{ end }}{{ $pk.Name }}{{ end }});
{{- end }}

{{ end }}
{{ end }}

COMMIT;
```

## Best Practices

1. **Use Hyphens for Whitespace Control:**

   ```
   {{- removes whitespace before
   -}} removes whitespace after
   ```

2. **Store Intermediate Results:**

   ```
   {{ $pks := filterPrimaryKeys .Table.Columns }}
   {{ if $pks }}...{{ end }}
   ```

3. **Check Before Accessing:**

   ```
   {{ if .Table.Description }}{{ .Table.Description }}{{ end }}
   ```

4. **Use Safe Access for Maps:**

   ```
   {{ getOr .Metadata "key" "default-value" }}
   ```

5. **Iterate Map Values:**

   ```
   {{ range .Table.Columns | values }}...{{ end }}
   ```

## Troubleshooting

**Error: "wrong type for value"**

- Check function parameter order (e.g., `sqlToGo .Type .NotNull`, not `sqlToGo .NotNull .Type`)

**Error: "can't evaluate field"**

- The field doesn't exist on the object
- Use `{{ if .Field }}` to check before accessing

**Empty Output:**

- Check that your mode matches your template's expectations
- Verify data exists (use `{{ .Database | toJSON }}` to inspect)

**Whitespace Issues:**

- Use `{{-` and `-}}` to control whitespace
- Run output through a formatter if needed

## Additional Resources

- [Go Template Documentation](https://pkg.go.dev/text/template)
- [RelSpec Documentation](../README.md)
- [Model Structure Reference](../pkg/models/)
- [Example Templates](../examples/templates/)

18 go.mod
@@ -3,29 +3,45 @@ module git.warky.dev/wdevs/relspecgo
 go 1.24.0
 
 require (
+	github.com/gdamore/tcell/v2 v2.8.1
 	github.com/google/uuid v1.6.0
 	github.com/jackc/pgx/v5 v5.7.6
+	github.com/rivo/tview v0.42.0
 	github.com/spf13/cobra v1.10.2
 	github.com/stretchr/testify v1.11.1
 	github.com/uptrace/bun v1.2.16
+	golang.org/x/text v0.28.0
 	gopkg.in/yaml.v3 v3.0.1
+	modernc.org/sqlite v1.44.3
 )
 
 require (
 	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/dustin/go-humanize v1.0.1 // indirect
+	github.com/gdamore/encoding v1.0.1 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jackc/pgpassfile v1.0.0 // indirect
 	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
 	github.com/jinzhu/inflection v1.0.0 // indirect
 	github.com/kr/pretty v0.3.1 // indirect
+	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/ncruces/go-strftime v1.0.0 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
+	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
+	github.com/rivo/uniseg v0.4.7 // indirect
 	github.com/rogpeppe/go-internal v1.14.1 // indirect
 	github.com/spf13/pflag v1.0.10 // indirect
 	github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
 	github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
 	github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
 	golang.org/x/crypto v0.41.0 // indirect
+	golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
 	golang.org/x/sys v0.38.0 // indirect
-	golang.org/x/text v0.28.0 // indirect
+	golang.org/x/term v0.34.0 // indirect
+	modernc.org/libc v1.67.6 // indirect
+	modernc.org/mathutil v1.7.1 // indirect
+	modernc.org/memory v1.11.0 // indirect
 )

130 go.sum
@@ -3,8 +3,19 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
+github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
+github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
+github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
+github.com/gdamore/tcell/v2 v2.8.1 h1:KPNxyqclpWpWQlPLx6Xui1pMk8S+7+R37h3g07997NU=
+github.com/gdamore/tcell/v2 v2.8.1/go.mod h1:bj8ori1BG3OYMjmb3IklZVWfZUJ1UBQt9JXrOCOhGWw=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
+github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
 github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
 github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
@@ -21,11 +32,27 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
 github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
+github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
+github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
+github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
 github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
+github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
 github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
 github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
 github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
@@ -48,18 +75,117 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU
 github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
 github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
 github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
+golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
 golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
 golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
-golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
-golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
+golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
+golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
+golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
+golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
 golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
+golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
+golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
+golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
+golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
+golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
 golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
 golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
+golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
+golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
+modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
+modernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc=
+modernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM=
+modernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=
+modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
+modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
+modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
+modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=
+modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
+modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
+modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
+modernc.org/libc v1.67.6 h1:eVOQvpModVLKOdT+LvBPjdQqfrZq+pC39BygcT+E7OI=
+modernc.org/libc v1.67.6/go.mod h1:JAhxUVlolfYDErnwiqaLvUqc8nfb2r6S6slAgZOnaiE=
+modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
+modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
+modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
+modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
+modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
+modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
+modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
+modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
+modernc.org/sqlite v1.44.3 h1:+39JvV/HWMcYslAwRxHb8067w+2zowvFOUrOWIy9PjY=
+modernc.org/sqlite v1.44.3/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=
+modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
+modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
+modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
+modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
714
pkg/commontypes/commontypes_test.go
Normal file
714
pkg/commontypes/commontypes_test.go
Normal file
@@ -0,0 +1,714 @@
package commontypes

import (
	"testing"
)

func TestExtractBaseType(t *testing.T) {
	tests := []struct {
		name    string
		sqlType string
		want    string
	}{
		{"varchar with length", "varchar(100)", "varchar"},
		{"VARCHAR uppercase with length", "VARCHAR(255)", "varchar"},
		{"numeric with precision", "numeric(10,2)", "numeric"},
		{"NUMERIC uppercase", "NUMERIC(18,4)", "numeric"},
		{"decimal with precision", "decimal(15,3)", "decimal"},
		{"char with length", "char(50)", "char"},
		{"simple integer", "integer", "integer"},
		{"simple text", "text", "text"},
		{"bigint", "bigint", "bigint"},
		{"With spaces", " varchar(100) ", "varchar"},
		{"No parentheses", "boolean", "boolean"},
		{"Empty string", "", ""},
		{"Mixed case", "VarChar(100)", "varchar"},
		{"timestamp with time zone", "timestamp(6) with time zone", "timestamp"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ExtractBaseType(tt.sqlType)
			if got != tt.want {
				t.Errorf("ExtractBaseType(%q) = %q, want %q", tt.sqlType, got, tt.want)
			}
		})
	}
}

func TestNormalizeType(t *testing.T) {
	// NormalizeType is an alias for ExtractBaseType, test that they behave the same
	testCases := []string{
		"varchar(100)",
		"numeric(10,2)",
		"integer",
		"text",
		" VARCHAR(255) ",
	}

	for _, tc := range testCases {
		t.Run(tc, func(t *testing.T) {
			extracted := ExtractBaseType(tc)
			normalized := NormalizeType(tc)
			if extracted != normalized {
				t.Errorf("ExtractBaseType(%q) = %q, but NormalizeType(%q) = %q",
					tc, extracted, tc, normalized)
			}
		})
	}
}

func TestSQLToGo(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types (nullable)
		{"integer nullable", "integer", true, "int32"},
		{"bigint nullable", "bigint", true, "int64"},
		{"smallint nullable", "smallint", true, "int16"},
		{"serial nullable", "serial", true, "int32"},

		// Integer types (not nullable)
		{"integer not nullable", "integer", false, "*int32"},
		{"bigint not nullable", "bigint", false, "*int64"},
		{"smallint not nullable", "smallint", false, "*int16"},

		// String types (nullable)
		{"text nullable", "text", true, "string"},
		{"varchar nullable", "varchar", true, "string"},
		{"varchar with length nullable", "varchar(100)", true, "string"},

		// String types (not nullable)
		{"text not nullable", "text", false, "*string"},
		{"varchar not nullable", "varchar", false, "*string"},

		// Boolean
		{"boolean nullable", "boolean", true, "bool"},
		{"boolean not nullable", "boolean", false, "*bool"},

		// Float types
		{"real nullable", "real", true, "float32"},
		{"double precision nullable", "double precision", true, "float64"},
		{"real not nullable", "real", false, "*float32"},
		{"double precision not nullable", "double precision", false, "*float64"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "time.Time"},
		{"date nullable", "date", true, "time.Time"},
		{"timestamp not nullable", "timestamp", false, "*time.Time"},

		// Binary
		{"bytea nullable", "bytea", true, "[]byte"},
		{"bytea not nullable", "bytea", false, "[]byte"}, // Slices don't get pointer

		// UUID
		{"uuid nullable", "uuid", true, "string"},
		{"uuid not nullable", "uuid", false, "*string"},

		// JSON
		{"json nullable", "json", true, "string"},
		{"jsonb nullable", "jsonb", true, "string"},

		// Array
		{"array nullable", "array", true, "[]string"},
		{"array not nullable", "array", false, "[]string"}, // Slices don't get pointer

		// Unknown types
		{"unknown type nullable", "unknowntype", true, "interface{}"},
		{"unknown type not nullable", "unknowntype", false, "interface{}"}, // Interface doesn't get pointer
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToGo(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToGo(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestSQLToTypeScript(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types
		{"integer nullable", "integer", true, "number"},
		{"integer not nullable", "integer", false, "number | null"},
		{"bigint nullable", "bigint", true, "number"},
		{"bigint not nullable", "bigint", false, "number | null"},

		// String types
		{"text nullable", "text", true, "string"},
		{"text not nullable", "text", false, "string | null"},
		{"varchar nullable", "varchar", true, "string"},
		{"varchar(100) nullable", "varchar(100)", true, "string"},

		// Boolean
		{"boolean nullable", "boolean", true, "boolean"},
		{"boolean not nullable", "boolean", false, "boolean | null"},

		// Float types
		{"real nullable", "real", true, "number"},
		{"double precision nullable", "double precision", true, "number"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "Date"},
		{"date nullable", "date", true, "Date"},
		{"timestamp not nullable", "timestamp", false, "Date | null"},

		// Binary
		{"bytea nullable", "bytea", true, "Buffer"},
		{"bytea not nullable", "bytea", false, "Buffer | null"},

		// JSON
		{"json nullable", "json", true, "any"},
		{"jsonb nullable", "jsonb", true, "any"},

		// UUID
		{"uuid nullable", "uuid", true, "string"},

		// Unknown types
		{"unknown type nullable", "unknowntype", true, "any"},
		{"unknown type not nullable", "unknowntype", false, "any | null"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToTypeScript(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToTypeScript(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestSQLToPython(t *testing.T) {
	tests := []struct {
		name    string
		sqlType string
		want    string
	}{
		// Integer types
		{"integer", "integer", "int"},
		{"bigint", "bigint", "int"},
		{"smallint", "smallint", "int"},

		// String types
		{"text", "text", "str"},
		{"varchar", "varchar", "str"},
		{"varchar(100)", "varchar(100)", "str"},

		// Boolean
		{"boolean", "boolean", "bool"},

		// Float types
		{"real", "real", "float"},
		{"double precision", "double precision", "float"},
		{"numeric", "numeric", "Decimal"},
		{"decimal", "decimal", "Decimal"},

		// Date/Time types
		{"timestamp", "timestamp", "datetime"},
		{"date", "date", "date"},
		{"time", "time", "time"},

		// Binary
		{"bytea", "bytea", "bytes"},

		// JSON
		{"json", "json", "dict"},
		{"jsonb", "jsonb", "dict"},

		// UUID
		{"uuid", "uuid", "UUID"},

		// Array
		{"array", "array", "list"},

		// Unknown types
		{"unknown type", "unknowntype", "Any"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToPython(tt.sqlType)
			if got != tt.want {
				t.Errorf("SQLToPython(%q) = %q, want %q", tt.sqlType, got, tt.want)
			}
		})
	}
}

func TestSQLToCSharp(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types (nullable)
		{"integer nullable", "integer", true, "int"},
		{"bigint nullable", "bigint", true, "long"},
		{"smallint nullable", "smallint", true, "short"},

		// Integer types (not nullable - value types get ?)
		{"integer not nullable", "integer", false, "int?"},
		{"bigint not nullable", "bigint", false, "long?"},
		{"smallint not nullable", "smallint", false, "short?"},

		// String types (reference types, no ? needed)
		{"text nullable", "text", true, "string"},
		{"text not nullable", "text", false, "string"},
		{"varchar nullable", "varchar", true, "string"},
		{"varchar(100) nullable", "varchar(100)", true, "string"},

		// Boolean
		{"boolean nullable", "boolean", true, "bool"},
		{"boolean not nullable", "boolean", false, "bool?"},

		// Float types
		{"real nullable", "real", true, "float"},
		{"double precision nullable", "double precision", true, "double"},
		{"decimal nullable", "decimal", true, "decimal"},
		{"real not nullable", "real", false, "float?"},
		{"double precision not nullable", "double precision", false, "double?"},
		{"decimal not nullable", "decimal", false, "decimal?"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "DateTime"},
		{"date nullable", "date", true, "DateTime"},
		{"timestamptz nullable", "timestamptz", true, "DateTimeOffset"},
		{"timestamp not nullable", "timestamp", false, "DateTime?"},
		{"timestamptz not nullable", "timestamptz", false, "DateTimeOffset?"},

		// Binary (array type, no ?)
		{"bytea nullable", "bytea", true, "byte[]"},
		{"bytea not nullable", "bytea", false, "byte[]"},

		// UUID
		{"uuid nullable", "uuid", true, "Guid"},
		{"uuid not nullable", "uuid", false, "Guid?"},

		// JSON
		{"json nullable", "json", true, "string"},

		// Unknown types (object is reference type)
		{"unknown type nullable", "unknowntype", true, "object"},
		{"unknown type not nullable", "unknowntype", false, "object"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToCSharp(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToCSharp(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestNeedsTimeImport(t *testing.T) {
	tests := []struct {
		name   string
		goType string
		want   bool
	}{
		{"time.Time type", "time.Time", true},
		{"pointer to time.Time", "*time.Time", true},
		{"int32 type", "int32", false},
		{"string type", "string", false},
		{"bool type", "bool", false},
		{"[]byte type", "[]byte", false},
		{"interface{}", "interface{}", false},
		{"empty string", "", false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := NeedsTimeImport(tt.goType)
			if got != tt.want {
				t.Errorf("NeedsTimeImport(%q) = %v, want %v", tt.goType, got, tt.want)
			}
		})
	}
}

func TestGoTypeMap(t *testing.T) {
	// Test that the map contains expected entries
	expectedMappings := map[string]string{
		"integer":          "int32",
		"bigint":           "int64",
		"text":             "string",
		"boolean":          "bool",
		"double precision": "float64",
		"bytea":            "[]byte",
		"timestamp":        "time.Time",
		"uuid":             "string",
		"json":             "string",
	}

	for sqlType, expectedGoType := range expectedMappings {
		if goType, ok := GoTypeMap[sqlType]; !ok {
			t.Errorf("GoTypeMap missing entry for %q", sqlType)
		} else if goType != expectedGoType {
			t.Errorf("GoTypeMap[%q] = %q, want %q", sqlType, goType, expectedGoType)
		}
	}

	if len(GoTypeMap) == 0 {
		t.Error("GoTypeMap is empty")
	}
}

func TestTypeScriptTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer":          "number",
		"bigint":           "number",
		"text":             "string",
		"boolean":          "boolean",
		"double precision": "number",
		"bytea":            "Buffer",
		"timestamp":        "Date",
		"uuid":             "string",
		"json":             "any",
	}

	for sqlType, expectedTSType := range expectedMappings {
		if tsType, ok := TypeScriptTypeMap[sqlType]; !ok {
			t.Errorf("TypeScriptTypeMap missing entry for %q", sqlType)
		} else if tsType != expectedTSType {
			t.Errorf("TypeScriptTypeMap[%q] = %q, want %q", sqlType, tsType, expectedTSType)
		}
	}

	if len(TypeScriptTypeMap) == 0 {
		t.Error("TypeScriptTypeMap is empty")
	}
}

func TestPythonTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer": "int",
		"bigint":  "int",
		"text":    "str",
		"boolean": "bool",
		"real":    "float",
		"numeric": "Decimal",
		"bytea":   "bytes",
		"date":    "date",
		"uuid":    "UUID",
		"json":    "dict",
	}

	for sqlType, expectedPyType := range expectedMappings {
		if pyType, ok := PythonTypeMap[sqlType]; !ok {
			t.Errorf("PythonTypeMap missing entry for %q", sqlType)
		} else if pyType != expectedPyType {
			t.Errorf("PythonTypeMap[%q] = %q, want %q", sqlType, pyType, expectedPyType)
		}
	}

	if len(PythonTypeMap) == 0 {
		t.Error("PythonTypeMap is empty")
	}
}

func TestCSharpTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer":          "int",
		"bigint":           "long",
		"smallint":         "short",
		"text":             "string",
		"boolean":          "bool",
		"double precision": "double",
		"decimal":          "decimal",
		"bytea":            "byte[]",
		"timestamp":        "DateTime",
		"uuid":             "Guid",
		"json":             "string",
	}

	for sqlType, expectedCSType := range expectedMappings {
		if csType, ok := CSharpTypeMap[sqlType]; !ok {
			t.Errorf("CSharpTypeMap missing entry for %q", sqlType)
		} else if csType != expectedCSType {
			t.Errorf("CSharpTypeMap[%q] = %q, want %q", sqlType, csType, expectedCSType)
		}
	}

	if len(CSharpTypeMap) == 0 {
		t.Error("CSharpTypeMap is empty")
	}
}

func TestSQLToJava(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types
		{"integer nullable", "integer", true, "Integer"},
		{"integer not nullable", "integer", false, "Integer"},
		{"bigint nullable", "bigint", true, "Long"},
		{"smallint nullable", "smallint", true, "Short"},

		// String types
		{"text nullable", "text", true, "String"},
		{"varchar nullable", "varchar", true, "String"},
		{"varchar(100) nullable", "varchar(100)", true, "String"},

		// Boolean
		{"boolean nullable", "boolean", true, "Boolean"},

		// Float types
		{"real nullable", "real", true, "Float"},
		{"double precision nullable", "double precision", true, "Double"},
		{"numeric nullable", "numeric", true, "BigDecimal"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "Timestamp"},
		{"date nullable", "date", true, "Date"},
		{"time nullable", "time", true, "Time"},

		// Binary
		{"bytea nullable", "bytea", true, "byte[]"},

		// UUID
		{"uuid nullable", "uuid", true, "UUID"},

		// JSON
		{"json nullable", "json", true, "String"},

		// Unknown types
		{"unknown type nullable", "unknowntype", true, "Object"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToJava(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToJava(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestSQLToPhp(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types (nullable)
		{"integer nullable", "integer", true, "int"},
		{"bigint nullable", "bigint", true, "int"},
		{"smallint nullable", "smallint", true, "int"},

		// Integer types (not nullable)
		{"integer not nullable", "integer", false, "?int"},
		{"bigint not nullable", "bigint", false, "?int"},

		// String types
		{"text nullable", "text", true, "string"},
		{"text not nullable", "text", false, "?string"},
		{"varchar nullable", "varchar", true, "string"},
		{"varchar(100) nullable", "varchar(100)", true, "string"},

		// Boolean
		{"boolean nullable", "boolean", true, "bool"},
		{"boolean not nullable", "boolean", false, "?bool"},

		// Float types
		{"real nullable", "real", true, "float"},
		{"double precision nullable", "double precision", true, "float"},
		{"real not nullable", "real", false, "?float"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "\\DateTime"},
		{"date nullable", "date", true, "\\DateTime"},
		{"timestamp not nullable", "timestamp", false, "?\\DateTime"},

		// Binary
		{"bytea nullable", "bytea", true, "string"},
		{"bytea not nullable", "bytea", false, "?string"},

		// JSON
		{"json nullable", "json", true, "array"},
		{"json not nullable", "json", false, "?array"},

		// UUID
		{"uuid nullable", "uuid", true, "string"},

		// Unknown types
		{"unknown type nullable", "unknowntype", true, "mixed"},
		{"unknown type not nullable", "unknowntype", false, "mixed"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToPhp(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToPhp(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestSQLToRust(t *testing.T) {
	tests := []struct {
		name     string
		sqlType  string
		nullable bool
		want     string
	}{
		// Integer types (nullable)
		{"integer nullable", "integer", true, "i32"},
		{"bigint nullable", "bigint", true, "i64"},
		{"smallint nullable", "smallint", true, "i16"},

		// Integer types (not nullable)
		{"integer not nullable", "integer", false, "Option<i32>"},
		{"bigint not nullable", "bigint", false, "Option<i64>"},
		{"smallint not nullable", "smallint", false, "Option<i16>"},

		// String types
		{"text nullable", "text", true, "String"},
		{"text not nullable", "text", false, "Option<String>"},
		{"varchar nullable", "varchar", true, "String"},
		{"varchar(100) nullable", "varchar(100)", true, "String"},

		// Boolean
		{"boolean nullable", "boolean", true, "bool"},
		{"boolean not nullable", "boolean", false, "Option<bool>"},

		// Float types
		{"real nullable", "real", true, "f32"},
		{"double precision nullable", "double precision", true, "f64"},
		{"real not nullable", "real", false, "Option<f32>"},
		{"double precision not nullable", "double precision", false, "Option<f64>"},

		// Date/Time types
		{"timestamp nullable", "timestamp", true, "NaiveDateTime"},
		{"timestamptz nullable", "timestamptz", true, "DateTime<Utc>"},
		{"date nullable", "date", true, "NaiveDate"},
		{"time nullable", "time", true, "NaiveTime"},
		{"timestamp not nullable", "timestamp", false, "Option<NaiveDateTime>"},

		// Binary
		{"bytea nullable", "bytea", true, "Vec<u8>"},
		{"bytea not nullable", "bytea", false, "Option<Vec<u8>>"},

		// JSON
		{"json nullable", "json", true, "serde_json::Value"},
		{"json not nullable", "json", false, "Option<serde_json::Value>"},

		// UUID
		{"uuid nullable", "uuid", true, "String"},

		// Unknown types
		{"unknown type nullable", "unknowntype", true, "String"},
		{"unknown type not nullable", "unknowntype", false, "Option<String>"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := SQLToRust(tt.sqlType, tt.nullable)
			if got != tt.want {
				t.Errorf("SQLToRust(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
			}
		})
	}
}

func TestJavaTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer":          "Integer",
		"bigint":           "Long",
		"smallint":         "Short",
		"text":             "String",
		"boolean":          "Boolean",
		"double precision": "Double",
		"numeric":          "BigDecimal",
		"bytea":            "byte[]",
		"timestamp":        "Timestamp",
		"uuid":             "UUID",
		"date":             "Date",
	}

	for sqlType, expectedJavaType := range expectedMappings {
		if javaType, ok := JavaTypeMap[sqlType]; !ok {
			t.Errorf("JavaTypeMap missing entry for %q", sqlType)
		} else if javaType != expectedJavaType {
			t.Errorf("JavaTypeMap[%q] = %q, want %q", sqlType, javaType, expectedJavaType)
		}
	}

	if len(JavaTypeMap) == 0 {
		t.Error("JavaTypeMap is empty")
	}
}

func TestPHPTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer":          "int",
		"bigint":           "int",
		"text":             "string",
		"boolean":          "bool",
		"double precision": "float",
		"bytea":            "string",
		"timestamp":        "\\DateTime",
		"uuid":             "string",
		"json":             "array",
	}

	for sqlType, expectedPHPType := range expectedMappings {
		if phpType, ok := PHPTypeMap[sqlType]; !ok {
			t.Errorf("PHPTypeMap missing entry for %q", sqlType)
		} else if phpType != expectedPHPType {
			t.Errorf("PHPTypeMap[%q] = %q, want %q", sqlType, phpType, expectedPHPType)
		}
	}

	if len(PHPTypeMap) == 0 {
		t.Error("PHPTypeMap is empty")
	}
}

func TestRustTypeMap(t *testing.T) {
	expectedMappings := map[string]string{
		"integer":          "i32",
		"bigint":           "i64",
		"smallint":         "i16",
		"text":             "String",
		"boolean":          "bool",
		"double precision": "f64",
		"real":             "f32",
		"bytea":            "Vec<u8>",
		"timestamp":        "NaiveDateTime",
		"timestamptz":      "DateTime<Utc>",
		"date":             "NaiveDate",
		"json":             "serde_json::Value",
	}

	for sqlType, expectedRustType := range expectedMappings {
		if rustType, ok := RustTypeMap[sqlType]; !ok {
			t.Errorf("RustTypeMap missing entry for %q", sqlType)
		} else if rustType != expectedRustType {
			t.Errorf("RustTypeMap[%q] = %q, want %q", sqlType, rustType, expectedRustType)
		}
	}

	if len(RustTypeMap) == 0 {
		t.Error("RustTypeMap is empty")
	}
}
74 pkg/commontypes/csharp.go Normal file
@@ -0,0 +1,74 @@
package commontypes

import "strings"

// CSharpTypeMap maps PostgreSQL types to C# types
var CSharpTypeMap = map[string]string{
	// Integer types
	"integer":     "int",
	"int":         "int",
	"int4":        "int",
	"smallint":    "short",
	"int2":        "short",
	"bigint":      "long",
	"int8":        "long",
	"serial":      "int",
	"bigserial":   "long",
	"smallserial": "short",

	// String types
	"text":      "string",
	"varchar":   "string",
	"char":      "string",
	"character": "string",
	"citext":    "string",
	"bpchar":    "string",
	"uuid":      "Guid",

	// Boolean
	"boolean": "bool",
	"bool":    "bool",

	// Float types
	"real":             "float",
	"float4":           "float",
	"double precision": "double",
	"float8":           "double",
	"numeric":          "decimal",
	"decimal":          "decimal",

	// Date/Time types
	"timestamp":                   "DateTime",
	"timestamp without time zone": "DateTime",
	"timestamp with time zone":    "DateTimeOffset",
	"timestamptz":                 "DateTimeOffset",
	"date":                        "DateTime",
	"time":                        "TimeSpan",
	"time without time zone":      "TimeSpan",
	"time with time zone":         "DateTimeOffset",
	"timetz":                      "DateTimeOffset",

	// Binary
	"bytea": "byte[]",

	// JSON
	"json":  "string",
	"jsonb": "string",
}

// SQLToCSharp converts SQL types to C# types
func SQLToCSharp(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	csType, ok := CSharpTypeMap[baseType]
	if !ok {
		csType = "object"
	}

	// Handle nullable value types (reference types are already nullable)
	if !nullable && csType != "string" && !strings.HasSuffix(csType, "[]") && csType != "object" {
		return csType + "?"
	}

	return csType
}
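A minimal usage sketch, not taken from the repository, showing the two branches of SQLToCSharp above; the expected outputs match the cases pinned in commontypes_test.go:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// Value types pick up the C# nullable marker when nullable is false...
	fmt.Println(commontypes.SQLToCSharp("integer", false)) // int?
	// ...while reference and array types are returned unchanged.
	fmt.Println(commontypes.SQLToCSharp("text", false))  // string
	fmt.Println(commontypes.SQLToCSharp("bytea", false)) // byte[]
}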
28 pkg/commontypes/doc.go Normal file
@@ -0,0 +1,28 @@
// Package commontypes provides shared type definitions used across multiple packages.
//
// # Overview
//
// The commontypes package contains common data structures, constants, and type
// definitions that are shared between different parts of RelSpec but don't belong
// to the core models package.
//
// # Purpose
//
// This package helps avoid circular dependencies by providing a common location
// for types that are used by multiple packages without creating import cycles.
//
// # Contents
//
// Common types may include:
//   - Shared enums and constants
//   - Utility type aliases
//   - Common error types
//   - Shared configuration structures
//
// # Usage
//
//	import "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
//
//	// Use common types
//	var formatType commontypes.FormatType
package commontypes
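Expanding on the usage note in the package comment: the comment references commontypes.FormatType, which does not appear elsewhere in this diff, so the sketch below instead exercises converters that this change set does define. A minimal sketch, assuming a module that resolves the import path:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// Convert a PostgreSQL column type to several supported target languages.
	fmt.Println(commontypes.SQLToGo("varchar(100)", true))  // string
	fmt.Println(commontypes.SQLToPython("numeric(10,2)"))   // Decimal
	fmt.Println(commontypes.SQLToRust("timestamptz", true)) // DateTime<Utc>
}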
89 pkg/commontypes/golang.go Normal file
@@ -0,0 +1,89 @@
package commontypes

import "strings"

// GoTypeMap maps PostgreSQL types to Go types
var GoTypeMap = map[string]string{
	// Integer types
	"integer":     "int32",
	"int":         "int32",
	"int4":        "int32",
	"smallint":    "int16",
	"int2":        "int16",
	"bigint":      "int64",
	"int8":        "int64",
	"serial":      "int32",
	"bigserial":   "int64",
	"smallserial": "int16",

	// String types
	"text":      "string",
	"varchar":   "string",
	"char":      "string",
	"character": "string",
	"citext":    "string",
	"bpchar":    "string",

	// Boolean
	"boolean": "bool",
	"bool":    "bool",

	// Float types
	"real":             "float32",
	"float4":           "float32",
	"double precision": "float64",
	"float8":           "float64",
	"numeric":          "float64",
	"decimal":          "float64",

	// Date/Time types
	"timestamp":                   "time.Time",
	"timestamp without time zone": "time.Time",
	"timestamp with time zone":    "time.Time",
	"timestamptz":                 "time.Time",
	"date":                        "time.Time",
	"time":                        "time.Time",
	"time without time zone":      "time.Time",
	"time with time zone":         "time.Time",
	"timetz":                      "time.Time",

	// Binary
	"bytea": "[]byte",

	// UUID
	"uuid": "string",

	// JSON
	"json":  "string",
	"jsonb": "string",

	// Array
	"array": "[]string",
}

// SQLToGo converts SQL types to Go types
func SQLToGo(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	goType, ok := GoTypeMap[baseType]
	if !ok {
		goType = "interface{}"
	}

	// nullable keeps the plain Go type
	if nullable {
		return goType
	}

	// Otherwise use pointer types (except for slices and interfaces)
	if !strings.HasPrefix(goType, "[]") && goType != "interface{}" {
		return "*" + goType
	}

	return goType
}

// NeedsTimeImport checks if a Go type requires the time package
func NeedsTimeImport(goType string) bool {
	return strings.Contains(goType, "time.Time")
}
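A minimal sketch, not taken from the repository, of how SQLToGo and NeedsTimeImport might be combined when emitting a struct field; the column and field names are illustrative only, and the outputs are the ones pinned by TestSQLToGo and TestNeedsTimeImport:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// With nullable=false, SQLToGo returns the pointer form (behavior pinned by TestSQLToGo).
	goType := commontypes.SQLToGo("timestamp", false)
	fmt.Printf("CreatedAt %s\n", goType) // CreatedAt *time.Time

	// A generator can use NeedsTimeImport to decide whether to emit `import "time"`.
	fmt.Println(commontypes.NeedsTimeImport(goType)) // true
}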
68 pkg/commontypes/java.go Normal file
@@ -0,0 +1,68 @@
package commontypes

// JavaTypeMap maps PostgreSQL types to Java types
var JavaTypeMap = map[string]string{
	// Integer types
	"integer":     "Integer",
	"int":         "Integer",
	"int4":        "Integer",
	"smallint":    "Short",
	"int2":        "Short",
	"bigint":      "Long",
	"int8":        "Long",
	"serial":      "Integer",
	"bigserial":   "Long",
	"smallserial": "Short",

	// String types
	"text":      "String",
	"varchar":   "String",
	"char":      "String",
	"character": "String",
	"citext":    "String",
	"bpchar":    "String",
	"uuid":      "UUID",

	// Boolean
	"boolean": "Boolean",
	"bool":    "Boolean",

	// Float types
	"real":             "Float",
	"float4":           "Float",
	"double precision": "Double",
	"float8":           "Double",
	"numeric":          "BigDecimal",
	"decimal":          "BigDecimal",

	// Date/Time types
	"timestamp":                   "Timestamp",
	"timestamp without time zone": "Timestamp",
	"timestamp with time zone":    "Timestamp",
	"timestamptz":                 "Timestamp",
	"date":                        "Date",
	"time":                        "Time",
	"time without time zone":      "Time",
	"time with time zone":         "Time",
	"timetz":                      "Time",

	// Binary
	"bytea": "byte[]",

	// JSON
	"json":  "String",
	"jsonb": "String",
}

// SQLToJava converts SQL types to Java types
func SQLToJava(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	javaType, ok := JavaTypeMap[baseType]
	if !ok {
		javaType = "Object"
	}

	// Java uses wrapper classes for nullable types by default
	return javaType
}
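A minimal sketch, not taken from the repository, showing that SQLToJava ignores its nullable flag because the mapped wrapper classes are already nullable (behavior pinned by TestSQLToJava):

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// Both calls agree: wrapper classes are nullable either way.
	fmt.Println(commontypes.SQLToJava("integer", true))  // Integer
	fmt.Println(commontypes.SQLToJava("integer", false)) // Integer
}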
72 pkg/commontypes/php.go Normal file
@@ -0,0 +1,72 @@
package commontypes

// PHPTypeMap maps PostgreSQL types to PHP types
var PHPTypeMap = map[string]string{
	// Integer types
	"integer":     "int",
	"int":         "int",
	"int4":        "int",
	"smallint":    "int",
	"int2":        "int",
	"bigint":      "int",
	"int8":        "int",
	"serial":      "int",
	"bigserial":   "int",
	"smallserial": "int",

	// String types
	"text":      "string",
	"varchar":   "string",
	"char":      "string",
	"character": "string",
	"citext":    "string",
	"bpchar":    "string",
	"uuid":      "string",

	// Boolean
	"boolean": "bool",
	"bool":    "bool",

	// Float types
	"real":             "float",
	"float4":           "float",
	"double precision": "float",
	"float8":           "float",
	"numeric":          "float",
	"decimal":          "float",

	// Date/Time types
	"timestamp":                   "\\DateTime",
	"timestamp without time zone": "\\DateTime",
	"timestamp with time zone":    "\\DateTime",
	"timestamptz":                 "\\DateTime",
	"date":                        "\\DateTime",
	"time":                        "\\DateTime",
	"time without time zone":      "\\DateTime",
	"time with time zone":         "\\DateTime",
	"timetz":                      "\\DateTime",

	// Binary
	"bytea": "string",

	// JSON
	"json":  "array",
	"jsonb": "array",
}

// SQLToPhp converts SQL types to PHP types
func SQLToPhp(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	phpType, ok := PHPTypeMap[baseType]
	if !ok {
		phpType = "mixed"
	}

	// PHP 7.1+ supports nullable types with ?Type syntax
	if !nullable && phpType != "mixed" {
		return "?" + phpType
	}

	return phpType
}
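A minimal sketch, not taken from the repository, of the PHP 7.1+ nullable prefix described in the comment above; outputs match the cases pinned in TestSQLToPhp:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// With nullable=false the "?" prefix is added; "mixed" never gets it.
	fmt.Println(commontypes.SQLToPhp("integer", false))     // ?int
	fmt.Println(commontypes.SQLToPhp("timestamp", false))   // ?\DateTime
	fmt.Println(commontypes.SQLToPhp("unknowntype", false)) // mixed
}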
71 pkg/commontypes/python.go Normal file
@@ -0,0 +1,71 @@
package commontypes

// PythonTypeMap maps PostgreSQL types to Python types
var PythonTypeMap = map[string]string{
	// Integer types
	"integer":     "int",
	"int":         "int",
	"int4":        "int",
	"smallint":    "int",
	"int2":        "int",
	"bigint":      "int",
	"int8":        "int",
	"serial":      "int",
	"bigserial":   "int",
	"smallserial": "int",

	// String types
	"text":      "str",
	"varchar":   "str",
	"char":      "str",
	"character": "str",
	"citext":    "str",
	"bpchar":    "str",
	"uuid":      "UUID",

	// Boolean
	"boolean": "bool",
	"bool":    "bool",

	// Float types
	"real":             "float",
	"float4":           "float",
	"double precision": "float",
	"float8":           "float",
	"numeric":          "Decimal",
	"decimal":          "Decimal",

	// Date/Time types
	"timestamp":                   "datetime",
	"timestamp without time zone": "datetime",
	"timestamp with time zone":    "datetime",
	"timestamptz":                 "datetime",
	"date":                        "date",
	"time":                        "time",
	"time without time zone":      "time",
	"time with time zone":         "time",
	"timetz":                      "time",

	// Binary
	"bytea": "bytes",

	// JSON
	"json":  "dict",
	"jsonb": "dict",

	// Array
	"array": "list",
}

// SQLToPython converts SQL types to Python types
func SQLToPython(sqlType string) string {
	baseType := ExtractBaseType(sqlType)

	pyType, ok := PythonTypeMap[baseType]
	if !ok {
		pyType = "Any"
	}

	// Python uses Optional[Type] for nullable, but we return the base type
	return pyType
}
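As the closing comment notes, SQLToPython always returns the base type. A minimal sketch, not taken from the repository, of how a caller wanting Optional[...] for a nullable column could wrap the result itself:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// SQLToPython returns the bare type; wrap it for a nullable column.
	pyType := commontypes.SQLToPython("numeric(10,2)") // Decimal
	fmt.Printf("Optional[%s]\n", pyType)               // Optional[Decimal]
}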
72 pkg/commontypes/rust.go Normal file
@@ -0,0 +1,72 @@
package commontypes

// RustTypeMap maps PostgreSQL types to Rust types
var RustTypeMap = map[string]string{
	// Integer types
	"integer":     "i32",
	"int":         "i32",
	"int4":        "i32",
	"smallint":    "i16",
	"int2":        "i16",
	"bigint":      "i64",
	"int8":        "i64",
	"serial":      "i32",
	"bigserial":   "i64",
	"smallserial": "i16",

	// String types
	"text":      "String",
	"varchar":   "String",
	"char":      "String",
	"character": "String",
	"citext":    "String",
	"bpchar":    "String",
	"uuid":      "String",

	// Boolean
	"boolean": "bool",
	"bool":    "bool",

	// Float types
	"real":             "f32",
	"float4":           "f32",
	"double precision": "f64",
	"float8":           "f64",
	"numeric":          "f64",
	"decimal":          "f64",

	// Date/Time types (using chrono crate)
	"timestamp":                   "NaiveDateTime",
	"timestamp without time zone": "NaiveDateTime",
	"timestamp with time zone":    "DateTime<Utc>",
	"timestamptz":                 "DateTime<Utc>",
	"date":                        "NaiveDate",
	"time":                        "NaiveTime",
	"time without time zone":      "NaiveTime",
	"time with time zone":         "DateTime<Utc>",
	"timetz":                      "DateTime<Utc>",

	// Binary
	"bytea": "Vec<u8>",

	// JSON
	"json":  "serde_json::Value",
	"jsonb": "serde_json::Value",
}

// SQLToRust converts SQL types to Rust types
func SQLToRust(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	rustType, ok := RustTypeMap[baseType]
	if !ok {
		rustType = "String"
	}

	// nullable keeps the plain Rust type
	if nullable {
		return rustType
	}

	// Otherwise wrap in Option<T>
	return "Option<" + rustType + ">"
}
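A minimal sketch, not taken from the repository, of the Option<T> wrapping in SQLToRust above; outputs match the cases pinned in TestSQLToRust:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// With nullable=true the plain chrono/serde type comes back;
	// with nullable=false it is wrapped in Option<T>.
	fmt.Println(commontypes.SQLToRust("timestamptz", true)) // DateTime<Utc>
	fmt.Println(commontypes.SQLToRust("bytea", false))      // Option<Vec<u8>>
}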
22 pkg/commontypes/sql.go Normal file
@@ -0,0 +1,22 @@
package commontypes

import "strings"

// ExtractBaseType extracts the base type from a SQL type string
// Examples: varchar(100) → varchar, numeric(10,2) → numeric
func ExtractBaseType(sqlType string) string {
	sqlType = strings.ToLower(strings.TrimSpace(sqlType))

	// Remove everything after '('
	if idx := strings.Index(sqlType, "("); idx > 0 {
		sqlType = sqlType[:idx]
	}

	return sqlType
}

// NormalizeType normalizes a SQL type to its base form
// Alias for ExtractBaseType for backwards compatibility
func NormalizeType(sqlType string) string {
	return ExtractBaseType(sqlType)
}
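A minimal sketch, not taken from the repository, of the normalization these two helpers perform; outputs match the cases pinned in TestExtractBaseType:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// Lower-cases, trims, and drops everything from the first "(" onward.
	fmt.Println(commontypes.ExtractBaseType(" VARCHAR(255) "))              // varchar
	fmt.Println(commontypes.ExtractBaseType("timestamp(6) with time zone")) // timestamp
	// NormalizeType is a backwards-compatible alias and returns the same value.
	fmt.Println(commontypes.NormalizeType("numeric(10,2)")) // numeric
}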
75 pkg/commontypes/typescript.go Normal file
@@ -0,0 +1,75 @@
package commontypes

// TypeScriptTypeMap maps PostgreSQL types to TypeScript types
var TypeScriptTypeMap = map[string]string{
	// Integer types
	"integer":     "number",
	"int":         "number",
	"int4":        "number",
	"smallint":    "number",
	"int2":        "number",
	"bigint":      "number",
	"int8":        "number",
	"serial":      "number",
	"bigserial":   "number",
	"smallserial": "number",

	// String types
	"text":      "string",
	"varchar":   "string",
	"char":      "string",
	"character": "string",
	"citext":    "string",
	"bpchar":    "string",
	"uuid":      "string",

	// Boolean
	"boolean": "boolean",
	"bool":    "boolean",

	// Float types
	"real":             "number",
	"float4":           "number",
	"double precision": "number",
	"float8":           "number",
	"numeric":          "number",
	"decimal":          "number",

	// Date/Time types
	"timestamp":                   "Date",
	"timestamp without time zone": "Date",
	"timestamp with time zone":    "Date",
	"timestamptz":                 "Date",
	"date":                        "Date",
	"time":                        "Date",
	"time without time zone":      "Date",
	"time with time zone":         "Date",
	"timetz":                      "Date",

	// Binary
	"bytea": "Buffer",

	// JSON
	"json":  "any",
	"jsonb": "any",

	// Array
	"array": "any[]",
}

// SQLToTypeScript converts SQL types to TypeScript types
func SQLToTypeScript(sqlType string, nullable bool) string {
	baseType := ExtractBaseType(sqlType)

	tsType, ok := TypeScriptTypeMap[baseType]
	if !ok {
		tsType = "any"
	}

	// nullable keeps the bare TypeScript type
	if nullable {
		return tsType
	}

	// Otherwise add the null union
	return tsType + " | null"
}
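A minimal sketch, not taken from the repository, of the null union appended by SQLToTypeScript; outputs match the cases pinned in TestSQLToTypeScript:

package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
	// nullable=true returns the bare TypeScript type; otherwise " | null" is appended.
	fmt.Println(commontypes.SQLToTypeScript("varchar(100)", true)) // string
	fmt.Println(commontypes.SQLToTypeScript("bytea", false))       // Buffer | null
}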
558 pkg/diff/diff_test.go Normal file
@@ -0,0 +1,558 @@
package diff

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func TestCompareDatabases(t *testing.T) {
	tests := []struct {
		name   string
		source *models.Database
		target *models.Database
		want   func(*DiffResult) bool
	}{
		{
			name: "identical databases",
			source: &models.Database{
				Name:    "source",
				Schemas: []*models.Schema{},
			},
			target: &models.Database{
				Name:    "target",
				Schemas: []*models.Schema{},
			},
			want: func(r *DiffResult) bool {
				return r.Source == "source" && r.Target == "target" &&
					len(r.Schemas.Missing) == 0 && len(r.Schemas.Extra) == 0
			},
		},
		{
			name: "different schemas",
			source: &models.Database{
				Name: "source",
				Schemas: []*models.Schema{
					{Name: "public"},
				},
			},
			target: &models.Database{
				Name:    "target",
				Schemas: []*models.Schema{},
			},
			want: func(r *DiffResult) bool {
				return len(r.Schemas.Missing) == 1 && r.Schemas.Missing[0].Name == "public"
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := CompareDatabases(tt.source, tt.target)
			if !tt.want(got) {
				t.Errorf("CompareDatabases() result doesn't match expectations")
			}
		})
	}
}

func TestCompareColumns(t *testing.T) {
	tests := []struct {
		name   string
		source map[string]*models.Column
		target map[string]*models.Column
		want   func(*ColumnDiff) bool
	}{
		{
			name:   "identical columns",
			source: map[string]*models.Column{},
			target: map[string]*models.Column{},
			want: func(d *ColumnDiff) bool {
				return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
			},
		},
		{
			name: "missing column",
			source: map[string]*models.Column{
				"id": {Name: "id", Type: "integer"},
			},
			target: map[string]*models.Column{},
			want: func(d *ColumnDiff) bool {
				return len(d.Missing) == 1 && d.Missing[0].Name == "id"
			},
		},
		{
			name:   "extra column",
			source: map[string]*models.Column{},
			target: map[string]*models.Column{
				"id": {Name: "id", Type: "integer"},
			},
			want: func(d *ColumnDiff) bool {
				return len(d.Extra) == 1 && d.Extra[0].Name == "id"
			},
		},
		{
			name: "modified column type",
			source: map[string]*models.Column{
				"id": {Name: "id", Type: "integer"},
			},
			target: map[string]*models.Column{
				"id": {Name: "id", Type: "bigint"},
			},
			want: func(d *ColumnDiff) bool {
				return len(d.Modified) == 1 && d.Modified[0].Name == "id" &&
					d.Modified[0].Changes["type"] != nil
			},
		},
		{
			name: "modified column nullable",
			source: map[string]*models.Column{
				"name": {Name: "name", Type: "text", NotNull: true},
			},
			target: map[string]*models.Column{
				"name": {Name: "name", Type: "text", NotNull: false},
			},
			want: func(d *ColumnDiff) bool {
				return len(d.Modified) == 1 && d.Modified[0].Changes["not_null"] != nil
			},
		},
		{
			name: "modified column length",
			source: map[string]*models.Column{
				"name": {Name: "name", Type: "varchar", Length: 100},
			},
			target: map[string]*models.Column{
				"name": {Name: "name", Type: "varchar", Length: 255},
			},
			want: func(d *ColumnDiff) bool {
				return len(d.Modified) == 1 && d.Modified[0].Changes["length"] != nil
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compareColumns(tt.source, tt.target)
			if !tt.want(got) {
				t.Errorf("compareColumns() result doesn't match expectations")
			}
		})
	}
}

func TestCompareColumnDetails(t *testing.T) {
	tests := []struct {
		name   string
		source *models.Column
		target *models.Column
		want   int // number of changes
	}{
		{
			name:   "identical columns",
			source: &models.Column{Name: "id", Type: "integer"},
			target: &models.Column{Name: "id", Type: "integer"},
			want:   0,
		},
		{
			name:   "type change",
			source: &models.Column{Name: "id", Type: "integer"},
			target: &models.Column{Name: "id", Type: "bigint"},
			want:   1,
		},
		{
			name:   "length change",
			source: &models.Column{Name: "name", Type: "varchar", Length: 100},
			target: &models.Column{Name: "name", Type: "varchar", Length: 255},
			want:   1,
		},
		{
			name:   "precision change",
			source: &models.Column{Name: "price", Type: "numeric", Precision: 10},
			target: &models.Column{Name: "price", Type: "numeric", Precision: 12},
			want:   1,
		},
		{
			name:   "scale change",
			source: &models.Column{Name: "price", Type: "numeric", Scale: 2},
			target: &models.Column{Name: "price", Type: "numeric", Scale: 4},
			want:   1,
		},
		{
			name:   "not null change",
			source: &models.Column{Name: "name", Type: "text", NotNull: true},
			target: &models.Column{Name: "name", Type: "text", NotNull: false},
			want:   1,
		},
		{
			name:   "auto increment change",
			source: &models.Column{Name: "id", Type: "integer", AutoIncrement: true},
			target: &models.Column{Name: "id", Type: "integer", AutoIncrement: false},
			want:   1,
		},
		{
			name:   "primary key change",
			source: &models.Column{Name: "id", Type: "integer", IsPrimaryKey: true},
			target: &models.Column{Name: "id", Type: "integer", IsPrimaryKey: false},
			want:   1,
		},
		{
			name:   "multiple changes",
			source: &models.Column{Name: "id", Type: "integer", NotNull: true, AutoIncrement: true},
			target: &models.Column{Name: "id", Type: "bigint", NotNull: false, AutoIncrement: false},
			want:   3,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compareColumnDetails(tt.source, tt.target)
			if len(got) != tt.want {
				t.Errorf("compareColumnDetails() = %d changes, want %d", len(got), tt.want)
			}
		})
	}
}

func TestCompareIndexes(t *testing.T) {
	tests := []struct {
		name   string
		source map[string]*models.Index
		target map[string]*models.Index
		want   func(*IndexDiff) bool
	}{
		{
			name:   "identical indexes",
			source: map[string]*models.Index{},
			target: map[string]*models.Index{},
			want: func(d *IndexDiff) bool {
				return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
			},
		},
		{
			name: "missing index",
			source: map[string]*models.Index{
				"idx_name": {Name: "idx_name", Columns: []string{"name"}},
			},
			target: map[string]*models.Index{},
			want: func(d *IndexDiff) bool {
				return len(d.Missing) == 1 && d.Missing[0].Name == "idx_name"
			},
		},
		{
			name:   "extra index",
			source: map[string]*models.Index{},
			target: map[string]*models.Index{
				"idx_name": {Name: "idx_name", Columns: []string{"name"}},
			},
			want: func(d *IndexDiff) bool {
				return len(d.Extra) == 1 && d.Extra[0].Name == "idx_name"
			},
		},
		{
			name: "modified index uniqueness",
			source: map[string]*models.Index{
				"idx_name": {Name: "idx_name", Columns: []string{"name"}, Unique: false},
			},
			target: map[string]*models.Index{
				"idx_name": {Name: "idx_name", Columns: []string{"name"}, Unique: true},
			},
			want: func(d *IndexDiff) bool {
				return len(d.Modified) == 1 && d.Modified[0].Name == "idx_name"
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compareIndexes(tt.source, tt.target)
			if !tt.want(got) {
				t.Errorf("compareIndexes() result doesn't match expectations")
			}
		})
	}
}

func TestCompareConstraints(t *testing.T) {
	tests := []struct {
		name   string
		source map[string]*models.Constraint
		target map[string]*models.Constraint
		want   func(*ConstraintDiff) bool
	}{
		{
			name:   "identical constraints",
			source: map[string]*models.Constraint{},
			target: map[string]*models.Constraint{},
			want: func(d *ConstraintDiff) bool {
				return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
			},
		},
		{
			name: "missing constraint",
			source: map[string]*models.Constraint{
				"pk_id": {Name: "pk_id", Type: "PRIMARY KEY", Columns: []string{"id"}},
			},
			target: map[string]*models.Constraint{},
			want: func(d *ConstraintDiff) bool {
				return len(d.Missing) == 1 && d.Missing[0].Name == "pk_id"
			},
		},
		{
			name:   "extra constraint",
			source: map[string]*models.Constraint{},
			target: map[string]*models.Constraint{
				"pk_id": {Name: "pk_id", Type: "PRIMARY KEY", Columns: []string{"id"}},
			},
			want: func(d *ConstraintDiff) bool {
				return len(d.Extra) == 1 && d.Extra[0].Name == "pk_id"
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compareConstraints(tt.source, tt.target)
			if !tt.want(got) {
				t.Errorf("compareConstraints() result doesn't match expectations")
			}
		})
	}
}

func TestCompareRelationships(t *testing.T) {
	tests := []struct {
		name   string
		source map[string]*models.Relationship
		target map[string]*models.Relationship
		want   func(*RelationshipDiff) bool
	}{
		{
			name:   "identical relationships",
			source: map[string]*models.Relationship{},
			target: map[string]*models.Relationship{},
			want: func(d *RelationshipDiff) bool {
				return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
			},
		},
		{
			name: "missing relationship",
			source: map[string]*models.Relationship{
				"fk_user": {Name: "fk_user", Type: "FOREIGN KEY"},
			},
			target: map[string]*models.Relationship{},
			want: func(d *RelationshipDiff) bool {
				return len(d.Missing) == 1 && d.Missing[0].Name == "fk_user"
			},
		},
		{
			name:   "extra relationship",
			source: map[string]*models.Relationship{},
			target: map[string]*models.Relationship{
				"fk_user": {Name: "fk_user", Type: "FOREIGN KEY"},
			},
			want: func(d *RelationshipDiff) bool {
				return len(d.Extra) == 1 && d.Extra[0].Name == "fk_user"
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compareRelationships(tt.source, tt.target)
			if !tt.want(got) {
				t.Errorf("compareRelationships() result doesn't match expectations")
			}
		})
	}
}

func TestCompareTables(t *testing.T) {
	tests := []struct {
		name   string
		source []*models.Table
		target []*models.Table
		want   func(*TableDiff) bool
	}{
		{
			name:   "identical tables",
			source: []*models.Table{},
			target: []*models.Table{},
			want: func(d *TableDiff) bool {
				return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
			},
		},
		{
			name: "missing table",
			source: []*models.Table{
				{Name: "users", Schema: "public"},
			},
			target: []*models.Table{},
			want: func(d *TableDiff) bool {
				return len(d.Missing) == 1 && d.Missing[0].Name == "users"
			},
		},
		{
			name:   "extra table",
			source: []*models.Table{},
			target: []*models.Table{
				{Name: "users", Schema: "public"},
			},
			want: func(d *TableDiff) bool {
				return len(d.Extra) == 1 && d.Extra[0].Name == "users"
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "modified table",
|
||||||
|
source: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "users",
|
||||||
|
Schema: "public",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {Name: "id", Type: "integer"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
target: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "users",
|
||||||
|
Schema: "public",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {Name: "id", Type: "bigint"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
want: func(d *TableDiff) bool {
|
||||||
|
return len(d.Modified) == 1 && d.Modified[0].Name == "users"
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := compareTables(tt.source, tt.target)
|
||||||
|
if !tt.want(got) {
|
||||||
|
t.Errorf("compareTables() result doesn't match expectations")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCompareSchemas(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
source []*models.Schema
|
||||||
|
target []*models.Schema
|
||||||
|
want func(*SchemaDiff) bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "identical schemas",
|
||||||
|
source: []*models.Schema{},
|
||||||
|
target: []*models.Schema{},
|
||||||
|
want: func(d *SchemaDiff) bool {
|
||||||
|
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "missing schema",
|
||||||
|
source: []*models.Schema{
|
||||||
|
{Name: "public"},
|
||||||
|
},
|
||||||
|
target: []*models.Schema{},
|
||||||
|
want: func(d *SchemaDiff) bool {
|
||||||
|
return len(d.Missing) == 1 && d.Missing[0].Name == "public"
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "extra schema",
|
||||||
|
source: []*models.Schema{},
|
||||||
|
target: []*models.Schema{
|
||||||
|
{Name: "public"},
|
||||||
|
},
|
||||||
|
want: func(d *SchemaDiff) bool {
|
||||||
|
return len(d.Extra) == 1 && d.Extra[0].Name == "public"
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := compareSchemas(tt.source, tt.target)
|
||||||
|
if !tt.want(got) {
|
||||||
|
t.Errorf("compareSchemas() result doesn't match expectations")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsEmpty(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
v interface{}
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{"empty ColumnDiff", &ColumnDiff{Missing: []*models.Column{}, Extra: []*models.Column{}, Modified: []*ColumnChange{}}, true},
|
||||||
|
{"ColumnDiff with missing", &ColumnDiff{Missing: []*models.Column{{Name: "id"}}, Extra: []*models.Column{}, Modified: []*ColumnChange{}}, false},
|
||||||
|
{"ColumnDiff with extra", &ColumnDiff{Missing: []*models.Column{}, Extra: []*models.Column{{Name: "id"}}, Modified: []*ColumnChange{}}, false},
|
||||||
|
{"empty IndexDiff", &IndexDiff{Missing: []*models.Index{}, Extra: []*models.Index{}, Modified: []*IndexChange{}}, true},
|
||||||
|
{"IndexDiff with missing", &IndexDiff{Missing: []*models.Index{{Name: "idx"}}, Extra: []*models.Index{}, Modified: []*IndexChange{}}, false},
|
||||||
|
{"empty TableDiff", &TableDiff{Missing: []*models.Table{}, Extra: []*models.Table{}, Modified: []*TableChange{}}, true},
|
||||||
|
{"TableDiff with extra", &TableDiff{Missing: []*models.Table{}, Extra: []*models.Table{{Name: "users"}}, Modified: []*TableChange{}}, false},
|
||||||
|
{"empty ConstraintDiff", &ConstraintDiff{Missing: []*models.Constraint{}, Extra: []*models.Constraint{}, Modified: []*ConstraintChange{}}, true},
|
||||||
|
{"empty RelationshipDiff", &RelationshipDiff{Missing: []*models.Relationship{}, Extra: []*models.Relationship{}, Modified: []*RelationshipChange{}}, true},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := isEmpty(tt.v)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("isEmpty() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestComputeSummary(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
result *DiffResult
|
||||||
|
want func(*Summary) bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "empty diff",
|
||||||
|
result: &DiffResult{
|
||||||
|
Schemas: &SchemaDiff{
|
||||||
|
Missing: []*models.Schema{},
|
||||||
|
Extra: []*models.Schema{},
|
||||||
|
Modified: []*SchemaChange{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
want: func(s *Summary) bool {
|
||||||
|
return s.Schemas.Missing == 0 && s.Schemas.Extra == 0 && s.Schemas.Modified == 0
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "schemas with differences",
|
||||||
|
result: &DiffResult{
|
||||||
|
Schemas: &SchemaDiff{
|
||||||
|
Missing: []*models.Schema{{Name: "schema1"}},
|
||||||
|
Extra: []*models.Schema{{Name: "schema2"}, {Name: "schema3"}},
|
||||||
|
Modified: []*SchemaChange{
|
||||||
|
{Name: "public"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
want: func(s *Summary) bool {
|
||||||
|
return s.Schemas.Missing == 1 && s.Schemas.Extra == 2 && s.Schemas.Modified == 1
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := ComputeSummary(tt.result)
|
||||||
|
if !tt.want(got) {
|
||||||
|
t.Errorf("ComputeSummary() result doesn't match expectations")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
43
pkg/diff/doc.go
Normal file
@@ -0,0 +1,43 @@
// Package diff provides utilities for comparing database schemas and identifying differences.
//
// # Overview
//
// The diff package compares two database models at various granularity levels (database,
// schema, table, column) and produces detailed reports of differences including:
//   - Missing items (present in source but not in target)
//   - Extra items (present in target but not in source)
//   - Modified items (present in both but with different properties)
//
// # Usage
//
// Compare two databases and format the output:
//
//	result := diff.CompareDatabases(sourceDB, targetDB)
//	err := diff.FormatDiff(result, diff.FormatSummary, os.Stdout)
//
// # Output Formats
//
// The package supports multiple output formats:
//   - FormatSummary: Human-readable summary text
//   - FormatJSON: Structured JSON output
//   - FormatHTML: Standalone HTML report
//
// # Comparison Scope
//
// The comparison covers:
//   - Schemas: Name, description, and contents
//   - Tables: Name, description, and all sub-elements
//   - Columns: Type, nullability, defaults, constraints
//   - Indexes: Columns, uniqueness, type
//   - Constraints: Type, columns, references
//   - Relationships: Type, from/to tables and columns
//   - Views: Definition and columns
//   - Sequences: Start value, increment, min/max values
//
// # Use Cases
//
//   - Schema migration planning
//   - Database synchronization verification
//   - Change tracking and auditing
//   - CI/CD pipeline validation
package diff
440
pkg/diff/formatters_test.go
Normal file
@@ -0,0 +1,440 @@
package diff

import (
	"bytes"
	"encoding/json"
	"strings"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func TestFormatDiff(t *testing.T) {
	result := &DiffResult{
		Source: "source_db",
		Target: "target_db",
		Schemas: &SchemaDiff{
			Missing: []*models.Schema{},
			Extra: []*models.Schema{},
			Modified: []*SchemaChange{},
		},
	}

	tests := []struct {
		name string
		format OutputFormat
		wantErr bool
	}{
		{"summary format", FormatSummary, false},
		{"json format", FormatJSON, false},
		{"html format", FormatHTML, false},
		{"invalid format", OutputFormat("invalid"), true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var buf bytes.Buffer
			err := FormatDiff(result, tt.format, &buf)

			if (err != nil) != tt.wantErr {
				t.Errorf("FormatDiff() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if !tt.wantErr && buf.Len() == 0 {
				t.Error("FormatDiff() produced empty output")
			}
		})
	}
}

func TestFormatSummary(t *testing.T) {
	tests := []struct {
		name string
		result *DiffResult
		wantStr []string // strings that should appear in output
	}{
		{
			name: "no differences",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Missing: []*models.Schema{},
					Extra: []*models.Schema{},
					Modified: []*SchemaChange{},
				},
			},
			wantStr: []string{"source", "target", "No differences found"},
		},
		{
			name: "with schema differences",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Missing: []*models.Schema{{Name: "schema1"}},
					Extra: []*models.Schema{{Name: "schema2"}},
					Modified: []*SchemaChange{
						{Name: "public"},
					},
				},
			},
			wantStr: []string{"Schemas:", "Missing: 1", "Extra: 1", "Modified: 1"},
		},
		{
			name: "with table differences",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Modified: []*SchemaChange{
						{
							Name: "public",
							Tables: &TableDiff{
								Missing: []*models.Table{{Name: "users"}},
								Extra: []*models.Table{{Name: "posts"}},
								Modified: []*TableChange{
									{Name: "comments", Schema: "public"},
								},
							},
						},
					},
				},
			},
			wantStr: []string{"Tables:", "Missing: 1", "Extra: 1", "Modified: 1"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var buf bytes.Buffer
			err := formatSummary(tt.result, &buf)

			if err != nil {
				t.Errorf("formatSummary() error = %v", err)
				return
			}

			output := buf.String()
			for _, want := range tt.wantStr {
				if !strings.Contains(output, want) {
					t.Errorf("formatSummary() output doesn't contain %q\nGot: %s", want, output)
				}
			}
		})
	}
}

func TestFormatJSON(t *testing.T) {
	result := &DiffResult{
		Source: "source",
		Target: "target",
		Schemas: &SchemaDiff{
			Missing: []*models.Schema{{Name: "schema1"}},
			Extra: []*models.Schema{},
			Modified: []*SchemaChange{},
		},
	}

	var buf bytes.Buffer
	err := formatJSON(result, &buf)

	if err != nil {
		t.Errorf("formatJSON() error = %v", err)
		return
	}

	// Check if output is valid JSON
	var decoded DiffResult
	if err := json.Unmarshal(buf.Bytes(), &decoded); err != nil {
		t.Errorf("formatJSON() produced invalid JSON: %v", err)
	}

	// Check basic structure
	if decoded.Source != "source" {
		t.Errorf("formatJSON() source = %v, want %v", decoded.Source, "source")
	}
	if decoded.Target != "target" {
		t.Errorf("formatJSON() target = %v, want %v", decoded.Target, "target")
	}
	if len(decoded.Schemas.Missing) != 1 {
		t.Errorf("formatJSON() missing schemas = %v, want 1", len(decoded.Schemas.Missing))
	}
}

func TestFormatHTML(t *testing.T) {
	tests := []struct {
		name string
		result *DiffResult
		wantStr []string // HTML elements/content that should appear
	}{
		{
			name: "basic HTML structure",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Missing: []*models.Schema{},
					Extra: []*models.Schema{},
					Modified: []*SchemaChange{},
				},
			},
			wantStr: []string{
				"<!DOCTYPE html>",
				"<title>Database Diff Report</title>",
				"source",
				"target",
			},
		},
		{
			name: "with schema differences",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Missing: []*models.Schema{{Name: "missing_schema"}},
					Extra: []*models.Schema{{Name: "extra_schema"}},
					Modified: []*SchemaChange{},
				},
			},
			wantStr: []string{
				"<!DOCTYPE html>",
				"missing_schema",
				"extra_schema",
				"MISSING",
				"EXTRA",
			},
		},
		{
			name: "with table modifications",
			result: &DiffResult{
				Source: "source",
				Target: "target",
				Schemas: &SchemaDiff{
					Modified: []*SchemaChange{
						{
							Name: "public",
							Tables: &TableDiff{
								Modified: []*TableChange{
									{
										Name: "users",
										Schema: "public",
										Columns: &ColumnDiff{
											Missing: []*models.Column{{Name: "email", Type: "text"}},
										},
									},
								},
							},
						},
					},
				},
			},
			wantStr: []string{
				"public",
				"users",
				"email",
				"text",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var buf bytes.Buffer
			err := formatHTML(tt.result, &buf)

			if err != nil {
				t.Errorf("formatHTML() error = %v", err)
				return
			}

			output := buf.String()
			for _, want := range tt.wantStr {
				if !strings.Contains(output, want) {
					t.Errorf("formatHTML() output doesn't contain %q", want)
				}
			}
		})
	}
}

func TestFormatSummaryWithColumns(t *testing.T) {
	result := &DiffResult{
		Source: "source",
		Target: "target",
		Schemas: &SchemaDiff{
			Modified: []*SchemaChange{
				{
					Name: "public",
					Tables: &TableDiff{
						Modified: []*TableChange{
							{
								Name: "users",
								Schema: "public",
								Columns: &ColumnDiff{
									Missing: []*models.Column{{Name: "email"}},
									Extra: []*models.Column{{Name: "phone"}, {Name: "address"}},
									Modified: []*ColumnChange{
										{Name: "name"},
									},
								},
							},
						},
					},
				},
			},
		},
	}

	var buf bytes.Buffer
	err := formatSummary(result, &buf)

	if err != nil {
		t.Errorf("formatSummary() error = %v", err)
		return
	}

	output := buf.String()
	wantStrings := []string{
		"Columns:",
		"Missing: 1",
		"Extra: 2",
		"Modified: 1",
	}

	for _, want := range wantStrings {
		if !strings.Contains(output, want) {
			t.Errorf("formatSummary() output doesn't contain %q\nGot: %s", want, output)
		}
	}
}

func TestFormatSummaryWithIndexes(t *testing.T) {
	result := &DiffResult{
		Source: "source",
		Target: "target",
		Schemas: &SchemaDiff{
			Modified: []*SchemaChange{
				{
					Name: "public",
					Tables: &TableDiff{
						Modified: []*TableChange{
							{
								Name: "users",
								Schema: "public",
								Indexes: &IndexDiff{
									Missing: []*models.Index{{Name: "idx_email"}},
									Extra: []*models.Index{{Name: "idx_phone"}},
									Modified: []*IndexChange{{Name: "idx_name"}},
								},
							},
						},
					},
				},
			},
		},
	}

	var buf bytes.Buffer
	err := formatSummary(result, &buf)

	if err != nil {
		t.Errorf("formatSummary() error = %v", err)
		return
	}

	output := buf.String()
	if !strings.Contains(output, "Indexes:") {
		t.Error("formatSummary() output doesn't contain Indexes section")
	}
	if !strings.Contains(output, "Missing: 1") {
		t.Error("formatSummary() output doesn't contain correct missing count")
	}
}

func TestFormatSummaryWithConstraints(t *testing.T) {
	result := &DiffResult{
		Source: "source",
		Target: "target",
		Schemas: &SchemaDiff{
			Modified: []*SchemaChange{
				{
					Name: "public",
					Tables: &TableDiff{
						Modified: []*TableChange{
							{
								Name: "users",
								Schema: "public",
								Constraints: &ConstraintDiff{
									Missing: []*models.Constraint{{Name: "pk_users", Type: "PRIMARY KEY"}},
									Extra: []*models.Constraint{{Name: "fk_users_roles", Type: "FOREIGN KEY"}},
								},
							},
						},
					},
				},
			},
		},
	}

	var buf bytes.Buffer
	err := formatSummary(result, &buf)

	if err != nil {
		t.Errorf("formatSummary() error = %v", err)
		return
	}

	output := buf.String()
	if !strings.Contains(output, "Constraints:") {
		t.Error("formatSummary() output doesn't contain Constraints section")
	}
}

func TestFormatJSONIndentation(t *testing.T) {
	result := &DiffResult{
		Source: "source",
		Target: "target",
		Schemas: &SchemaDiff{
			Missing: []*models.Schema{{Name: "test"}},
		},
	}

	var buf bytes.Buffer
	err := formatJSON(result, &buf)

	if err != nil {
		t.Errorf("formatJSON() error = %v", err)
		return
	}

	// Check that JSON is indented (has newlines and spaces)
	output := buf.String()
	if !strings.Contains(output, "\n") {
		t.Error("formatJSON() should produce indented JSON with newlines")
	}
	if !strings.Contains(output, "  ") {
		t.Error("formatJSON() should produce indented JSON with spaces")
	}
}

func TestOutputFormatConstants(t *testing.T) {
	tests := []struct {
		name string
		format OutputFormat
		want string
	}{
		{"summary constant", FormatSummary, "summary"},
		{"json constant", FormatJSON, "json"},
		{"html constant", FormatHTML, "html"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if string(tt.format) != tt.want {
				t.Errorf("OutputFormat %v = %v, want %v", tt.name, tt.format, tt.want)
			}
		})
	}
}
177
pkg/inspector/.relspec-rules.yaml.example
Normal file
@@ -0,0 +1,177 @@
# RelSpec Inspector Rules Configuration Example
# Copy this file to .relspec-rules.yaml and customize as needed

version: "1.0"

rules:
  # ============================================================================
  # PRIMARY KEY RULES
  # ============================================================================

  # Validate primary key column naming convention
  primary_key_naming:
    enabled: warn # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_" # Regex pattern - PK columns must start with "id_"
    message: "Primary key columns should start with 'id_'"

  # Validate primary key data types
  primary_key_datatype:
    enabled: warn
    function: primary_key_datatype
    allowed_types:
      - bigserial
      - bigint
      - int
      - serial
      - integer
      - int4
      - int8
    message: "Primary keys should use integer types (bigserial, bigint, int, serial)"

  # Check if primary keys have auto-increment enabled
  primary_key_auto_increment:
    enabled: off # Often disabled as not all PKs need auto-increment
    function: primary_key_auto_increment
    require_auto_increment: true
    message: "Primary key without auto-increment detected"

  # ============================================================================
  # FOREIGN KEY RULES
  # ============================================================================

  # Validate foreign key column naming convention
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_" # FK columns must start with "rid_" (referenced id)
    message: "Foreign key columns should start with 'rid_'"

  # Validate foreign key constraint naming convention
  foreign_key_constraint_naming:
    enabled: warn
    function: foreign_key_constraint_naming
    pattern: "^fk_" # FK constraints must start with "fk_"
    message: "Foreign key constraint names should start with 'fk_'"

  # Ensure foreign key columns have indexes for performance
  foreign_key_index:
    enabled: warn
    function: foreign_key_index
    require_index: true
    message: "Foreign key columns should have indexes for optimal performance"

  # ============================================================================
  # NAMING CONVENTION RULES
  # ============================================================================

  # Validate table naming follows snake_case convention
  table_naming_case:
    enabled: warn
    function: table_regexpr # Generic regex validator for table names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
    message: "Table names should be lowercase with underscores (snake_case)"

  # Validate column naming follows snake_case convention
  column_naming_case:
    enabled: warn
    function: column_regexpr # Generic regex validator for column names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
    message: "Column names should be lowercase with underscores (snake_case)"

  # ============================================================================
  # LENGTH RULES
  # ============================================================================

  # Limit table name length (PostgreSQL max is 63, but 64 is common practice)
  table_name_length:
    enabled: warn
    function: table_name_length
    max_length: 64
    message: "Table name exceeds recommended maximum length of 64 characters"

  # Limit column name length
  column_name_length:
    enabled: warn
    function: column_name_length
    max_length: 64
    message: "Column name exceeds recommended maximum length of 64 characters"

  # ============================================================================
  # RESERVED KEYWORDS
  # ============================================================================

  # Warn about using SQL reserved keywords as identifiers
  reserved_keywords:
    enabled: warn
    function: reserved_words
    check_tables: true
    check_columns: true
    message: "Using SQL reserved keywords as identifiers can cause issues"

  # ============================================================================
  # SCHEMA INTEGRITY RULES
  # ============================================================================

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"

  # Detect orphaned foreign keys (referencing non-existent tables)
  orphaned_foreign_key:
    enabled: warn
    function: orphaned_foreign_key
    message: "Foreign key references a non-existent table"

  # Detect circular foreign key dependencies
  circular_dependency:
    enabled: warn
    function: circular_dependency
    message: "Circular foreign key dependency detected"

# ============================================================================
# RULE CONFIGURATION NOTES
# ============================================================================
#
# enabled: Controls rule enforcement level
#   - enforce: Violations are errors (exit code 1)
#   - warn: Violations are warnings (exit code 0)
#   - off: Rule is disabled
#
# function: The validation function to execute
#   - Must match a registered validator function
#   - Generic functions like table_regexpr and column_regexpr can be reused
#
# pattern: Regular expression for pattern matching
#   - Used by naming validators
#   - Must be valid Go regex syntax
#
# message: Custom message shown when rule is violated
#   - Should be clear and actionable
#   - Explains what the violation is and how to fix it
#
# ============================================================================
# CUSTOM RULES EXAMPLES
# ============================================================================
#
# You can add custom rules using the generic validator functions:
#
# # Example: Ensure table names don't contain numbers
# table_no_numbers:
#   enabled: warn
#   function: table_regexpr
#   pattern: "^[a-z_]+$"
#   message: "Table names should not contain numbers"
#
# # Example: Audit columns must end with _audit
# audit_column_suffix:
#   enabled: enforce
#   function: column_regexpr
#   pattern: ".*_audit$"
#   message: "Audit columns must end with '_audit'"
#
# ============================================================================
472
pkg/inspector/PLAN.md
Normal file
@@ -0,0 +1,472 @@
# Inspector Feature Implementation Plan

## Overview
Add a model inspection feature that validates database schemas against configurable rules. The inspector will read any supported format, apply validation rules from a YAML config, and output a report in markdown or JSON format.

## Architecture

### Core Components

1. **CLI Command** (`cmd/relspec/inspect.go`)
   - New subcommand: `relspec inspect`
   - Flags:
     - `--from` (required): Input format (dbml, pgsql, json, etc.)
     - `--from-path`: File path for file-based formats
     - `--from-conn`: Connection string for database formats
     - `--rules` (optional): Path to rules YAML file (default: `.relspec-rules.yaml`)
     - `--output-format`: Report format (markdown, json) (default: markdown)
     - `--output`: Output file path (default: stdout)
     - `--schema`: Schema name filter (optional)

2. **Inspector Package** (`pkg/inspector/`)
   - `inspector.go`: Main inspector logic
   - `rules.go`: Rule definitions and configuration
   - `validators.go`: Individual validation rule implementations
   - `report.go`: Report generation (markdown, JSON)
   - `config.go`: YAML config loading and parsing

### Data Flow
```
Input Format → Reader → Database Model → Inspector → Validation Results → Report Formatter → Output
```
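
A minimal sketch of the happy path once a reader has produced a `models.Database`. The `runReport` wrapper, its parameters, and the reader step it elides are illustrative only; the constructors and types it calls are the ones specified later in this plan:

```go
package main // illustrative placement; the real code lives in cmd/relspec

import (
	"io"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// runReport wires the flow above: config -> inspector -> report -> formatter.
// Reader construction is elided because it reuses convert.go's factory;
// isTTY and w are supplied by the CLI layer.
func runReport(db *models.Database, rulesPath string, isTTY bool, w io.Writer) error {
	cfg, err := inspector.LoadConfig(rulesPath) // falls back to defaults if the file is absent
	if err != nil {
		return err // invalid YAML: runtime error (exit code 2)
	}
	report, err := inspector.NewInspector(db, cfg).Inspect()
	if err != nil {
		return err
	}
	out, err := (&inspector.MarkdownFormatter{UseColors: isTTY}).Format(report)
	if err != nil {
		return err
	}
	_, err = io.WriteString(w, out)
	return err
}
```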

## Rules Configuration Structure

### YAML Schema (`rules.yaml`)
```yaml
version: "1.0"
rules:
  # Primary Key Rules
  primary_key_naming:
    enabled: enforce|warn|off
    pattern: "^id_" # regex pattern
    message: "Primary key columns must start with 'id_'"

  primary_key_datatype:
    enabled: enforce|warn|off
    allowed_types: ["bigserial", "bigint", "int", "serial", "integer"]
    message: "Primary keys must use approved integer types"

  primary_key_auto_increment:
    enabled: enforce|warn|off
    require_auto_increment: true|false
    message: "Primary keys without auto-increment detected"

  # Foreign Key Rules
  foreign_key_column_naming:
    enabled: enforce|warn|off
    pattern: "^rid_"
    message: "Foreign key columns must start with 'rid_'"

  foreign_key_constraint_naming:
    enabled: enforce|warn|off
    pattern: "^fk_"
    message: "Foreign key constraint names must start with 'fk_'"

  foreign_key_index:
    enabled: enforce|warn|off
    require_index: true
    message: "Foreign keys should have indexes"

  # Naming Convention Rules
  table_naming_case:
    enabled: enforce|warn|off
    case: "lowercase" # lowercase, uppercase, snake_case, camelCase
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names must be lowercase with underscores"

  column_naming_case:
    enabled: enforce|warn|off
    case: "lowercase"
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Column names must be lowercase with underscores"

  # Length Rules
  table_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Table name exceeds maximum length"

  column_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Column name exceeds maximum length"

  # Reserved Keywords
  reserved_keywords:
    enabled: enforce|warn|off
    check_tables: true
    check_columns: true
    message: "Using reserved SQL keywords"

  # Schema Integrity Rules
  missing_primary_key:
    enabled: enforce|warn|off
    message: "Table missing primary key"

  orphaned_foreign_key:
    enabled: enforce|warn|off
    message: "Foreign key references non-existent table"

  circular_dependency:
    enabled: enforce|warn|off
    message: "Circular foreign key dependency detected"
```

### Rule Levels
- **enforce**: Violations are errors (exit code 1)
- **warn**: Violations are warnings (exit code 0)
- **off**: Rule disabled

## Implementation Details

### 1. Inspector Core (`pkg/inspector/inspector.go`)

```go
type Inspector struct {
	config *Config
	db *models.Database
}

type ValidationResult struct {
	RuleName string
	Level string // "error" or "warning"
	Message string
	Location string // e.g., "schema.table.column"
	Context map[string]interface{}
	Passed bool
}

type InspectorReport struct {
	Summary ReportSummary
	Violations []ValidationResult
	GeneratedAt time.Time
	Database string
	SourceFormat string
}

type ReportSummary struct {
	TotalRules int
	RulesChecked int
	RulesSkipped int
	ErrorCount int
	WarningCount int
	PassedCount int
}

func NewInspector(db *models.Database, config *Config) *Inspector
func (i *Inspector) Inspect() (*InspectorReport, error)
func (i *Inspector) validateDatabase() []ValidationResult
func (i *Inspector) validateSchema(schema *models.Schema) []ValidationResult
func (i *Inspector) validateTable(table *models.Table) []ValidationResult
```
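
A sketch of what a caller sees, using exactly the field names declared above; only the `printReport` helper itself is invented here:

```go
// printReport is a sketch of consuming an InspectorReport.
func printReport(report *InspectorReport) {
	fmt.Printf("checked %d rules: %d errors, %d warnings, %d passed\n",
		report.Summary.RulesChecked,
		report.Summary.ErrorCount,
		report.Summary.WarningCount,
		report.Summary.PassedCount)
	for _, v := range report.Violations {
		// Level is "error" or "warning"; Location is "schema.table.column".
		fmt.Printf("  [%s] %s at %s: %s\n", v.Level, v.RuleName, v.Location, v.Message)
	}
}
```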

### 2. Rule Definitions (`pkg/inspector/rules.go`)

```go
type Config struct {
	Version string
	Rules map[string]Rule
}

type Rule struct {
	Enabled string // "enforce", "warn", "off"
	Message string
	Pattern string
	AllowedTypes []string
	MaxLength int
	Case string
	RequireIndex bool
	CheckTables bool
	CheckColumns bool
	// ... rule-specific fields
}

type RuleValidator interface {
	Name() string
	Validate(db *models.Database, rule Rule) []ValidationResult
}

func LoadConfig(path string) (*Config, error)
func GetDefaultConfig() *Config
```

**Configuration Loading Behavior:**
- If `--rules` flag is provided but file not found: Use default configuration (don't error)
- If file exists but is invalid YAML: Return error
- Default configuration has sensible rules enabled at "warn" level
- Users can override by creating their own `.relspec-rules.yaml` file
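
That behavior fits in a few lines; this is a sketch assuming `gopkg.in/yaml.v3` (listed under Dependencies) and the signatures declared above:

```go
// LoadConfig sketch: a missing file is not an error, invalid YAML is.
func LoadConfig(path string) (*Config, error) {
	data, err := os.ReadFile(path)
	if errors.Is(err, os.ErrNotExist) {
		return GetDefaultConfig(), nil // fall back to built-in defaults
	}
	if err != nil {
		return nil, err
	}
	var cfg Config
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		return nil, fmt.Errorf("invalid rules file %s: %w", path, err)
	}
	return &cfg, nil
}
```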

### 3. Validators (`pkg/inspector/validators.go`)

Each validator implements rule logic:

```go
// Primary Key Validators
func validatePrimaryKeyNaming(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyDatatype(db *models.Database, rule Rule) []ValidationResult
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule) []ValidationResult

// Foreign Key Validators
func validateForeignKeyColumnNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule) []ValidationResult
func validateForeignKeyIndex(db *models.Database, rule Rule) []ValidationResult

// Naming Convention Validators
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult
func validateColumnNamingCase(db *models.Database, rule Rule) []ValidationResult

// Length Validators
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult
func validateColumnNameLength(db *models.Database, rule Rule) []ValidationResult

// Reserved Keywords Validator
func validateReservedKeywords(db *models.Database, rule Rule) []ValidationResult

// Integrity Validators
func validateMissingPrimaryKey(db *models.Database, rule Rule) []ValidationResult
func validateOrphanedForeignKey(db *models.Database, rule Rule) []ValidationResult
func validateCircularDependency(db *models.Database, rule Rule) []ValidationResult

// Registry of all validators
var validators = map[string]RuleValidator{
	"primary_key_naming": primaryKeyNamingValidator{},
	// ...
}
```
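
To make the shape concrete, here is a sketch of one of the simpler validators. The traversal fields (`db.Schemas`, `schema.Tables`) are assumptions about `pkg/models`, not confirmed API:

```go
// validateTableNameLength sketch: flag tables whose names exceed rule.MaxLength.
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult {
	level := "warning"
	if rule.Enabled == "enforce" {
		level = "error"
	}
	var results []ValidationResult
	for _, schema := range db.Schemas { // field name assumed
		for _, table := range schema.Tables { // field name assumed
			if len(table.Name) > rule.MaxLength {
				results = append(results, ValidationResult{
					RuleName: "table_name_length",
					Level:    level,
					Message:  rule.Message,
					Location: schema.Name + "." + table.Name,
					Passed:   false,
				})
			}
		}
	}
	return results
}
```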

### 4. Report Formatting (`pkg/inspector/report.go`)

```go
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

type MarkdownFormatter struct {
	UseColors bool // ANSI colors for terminal output
}
type JSONFormatter struct{}

func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error)
func (f *JSONFormatter) Format(report *InspectorReport) (string, error)

// Helper to detect if output is a TTY (terminal)
func isTerminal(w io.Writer) bool
```

**Output Behavior:**
- Markdown format will use ANSI color codes when outputting to a terminal (TTY)
- When piped or redirected to a file, plain markdown without colors
- Colors: Red for errors, Yellow for warnings, Green for passed checks
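
One way to implement `isTerminal`; `golang.org/x/term` is an assumed extra dependency here, not one the plan lists:

```go
// isTerminal sketch: only *os.File writers can be terminals.
func isTerminal(w io.Writer) bool {
	f, ok := w.(*os.File)
	return ok && term.IsTerminal(int(f.Fd()))
}
```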

**Markdown Format Example:**
```markdown
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31 10:30:45

## Summary
- Rules Checked: 12
- Errors: 3
- Warnings: 5
- Passed: 4

## Violations

### Errors (3)

#### primary_key_naming
**Table:** users, **Column:** user_id
Primary key columns must start with 'id_'

#### table_name_length
**Table:** user_authentication_sessions_with_metadata
Table name exceeds maximum length (64 characters)

### Warnings (5)

#### foreign_key_index
**Table:** orders, **Column:** customer_id
Foreign keys should have indexes

...
```

**JSON Format Example:**
```json
{
  "summary": {
    "total_rules": 12,
    "rules_checked": 12,
    "error_count": 3,
    "warning_count": 5,
    "passed_count": 4
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "current_name": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```

### 5. CLI Command (`cmd/relspec/inspect.go`)

```go
var inspectCmd = &cobra.Command{
	Use: "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long: `Read database schemas from various formats and validate against configurable rules.`,
	RunE: runInspect,
}

func init() {
	inspectCmd.Flags().String("from", "", "Input format (dbml, pgsql, json, etc.)")
	inspectCmd.Flags().String("from-path", "", "Input file path")
	inspectCmd.Flags().String("from-conn", "", "Database connection string")
	inspectCmd.Flags().String("rules", ".relspec-rules.yaml", "Rules configuration file")
	inspectCmd.Flags().String("output-format", "markdown", "Output format (markdown, json)")
	inspectCmd.Flags().String("output", "", "Output file (default: stdout)")
	inspectCmd.Flags().String("schema", "", "Filter by schema name")
	inspectCmd.MarkFlagRequired("from")
}

func runInspect(cmd *cobra.Command, args []string) error {
	// 1. Parse flags
	// 2. Create reader (reuse pattern from convert.go)
	// 3. Read database
	// 4. Load rules config (use defaults if file not found)
	// 5. Create inspector
	// 6. Run inspection
	// 7. Detect if output is terminal (for color support)
	// 8. Format report (with/without ANSI colors)
	// 9. Write output
	// 10. Exit with appropriate code (0 if no errors, 1 if errors)
}
```
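
Step 10 reduces to a small decision, sketched here against the Exit Codes section below; the `exitCode` helper is illustrative:

```go
// exitCode sketch: map the run outcome onto the documented exit codes.
func exitCode(report *inspector.InspectorReport, runErr error) int {
	switch {
	case runErr != nil:
		return 2 // runtime error: invalid config, reader failure, etc.
	case report.Summary.ErrorCount > 0:
		return 1 // enforce-level violations found
	default:
		return 0 // warnings only, or everything passed
	}
}
```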

## Implementation Phases

### Phase 1: Core Infrastructure
1. Create `pkg/inspector/` package structure
2. Implement `Config` and YAML loading
3. Implement `Inspector` core with basic validation framework
4. Create CLI command skeleton

### Phase 2: Basic Validators
1. Implement naming convention validators
   - Primary key naming
   - Foreign key column naming
   - Foreign key constraint naming
   - Table/column case validation
2. Implement length validators
3. Implement reserved keywords validator (leverage `pkg/pgsql/keywords.go`)

### Phase 3: Advanced Validators
1. Implement datatype validators
2. Implement integrity validators (missing PK, orphaned FK, circular deps)
3. Implement foreign key index validator

### Phase 4: Reporting
1. Implement `InspectorReport` structure
2. Implement markdown formatter
3. Implement JSON formatter
4. Add summary statistics

### Phase 5: CLI Integration
1. Wire up CLI command with flags
2. Integrate reader factory (from convert.go pattern)
3. Add output file handling
4. Add exit code logic
5. Add progress reporting

### Phase 6: Testing & Documentation
1. Unit tests for validators
2. Integration tests with sample schemas
3. Test with all reader formats
4. Update README with inspector documentation
5. Create example rules configuration file

## Files to Create

1. `pkg/inspector/inspector.go` - Core inspector logic
2. `pkg/inspector/rules.go` - Rule definitions and config loading
3. `pkg/inspector/validators.go` - Validation implementations
4. `pkg/inspector/report.go` - Report formatting
5. `pkg/inspector/config.go` - Config utilities
6. `cmd/relspec/inspect.go` - CLI command
7. `.relspec-rules.yaml.example` - Example configuration
8. `pkg/inspector/inspector_test.go` - Tests

## Files to Modify

1. `cmd/relspec/root.go` - Register inspect command
2. `README.md` - Add inspector documentation (if requested)

## Example Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgresql://localhost/mydb"

# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from json --from-path db.json --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public

# Use custom rules location
relspec inspect --from dbml --from-path schema.dbml --rules /path/to/rules.yaml
```

## Exit Codes
- 0: Success (no errors, only warnings or all passed)
- 1: Validation errors found (rules with level="enforce" failed)
- 2: Runtime error (invalid config, reader error, etc.)

## Dependencies
- Existing: `pkg/models`, `pkg/readers`, `pkg/pgsql/keywords.go`
- New: `gopkg.in/yaml.v3` for YAML parsing (may already be in go.mod)

## Design Decisions

### Confirmed Choices (from user)
1. **Example config file**: Create `.relspec-rules.yaml.example` in repository root with documented examples
2. **Missing rules file**: Use sensible built-in defaults (don't error), all rules at "warn" level by default
3. **Terminal output**: ANSI colors (red/yellow/green) when outputting to terminal, plain markdown when piped/redirected
4. **Foreign key naming**: Separate configurable rules for both FK column names and FK constraint names

### Architecture Rationale
1. **Why YAML for config?**: Human-readable, supports comments, standard for config files
2. **Why three levels (enforce/warn/off)?**: Flexibility for gradual adoption, different contexts
3. **Why markdown + JSON?**: Markdown for human review, JSON for tooling integration
4. **Why pkg/inspector?**: Follows existing package structure, separates concerns
5. **Reuse readers**: Leverage existing reader infrastructure, supports all formats automatically
6. **Exit codes**: Follow standard conventions (0=success, 1=validation fail, 2=error)

## Future Enhancements (Not in Scope)
- Auto-fix mode (automatically rename columns, etc.)
- Custom rule plugins
- HTML report format
- Rule templates for different databases
- CI/CD integration examples
- Performance metrics in report
485
pkg/inspector/README.md
Normal file
@@ -0,0 +1,485 @@
# RelSpec Inspector

> Database Schema Validation and Linting Tool

The RelSpec Inspector validates database schemas against configurable rules, helping you maintain consistency, enforce naming conventions, and catch common schema design issues across your database models.

## Overview

The Inspector reads database schemas from any supported RelSpec format and validates them against a set of configurable rules. It generates detailed reports highlighting violations, warnings, and passed checks.

## Features

- **Flexible Rule Configuration**: YAML-based rules with three severity levels (enforce, warn, off)
- **Generic Validators**: Reusable regex-based validators for custom naming conventions
- **Multiple Input Formats**: Works with all RelSpec readers (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- **Multiple Output Formats**: Markdown with ANSI colors for terminals, JSON for tooling integration
- **Smart Defaults**: Works out-of-the-box with sensible default rules
- **Terminal-Aware**: Automatic color support detection for improved readability
- **Exit Codes**: Proper exit codes for CI/CD integration

[Todo List of Features](./TODO.md)

## Quick Start

### Basic Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

# Inspect a DBML file
relspec inspect --from dbml --from-path schema.dbml

# Inspect with custom rules
relspec inspect --from json --from-path db.json --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from pgsql --from-conn "..." \
  --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
```

### Configuration

Create a `.relspec-rules.yaml` file to customize validation rules. If the file doesn't exist, the inspector uses sensible defaults.

```yaml
version: "1.0"

rules:
  # Primary key columns must start with "id_"
  primary_key_naming:
    enabled: enforce # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_"
    message: "Primary key columns must start with 'id_'"

  # Foreign key columns must start with "rid_"
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_"
    message: "Foreign key columns should start with 'rid_'"

  # Table names must be lowercase snake_case
  table_naming_case:
    enabled: warn
    function: table_regexpr # Generic regex validator
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names should be lowercase with underscores"

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"
```

## Built-in Validation Rules

### Primary Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `primary_key_naming` | `primary_key_naming` | Validate PK column names against regex pattern |
| `primary_key_datatype` | `primary_key_datatype` | Enforce approved PK data types (bigint, serial, etc.) |
| `primary_key_auto_increment` | `primary_key_auto_increment` | Check if PKs have auto-increment enabled |

### Foreign Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `foreign_key_column_naming` | `foreign_key_column_naming` | Validate FK column names against regex pattern |
| `foreign_key_constraint_naming` | `foreign_key_constraint_naming` | Validate FK constraint names against regex pattern |
| `foreign_key_index` | `foreign_key_index` | Ensure FK columns have indexes for performance |

### Naming Convention Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_naming_case` | `table_regexpr` | Generic regex validator for table names |
| `column_naming_case` | `column_regexpr` | Generic regex validator for column names |

### Length Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_name_length` | `table_name_length` | Limit table name length (default: 64 chars) |
| `column_name_length` | `column_name_length` | Limit column name length (default: 64 chars) |

### Reserved Keywords

| Rule | Function | Description |
|------|----------|-------------|
| `reserved_keywords` | `reserved_words` | Detect use of SQL reserved keywords as identifiers |

### Schema Integrity Rules

| Rule | Function | Description |
|------|----------|-------------|
| `missing_primary_key` | `have_primary_key` | Ensure tables have primary keys |
| `orphaned_foreign_key` | `orphaned_foreign_key` | Detect FKs referencing non-existent tables |
| `circular_dependency` | `circular_dependency` | Detect circular FK dependencies |

## Rule Configuration

### Severity Levels

Rules support three severity levels:

- **`enforce`**: Violations are errors (exit code 1)
- **`warn`**: Violations are warnings (exit code 0)
- **`off`**: Rule is disabled

### Rule Structure

```yaml
rule_name:
  enabled: enforce|warn|off
  function: validator_function_name
  message: "Custom message shown on violation"
  # Rule-specific parameters
  pattern: "^regex_pattern$" # For pattern-based validators
  allowed_types: [type1, type2] # For type validators
  max_length: 64 # For length validators
  check_tables: true # For keyword validator
  check_columns: true # For keyword validator
  require_index: true # For FK index validator
```
|
||||||
|
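
Each key here maps onto a field of the inspector's `Rule` struct (defined in `pkg/inspector/rules.go`, shown later in this diff); parameters a given validator does not read are simply ignored when the YAML is parsed.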

## Generic Validators

The inspector provides generic validator functions that can be reused for custom rules:

### `table_regexpr`

Generic regex validator for table names. Create custom table naming rules:

```yaml
# Example: Ensure table names don't contain numbers
table_no_numbers:
  enabled: warn
  function: table_regexpr
  pattern: "^[a-z_]+$"
  message: "Table names should not contain numbers"

# Example: Tables must start with "tbl_"
table_prefix:
  enabled: enforce
  function: table_regexpr
  pattern: "^tbl_[a-z][a-z0-9_]*$"
  message: "Table names must start with 'tbl_'"
```

### `column_regexpr`

Generic regex validator for column names. Create custom column naming rules:

```yaml
# Example: Audit columns must end with "_audit"
audit_column_suffix:
  enabled: enforce
  function: column_regexpr
  pattern: ".*_audit$"
  message: "Audit columns must end with '_audit'"

# Example: Timestamp columns must end with "_at"
timestamp_suffix:
  enabled: warn
  function: column_regexpr
  pattern: ".*(created|updated|deleted)_at$"
  message: "Timestamp columns should end with '_at'"
```

## Output Formats

### Markdown (Default)

Human-readable markdown report with ANSI colors when outputting to a terminal:

```
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31T10:30:45Z

## Summary
- Rules Checked: 13
- Errors: 2
- Warnings: 5
- Passed: 120

## Violations

### Errors (2)

#### primary_key_naming
**Location:** public.users.user_id
**Message:** Primary key columns must start with 'id_'
**Details:** expected_pattern=^id_

### Warnings (5)

#### foreign_key_index
**Location:** public.orders.customer_id
**Message:** Foreign key columns should have indexes
**Details:** has_index=false
```

### JSON

Structured JSON output for tooling integration:

```json
{
  "summary": {
    "total_rules": 13,
    "rules_checked": 13,
    "error_count": 2,
    "warning_count": 5,
    "passed_count": 120
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```
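
Because the JSON report has a stable shape, downstream tooling can decode only the fields it needs. Below is a minimal Go sketch; the structs mirror the `summary` object above, and `report.json` is a hypothetical path produced with `--output report.json`:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// summary mirrors the "summary" object of the JSON report shown above.
type summary struct {
	ErrorCount   int `json:"error_count"`
	WarningCount int `json:"warning_count"`
	PassedCount  int `json:"passed_count"`
}

type report struct {
	Summary  summary `json:"summary"`
	Database string  `json:"database"`
}

func main() {
	data, err := os.ReadFile("report.json") // produced via --output report.json
	if err != nil {
		panic(err)
	}
	var r report
	if err := json.Unmarshal(data, &r); err != nil {
		panic(err)
	}
	fmt.Printf("%s: %d errors, %d warnings, %d passed\n",
		r.Database, r.Summary.ErrorCount, r.Summary.WarningCount, r.Summary.PassedCount)
}
```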

## CLI Reference

### Flags

| Flag | Type | Description |
|------|------|-------------|
| `--from` | string | **Required**. Source format (dbml, pgsql, json, yaml, gorm, etc.) |
| `--from-path` | string | Source file path (for file-based formats) |
| `--from-conn` | string | Connection string (for database formats) |
| `--rules` | string | Path to rules YAML file (default: `.relspec-rules.yaml`) |
| `--output-format` | string | Output format: `markdown` or `json` (default: `markdown`) |
| `--output` | string | Output file path (default: stdout) |
| `--schema` | string | Filter to specific schema by name |

### Exit Codes

| Code | Meaning |
|------|---------|
| 0 | Success (no errors, only warnings or all passed) |
| 1 | Validation errors found (rules with `enabled: enforce` failed) |
| 2 | Runtime error (invalid config, reader error, etc.) |
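
These exit codes make it straightforward to gate automation on the outcome. As a minimal Go sketch (assuming `relspec` is on the `PATH` and a `schema.dbml` exists in the working directory):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Run the inspector against a DBML file; flags match the table above.
	cmd := exec.Command("relspec", "inspect",
		"--from", "dbml", "--from-path", "schema.dbml")
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	err := cmd.Run()
	var exitErr *exec.ExitError
	switch {
	case err == nil:
		fmt.Println("schema passed") // exit code 0
	case errors.As(err, &exitErr) && exitErr.ExitCode() == 1:
		fmt.Println("validation errors found") // enforced rules failed
		os.Exit(1)
	default:
		fmt.Println("runtime error:", err) // exit code 2 or exec failure
		os.Exit(2)
	}
}
```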

## CI/CD Integration

### GitHub Actions Example

```yaml
name: Schema Validation

on: [pull_request]

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install RelSpec
        run: go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest

      - name: Validate Schema
        run: |
          relspec inspect \
            --from dbml \
            --from-path schema.dbml \
            --rules .relspec-rules.yaml \
            --output-format json \
            --output validation-report.json

      - name: Upload Report
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: validation-report
          path: validation-report.json
```

### Pre-commit Hook Example

```bash
#!/bin/bash
# .git/hooks/pre-commit

echo "Running schema validation..."

relspec inspect \
  --from dbml \
  --from-path schema.dbml \
  --rules .relspec-rules.yaml

exit $?
```

## Example Configuration File

See [`.relspec-rules.yaml.example`](../../.relspec-rules.yaml.example) for a fully documented example configuration with all available rules and customization options.

## Common Use Cases

### Enforce Naming Standards

```yaml
# Ensure consistent naming across your schema
table_naming_case:
  enabled: enforce
  function: table_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Tables must use snake_case"

column_naming_case:
  enabled: enforce
  function: column_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Columns must use snake_case"

primary_key_naming:
  enabled: enforce
  function: primary_key_naming
  pattern: "^id$"
  message: "Primary key must be named 'id'"

foreign_key_column_naming:
  enabled: enforce
  function: foreign_key_column_naming
  pattern: "^[a-z]+_id$"
  message: "Foreign keys must end with '_id'"
```

### Performance Best Practices

```yaml
# Ensure optimal database performance
foreign_key_index:
  enabled: enforce
  function: foreign_key_index
  require_index: true
  message: "Foreign keys must have indexes"

primary_key_datatype:
  enabled: enforce
  function: primary_key_datatype
  allowed_types: [bigserial, bigint]
  message: "Use bigserial or bigint for primary keys"
```

### Schema Integrity

```yaml
# Prevent common schema issues
missing_primary_key:
  enabled: enforce
  function: have_primary_key
  message: "All tables must have a primary key"

orphaned_foreign_key:
  enabled: enforce
  function: orphaned_foreign_key
  message: "Foreign keys must reference existing tables"

circular_dependency:
  enabled: warn
  function: circular_dependency
  message: "Circular dependencies detected"
```

### Avoid Reserved Keywords

```yaml
reserved_keywords:
  enabled: warn
  function: reserved_words
  check_tables: true
  check_columns: true
  message: "Avoid using SQL reserved keywords"
```

## Programmatic Usage

You can use the inspector programmatically in your Go code:

```go
package main

import (
	"fmt"
	"os"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	// Load your database model
	db := &models.Database{
		Name: "my_database",
		Schemas: []*models.Schema{
			// ... your schema
		},
	}

	// Load rules configuration
	config, err := inspector.LoadConfig(".relspec-rules.yaml")
	if err != nil {
		panic(err)
	}

	// Create and run inspector
	insp := inspector.NewInspector(db, config)
	report, err := insp.Inspect()
	if err != nil {
		panic(err)
	}

	// Generate report
	formatter := inspector.NewMarkdownFormatter(os.Stdout)
	output, err := formatter.Format(report)
	if err != nil {
		panic(err)
	}

	fmt.Println(output)

	// Check for errors
	if report.HasErrors() {
		os.Exit(1)
	}
}
```
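
Both formatters satisfy the same `ReportFormatter` interface (see `report.go` below), so swapping `inspector.NewMarkdownFormatter(os.Stdout)` for `inspector.NewJSONFormatter()` in the example above produces the machine-readable JSON report instead.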

## Contributing

Contributions are welcome! To add a new validator (a sketch follows the list below):

1. Add the validator function to `validators.go`
2. Register it in the `getValidator()` map in `inspector.go`
3. Add a default configuration entry to `GetDefaultConfig()` in `rules.go`
4. Update this README with documentation for the new rule
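
As a sketch of step 1, a custom validator only needs to match the `validatorFunc` signature from `inspector.go` and build results with the `createResult` and `formatLocation` helpers. The example below is hypothetical: it assumes `models.Schema` exposes a `Tables` slice whose elements have a `Name` field (the `models` package is not part of this diff), and it needs `strings` imported:

```go
// validateTablePrefix is a hypothetical validator that checks every table
// name for a "tbl_" prefix. Field names on models.Schema/models.Table are
// assumptions; adjust them to the real models package.
func validateTablePrefix(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			passed := strings.HasPrefix(table.Name, "tbl_")
			results = append(results, createResult(
				ruleName,
				passed,
				rule.Message,
				formatLocation(schema.Name, table.Name, ""),
				map[string]interface{}{
					"schema": schema.Name,
					"table":  table.Name,
				},
			))
		}
	}
	return results
}
```

Step 2 is then a one-line addition to the map in `getValidator()`, e.g. `"table_prefix": validateTablePrefix,`.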

## License

Apache License 2.0 - See [LICENSE](../../LICENSE) for details.

65
pkg/inspector/TODO.md
Normal file
@@ -0,0 +1,65 @@
## Inspector TODO

See the [Inspector README](./README.md) for complete documentation of implemented features.

### Implemented ✓

- [x] Core validation framework with configurable rules
- [x] YAML configuration with three severity levels (enforce/warn/off)
- [x] Generic validators (table_regexpr, column_regexpr)
- [x] Primary key validation (naming, datatype, auto-increment)
- [x] Foreign key validation (column naming, constraint naming, indexes)
- [x] Naming convention validation (snake_case, custom patterns)
- [x] Length validation (table names, column names)
- [x] Reserved keywords detection
- [x] Schema integrity checks (missing PKs, orphaned FKs, circular dependencies)
- [x] Multiple output formats (Markdown with ANSI colors, JSON)
- [x] Terminal-aware color output
- [x] All input formats supported (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- [x] CI/CD integration support (proper exit codes)
- [x] Comprehensive documentation and examples

### Future Enhancements

#### Reporting Enhancements
- [ ] Add verbose mode to show all passing checks in detail
- [ ] Add summary-only mode (suppress violation details)
- [ ] Group violations by table/schema in report
- [ ] Add statistics: most violated rules, tables with most issues
- [ ] HTML report format with interactive filtering

#### Additional Validators
- [ ] Optimal column order for space and storage efficiency
- [ ] Similar-sounding column names detection (synonyms, typos)
- [ ] Plural/singular table name consistency
- [ ] Column order validation (PK first, FKs next, data columns, timestamps last)
- [ ] Data type consistency across related columns
- [ ] Index coverage analysis
- [ ] Unused indexes detection
- [ ] Missing indexes on commonly filtered columns
- [ ] Table size estimates and warnings for large tables
- [ ] Function naming conventions (organization-specific rule sets, e.g. the Bitech rules, driven by a rules file)
- [ ] View naming conventions
- [ ] Enum naming conventions
- [ ] Custom type naming conventions
- [ ] Table name consistency across related tables

#### Auto-Fix Capabilities
- [ ] Auto-fix mode (`relspec inspect --fix`)
- [ ] Update foreign key types to match primary key types
- [ ] Rename foreign keys to match primary key names with configurable prefix/suffix
- [ ] Reorder columns according to rules
- [ ] Add missing indexes on foreign keys
- [ ] Generate migration scripts for fixes
- [ ] Dry-run mode to preview changes

#### Advanced Features
- [ ] Custom validator plugins (Go plugin system)
- [ ] Rule templates for different databases (PostgreSQL, MySQL, etc.)
- [ ] Rule inheritance and composition
- [ ] Conditional rules (apply only to certain schemas/tables)
- [ ] Performance metrics in report (validation time per rule)
- [ ] Caching for large databases
- [ ] Incremental validation (only changed tables)
- [ ] Watch mode for continuous validation

40
pkg/inspector/doc.go
Normal file
@@ -0,0 +1,40 @@
// Package inspector provides database introspection capabilities for live databases.
//
// # Overview
//
// The inspector package contains utilities for connecting to live databases and
// extracting their schema information through system catalog queries and metadata
// inspection.
//
// # Features
//
// - Database connection management
// - Schema metadata extraction
// - Table structure analysis
// - Constraint and index discovery
// - Foreign key relationship mapping
//
// # Supported Databases
//
// - PostgreSQL (via pgx driver)
// - SQLite (via modernc.org/sqlite driver)
//
// # Usage
//
// This package is used internally by database readers (pgsql, sqlite) to perform
// live schema introspection:
//
//	inspector := inspector.NewPostgreSQLInspector(connString)
//	schemas, err := inspector.GetSchemas()
//	tables, err := inspector.GetTables(schemaName)
//
// # Architecture
//
// Each database type has its own inspector implementation that understands the
// specific system catalogs and metadata structures of that database system.
//
// # Security
//
// Inspectors use read-only operations and never modify database structure.
// Connection credentials should be handled securely.
package inspector

182
pkg/inspector/inspector.go
Normal file
@@ -0,0 +1,182 @@
package inspector

import (
	"fmt"
	"time"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// Inspector performs validation on database models
type Inspector struct {
	config *Config
	db     *models.Database
}

// ValidationResult represents the result of a single validation check
type ValidationResult struct {
	RuleName string                 `json:"rule_name"`
	Level    string                 `json:"level"` // "error" or "warning"
	Message  string                 `json:"message"`
	Location string                 `json:"location"` // e.g., "schema.table.column"
	Context  map[string]interface{} `json:"context"`
	Passed   bool                   `json:"passed"`
}

// InspectorReport contains the complete validation report
type InspectorReport struct {
	Summary      ReportSummary      `json:"summary"`
	Violations   []ValidationResult `json:"violations"`
	GeneratedAt  time.Time          `json:"generated_at"`
	Database     string             `json:"database"`
	SourceFormat string             `json:"source_format"`
}

// ReportSummary contains aggregate statistics
type ReportSummary struct {
	TotalRules   int `json:"total_rules"`
	RulesChecked int `json:"rules_checked"`
	RulesSkipped int `json:"rules_skipped"`
	ErrorCount   int `json:"error_count"`
	WarningCount int `json:"warning_count"`
	PassedCount  int `json:"passed_count"`
}

// NewInspector creates a new inspector with the given database and configuration
func NewInspector(db *models.Database, config *Config) *Inspector {
	return &Inspector{
		config: config,
		db:     db,
	}
}

// Inspect runs all enabled validation rules and returns a report
func (i *Inspector) Inspect() (*InspectorReport, error) {
	results := []ValidationResult{}

	// Run all enabled validators
	for ruleName, rule := range i.config.Rules {
		if !rule.IsEnabled() {
			continue
		}

		// Get the validator function for this rule using the function field
		validator, exists := getValidator(rule.Function)
		if !exists {
			// Skip unknown validator functions
			continue
		}

		// Run the validator
		ruleResults := validator(i.db, rule, ruleName)

		// Set the level based on rule configuration
		level := "warning"
		if rule.IsEnforced() {
			level = "error"
		}

		for idx := range ruleResults {
			ruleResults[idx].Level = level
		}

		results = append(results, ruleResults...)
	}

	// Generate summary
	summary := i.generateSummary(results)

	report := &InspectorReport{
		Summary:      summary,
		Violations:   results,
		GeneratedAt:  time.Now(),
		Database:     i.db.Name,
		SourceFormat: i.db.SourceFormat,
	}

	return report, nil
}

// generateSummary creates summary statistics from validation results
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
	summary := ReportSummary{
		TotalRules: len(i.config.Rules),
	}

	// Count enabled rules
	for _, rule := range i.config.Rules {
		if rule.IsEnabled() {
			summary.RulesChecked++
		} else {
			summary.RulesSkipped++
		}
	}

	// Count violations by level
	for _, result := range results {
		if result.Passed {
			summary.PassedCount++
		} else {
			if result.Level == "error" {
				summary.ErrorCount++
			} else {
				summary.WarningCount++
			}
		}
	}

	return summary
}

// HasErrors returns true if the report contains any errors
func (r *InspectorReport) HasErrors() bool {
	return r.Summary.ErrorCount > 0
}

// validatorFunc is a function that validates a rule against a database
type validatorFunc func(*models.Database, Rule, string) []ValidationResult

// getValidator returns the validator function for a given function name
func getValidator(functionName string) (validatorFunc, bool) {
	validators := map[string]validatorFunc{
		"primary_key_naming":            validatePrimaryKeyNaming,
		"primary_key_datatype":          validatePrimaryKeyDatatype,
		"primary_key_auto_increment":    validatePrimaryKeyAutoIncrement,
		"foreign_key_column_naming":     validateForeignKeyColumnNaming,
		"foreign_key_constraint_naming": validateForeignKeyConstraintNaming,
		"foreign_key_index":             validateForeignKeyIndex,
		"table_regexpr":                 validateTableNamingCase,
		"column_regexpr":                validateColumnNamingCase,
		"table_name_length":             validateTableNameLength,
		"column_name_length":            validateColumnNameLength,
		"reserved_words":                validateReservedKeywords,
		"have_primary_key":              validateMissingPrimaryKey,
		"orphaned_foreign_key":          validateOrphanedForeignKey,
		"circular_dependency":           validateCircularDependency,
	}

	fn, exists := validators[functionName]
	return fn, exists
}

// createResult is a helper to create a validation result
func createResult(ruleName string, passed bool, message string, location string, context map[string]interface{}) ValidationResult {
	return ValidationResult{
		RuleName: ruleName,
		Message:  message,
		Location: location,
		Context:  context,
		Passed:   passed,
	}
}

// formatLocation creates a location string from schema, table, and optional column
func formatLocation(schema, table, column string) string {
	if column != "" {
		return fmt.Sprintf("%s.%s.%s", schema, table, column)
	}
	if table != "" {
		return fmt.Sprintf("%s.%s", schema, table)
	}
	return schema
}

238
pkg/inspector/inspector_test.go
Normal file
@@ -0,0 +1,238 @@
package inspector

import (
	"testing"
)

func TestNewInspector(t *testing.T) {
	db := createTestDatabase()
	config := GetDefaultConfig()

	inspector := NewInspector(db, config)

	if inspector == nil {
		t.Fatal("NewInspector() returned nil")
	}

	if inspector.db != db {
		t.Error("NewInspector() database not set correctly")
	}

	if inspector.config != config {
		t.Error("NewInspector() config not set correctly")
	}
}

func TestInspect(t *testing.T) {
	db := createTestDatabase()
	config := GetDefaultConfig()

	inspector := NewInspector(db, config)
	report, err := inspector.Inspect()

	if err != nil {
		t.Fatalf("Inspect() returned error: %v", err)
	}

	if report == nil {
		t.Fatal("Inspect() returned nil report")
	}

	if report.Database != db.Name {
		t.Errorf("Inspect() report.Database = %q, want %q", report.Database, db.Name)
	}

	if report.Summary.TotalRules != len(config.Rules) {
		t.Errorf("Inspect() TotalRules = %d, want %d", report.Summary.TotalRules, len(config.Rules))
	}

	if len(report.Violations) == 0 {
		t.Error("Inspect() returned no violations, expected some results")
	}
}

func TestInspectWithDisabledRules(t *testing.T) {
	db := createTestDatabase()
	config := GetDefaultConfig()

	// Disable all rules
	for name := range config.Rules {
		rule := config.Rules[name]
		rule.Enabled = "off"
		config.Rules[name] = rule
	}

	inspector := NewInspector(db, config)
	report, err := inspector.Inspect()

	if err != nil {
		t.Fatalf("Inspect() with disabled rules returned error: %v", err)
	}

	if report.Summary.RulesChecked != 0 {
		t.Errorf("Inspect() RulesChecked = %d, want 0 (all disabled)", report.Summary.RulesChecked)
	}

	if report.Summary.RulesSkipped != len(config.Rules) {
		t.Errorf("Inspect() RulesSkipped = %d, want %d", report.Summary.RulesSkipped, len(config.Rules))
	}
}

func TestInspectWithEnforcedRules(t *testing.T) {
	db := createTestDatabase()
	config := GetDefaultConfig()

	// Enable only one rule and enforce it
	for name := range config.Rules {
		rule := config.Rules[name]
		rule.Enabled = "off"
		config.Rules[name] = rule
	}

	primaryKeyRule := config.Rules["primary_key_naming"]
	primaryKeyRule.Enabled = "enforce"
	primaryKeyRule.Pattern = "^id$"
	config.Rules["primary_key_naming"] = primaryKeyRule

	inspector := NewInspector(db, config)
	report, err := inspector.Inspect()

	if err != nil {
		t.Fatalf("Inspect() returned error: %v", err)
	}

	if report.Summary.RulesChecked != 1 {
		t.Errorf("Inspect() RulesChecked = %d, want 1", report.Summary.RulesChecked)
	}

	// All results should be at error level for enforced rules
	for _, violation := range report.Violations {
		if violation.Level != "error" {
			t.Errorf("Enforced rule violation has Level = %q, want \"error\"", violation.Level)
		}
	}
}

func TestGenerateSummary(t *testing.T) {
	db := createTestDatabase()
	config := GetDefaultConfig()
	inspector := NewInspector(db, config)

	results := []ValidationResult{
		{RuleName: "rule1", Passed: true, Level: "error"},
		{RuleName: "rule2", Passed: false, Level: "error"},
		{RuleName: "rule3", Passed: false, Level: "warning"},
		{RuleName: "rule4", Passed: true, Level: "warning"},
	}

	summary := inspector.generateSummary(results)

	if summary.PassedCount != 2 {
		t.Errorf("generateSummary() PassedCount = %d, want 2", summary.PassedCount)
	}

	if summary.ErrorCount != 1 {
		t.Errorf("generateSummary() ErrorCount = %d, want 1", summary.ErrorCount)
	}

	if summary.WarningCount != 1 {
		t.Errorf("generateSummary() WarningCount = %d, want 1", summary.WarningCount)
	}
}

func TestHasErrors(t *testing.T) {
	tests := []struct {
		name   string
		report *InspectorReport
		want   bool
	}{
		{
			name: "with errors",
			report: &InspectorReport{
				Summary: ReportSummary{
					ErrorCount: 5,
				},
			},
			want: true,
		},
		{
			name: "without errors",
			report: &InspectorReport{
				Summary: ReportSummary{
					ErrorCount:   0,
					WarningCount: 3,
				},
			},
			want: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := tt.report.HasErrors(); got != tt.want {
				t.Errorf("HasErrors() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestGetValidator(t *testing.T) {
	tests := []struct {
		name         string
		functionName string
		wantExists   bool
	}{
		{"primary_key_naming", "primary_key_naming", true},
		{"primary_key_datatype", "primary_key_datatype", true},
		{"foreign_key_column_naming", "foreign_key_column_naming", true},
		{"table_regexpr", "table_regexpr", true},
		{"column_regexpr", "column_regexpr", true},
		{"reserved_words", "reserved_words", true},
		{"have_primary_key", "have_primary_key", true},
		{"orphaned_foreign_key", "orphaned_foreign_key", true},
		{"circular_dependency", "circular_dependency", true},
		{"unknown_function", "unknown_function", false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, exists := getValidator(tt.functionName)
			if exists != tt.wantExists {
				t.Errorf("getValidator(%q) exists = %v, want %v", tt.functionName, exists, tt.wantExists)
			}
		})
	}
}

func TestCreateResult(t *testing.T) {
	result := createResult(
		"test_rule",
		true,
		"Test message",
		"schema.table.column",
		map[string]interface{}{
			"key1": "value1",
			"key2": 42,
		},
	)

	if result.RuleName != "test_rule" {
		t.Errorf("createResult() RuleName = %q, want \"test_rule\"", result.RuleName)
	}

	if !result.Passed {
		t.Error("createResult() Passed = false, want true")
	}

	if result.Message != "Test message" {
		t.Errorf("createResult() Message = %q, want \"Test message\"", result.Message)
	}

	if result.Location != "schema.table.column" {
		t.Errorf("createResult() Location = %q, want \"schema.table.column\"", result.Location)
	}

	if len(result.Context) != 2 {
		t.Errorf("createResult() Context length = %d, want 2", len(result.Context))
	}
}

229
pkg/inspector/report.go
Normal file
@@ -0,0 +1,229 @@
package inspector

import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"strings"
	"time"
)

// ANSI color codes
const (
	colorReset  = "\033[0m"
	colorRed    = "\033[31m"
	colorYellow = "\033[33m"
	colorGreen  = "\033[32m"
	colorBold   = "\033[1m"
)

// ReportFormatter defines the interface for report formatters
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

// MarkdownFormatter formats reports as markdown
type MarkdownFormatter struct {
	UseColors bool
}

// JSONFormatter formats reports as JSON
type JSONFormatter struct{}

// NewMarkdownFormatter creates a markdown formatter with color support detection
func NewMarkdownFormatter(writer io.Writer) *MarkdownFormatter {
	return &MarkdownFormatter{
		UseColors: isTerminal(writer),
	}
}

// NewJSONFormatter creates a JSON formatter
func NewJSONFormatter() *JSONFormatter {
	return &JSONFormatter{}
}

// Format generates a markdown report
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
	var sb strings.Builder

	// Header
	sb.WriteString(f.formatHeader("RelSpec Inspector Report"))
	sb.WriteString("\n\n")

	// Metadata
	sb.WriteString(f.formatBold("Database:") + " " + report.Database + "\n")
	sb.WriteString(f.formatBold("Source Format:") + " " + report.SourceFormat + "\n")
	sb.WriteString(f.formatBold("Generated:") + " " + report.GeneratedAt.Format(time.RFC3339) + "\n")
	sb.WriteString("\n")

	// Summary
	sb.WriteString(f.formatHeader("Summary"))
	sb.WriteString("\n")
	sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))

	// Color-code error and warning counts
	if report.Summary.ErrorCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
	} else {
		sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
	}

	if report.Summary.WarningCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
	} else {
		sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
	}

	if report.Summary.PassedCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Passed: %d\n", report.Summary.PassedCount), colorGreen))
	}

	sb.WriteString("\n")

	// Group violations by level
	errors := []ValidationResult{}
	warnings := []ValidationResult{}

	for _, v := range report.Violations {
		if !v.Passed {
			if v.Level == "error" {
				errors = append(errors, v)
			} else {
				warnings = append(warnings, v)
			}
		}
	}

	// Report violations
	if len(errors) > 0 || len(warnings) > 0 {
		sb.WriteString(f.formatHeader("Violations"))
		sb.WriteString("\n")

		// Errors
		if len(errors) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Errors (%d)", len(errors)), colorRed))
			sb.WriteString("\n")
			for _, violation := range errors {
				sb.WriteString(f.formatViolation(violation, colorRed))
			}
			sb.WriteString("\n")
		}

		// Warnings
		if len(warnings) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Warnings (%d)", len(warnings)), colorYellow))
			sb.WriteString("\n")
			for _, violation := range warnings {
				sb.WriteString(f.formatViolation(violation, colorYellow))
			}
		}
	} else {
		sb.WriteString(f.colorize("✓ No violations found!\n", colorGreen))
	}

	return sb.String(), nil
}

// Format generates a JSON report
func (f *JSONFormatter) Format(report *InspectorReport) (string, error) {
	data, err := json.MarshalIndent(report, "", " ")
	if err != nil {
		return "", fmt.Errorf("failed to marshal report to JSON: %w", err)
	}
	return string(data), nil
}

// Helper methods for MarkdownFormatter

func (f *MarkdownFormatter) formatHeader(text string) string {
	return f.formatBold("# " + text)
}

func (f *MarkdownFormatter) formatSubheader(text string, color string) string {
	header := "### " + text
	if f.UseColors {
		return color + colorBold + header + colorReset
	}
	return header
}

func (f *MarkdownFormatter) formatBold(text string) string {
	if f.UseColors {
		return colorBold + text + colorReset
	}
	return "**" + text + "**"
}

func (f *MarkdownFormatter) colorize(text string, color string) string {
	if f.UseColors {
		return color + text + colorReset
	}
	return text
}

func (f *MarkdownFormatter) formatViolation(v ValidationResult, color string) string {
	var sb strings.Builder

	// Rule name as header
	if f.UseColors {
		sb.WriteString(color + "#### " + v.RuleName + colorReset + "\n")
	} else {
		sb.WriteString("#### " + v.RuleName + "\n")
	}

	// Location and message
	sb.WriteString(f.formatBold("Location:") + " " + v.Location + "\n")
	sb.WriteString(f.formatBold("Message:") + " " + v.Message + "\n")

	// Context details (optional, only show interesting ones)
	if len(v.Context) > 0 {
		contextStr := f.formatContext(v.Context)
		if contextStr != "" {
			sb.WriteString(f.formatBold("Details:") + " " + contextStr + "\n")
		}
	}

	sb.WriteString("\n")
	return sb.String()
}

func (f *MarkdownFormatter) formatContext(context map[string]interface{}) string {
	// Extract relevant context information
	var parts []string

	// Skip schema, table, column as they're in location
	skipKeys := map[string]bool{
		"schema": true,
		"table":  true,
		"column": true,
	}

	for key, value := range context {
		if skipKeys[key] {
			continue
		}

		parts = append(parts, fmt.Sprintf("%s=%v", key, value))
	}

	return strings.Join(parts, ", ")
}

// isTerminal checks if the writer is a terminal (supports ANSI colors)
func isTerminal(w io.Writer) bool {
	file, ok := w.(*os.File)
	if !ok {
		return false
	}

	// Check if the file descriptor is a terminal
	stat, err := file.Stat()
	if err != nil {
		return false
	}

	// Check if it's a character device (terminal)
	// This works on Unix-like systems
	return (stat.Mode() & os.ModeCharDevice) != 0
}

366
pkg/inspector/report_test.go
Normal file
@@ -0,0 +1,366 @@
package inspector

import (
	"bytes"
	"encoding/json"
	"strings"
	"testing"
	"time"
)

func createTestReport() *InspectorReport {
	return &InspectorReport{
		Summary: ReportSummary{
			TotalRules:   10,
			RulesChecked: 8,
			RulesSkipped: 2,
			ErrorCount:   3,
			WarningCount: 5,
			PassedCount:  12,
		},
		Violations: []ValidationResult{
			{
				RuleName: "primary_key_naming",
				Level:    "error",
				Message:  "Primary key should start with 'id_'",
				Location: "public.users.user_id",
				Passed:   false,
				Context: map[string]interface{}{
					"schema":  "public",
					"table":   "users",
					"column":  "user_id",
					"pattern": "^id_",
				},
			},
			{
				RuleName: "table_name_length",
				Level:    "warning",
				Message:  "Table name too long",
				Location: "public.very_long_table_name_that_exceeds_limits",
				Passed:   false,
				Context: map[string]interface{}{
					"schema":     "public",
					"table":      "very_long_table_name_that_exceeds_limits",
					"length":     44,
					"max_length": 32,
				},
			},
		},
		GeneratedAt:  time.Now(),
		Database:     "testdb",
		SourceFormat: "postgresql",
	}
}

func TestNewMarkdownFormatter(t *testing.T) {
	var buf bytes.Buffer
	formatter := NewMarkdownFormatter(&buf)

	if formatter == nil {
		t.Fatal("NewMarkdownFormatter() returned nil")
	}

	// Buffer is not a terminal, so colors should be disabled
	if formatter.UseColors {
		t.Error("NewMarkdownFormatter() UseColors should be false for non-terminal")
	}
}

func TestNewJSONFormatter(t *testing.T) {
	formatter := NewJSONFormatter()

	if formatter == nil {
		t.Fatal("NewJSONFormatter() returned nil")
	}
}

func TestMarkdownFormatter_Format(t *testing.T) {
	report := createTestReport()
	var buf bytes.Buffer
	formatter := NewMarkdownFormatter(&buf)

	output, err := formatter.Format(report)
	if err != nil {
		t.Fatalf("MarkdownFormatter.Format() returned error: %v", err)
	}

	// Check that output contains expected sections
	if !strings.Contains(output, "# RelSpec Inspector Report") {
		t.Error("Markdown output missing header")
	}

	if !strings.Contains(output, "Database:") {
		t.Error("Markdown output missing database field")
	}

	if !strings.Contains(output, "testdb") {
		t.Error("Markdown output missing database name")
	}

	if !strings.Contains(output, "Summary") {
		t.Error("Markdown output missing summary section")
	}

	if !strings.Contains(output, "Rules Checked: 8") {
		t.Error("Markdown output missing rules checked count")
	}

	if !strings.Contains(output, "Errors: 3") {
		t.Error("Markdown output missing error count")
	}

	if !strings.Contains(output, "Warnings: 5") {
		t.Error("Markdown output missing warning count")
	}

	if !strings.Contains(output, "Violations") {
		t.Error("Markdown output missing violations section")
	}

	if !strings.Contains(output, "primary_key_naming") {
		t.Error("Markdown output missing rule name")
	}

	if !strings.Contains(output, "public.users.user_id") {
		t.Error("Markdown output missing location")
	}
}

func TestMarkdownFormatter_FormatNoViolations(t *testing.T) {
	report := &InspectorReport{
		Summary: ReportSummary{
			TotalRules:   10,
			RulesChecked: 10,
			RulesSkipped: 0,
			ErrorCount:   0,
			WarningCount: 0,
			PassedCount:  50,
		},
		Violations:   []ValidationResult{},
		GeneratedAt:  time.Now(),
		Database:     "testdb",
		SourceFormat: "postgresql",
	}

	var buf bytes.Buffer
	formatter := NewMarkdownFormatter(&buf)

	output, err := formatter.Format(report)
	if err != nil {
		t.Fatalf("MarkdownFormatter.Format() returned error: %v", err)
	}

	if !strings.Contains(output, "No violations found") {
		t.Error("Markdown output should indicate no violations")
	}
}

func TestJSONFormatter_Format(t *testing.T) {
	report := createTestReport()
	formatter := NewJSONFormatter()

	output, err := formatter.Format(report)
	if err != nil {
		t.Fatalf("JSONFormatter.Format() returned error: %v", err)
	}

	// Verify it's valid JSON
	var decoded InspectorReport
	if err := json.Unmarshal([]byte(output), &decoded); err != nil {
		t.Fatalf("JSONFormatter.Format() produced invalid JSON: %v", err)
	}

	// Check key fields
	if decoded.Database != "testdb" {
		t.Errorf("JSON decoded Database = %q, want \"testdb\"", decoded.Database)
	}

	if decoded.Summary.ErrorCount != 3 {
		t.Errorf("JSON decoded ErrorCount = %d, want 3", decoded.Summary.ErrorCount)
	}

	if len(decoded.Violations) != 2 {
		t.Errorf("JSON decoded Violations length = %d, want 2", len(decoded.Violations))
	}
}

func TestMarkdownFormatter_FormatHeader(t *testing.T) {
	var buf bytes.Buffer
	formatter := NewMarkdownFormatter(&buf)

	header := formatter.formatHeader("Test Header")

	if !strings.Contains(header, "# Test Header") {
		t.Errorf("formatHeader() = %q, want to contain \"# Test Header\"", header)
	}
}

func TestMarkdownFormatter_FormatBold(t *testing.T) {
	tests := []struct {
		name         string
		useColors    bool
		text         string
		wantContains string
	}{
		{
			name:         "without colors",
			useColors:    false,
			text:         "Bold Text",
			wantContains: "**Bold Text**",
		},
		{
			name:         "with colors",
			useColors:    true,
			text:         "Bold Text",
			wantContains: "Bold Text",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			formatter := &MarkdownFormatter{UseColors: tt.useColors}
			result := formatter.formatBold(tt.text)

			if !strings.Contains(result, tt.wantContains) {
				t.Errorf("formatBold() = %q, want to contain %q", result, tt.wantContains)
			}
		})
	}
}

func TestMarkdownFormatter_Colorize(t *testing.T) {
	tests := []struct {
		name      string
		useColors bool
		text      string
		color     string
		wantColor bool
	}{
		{
			name:      "without colors",
			useColors: false,
			text:      "Test",
			color:     colorRed,
			wantColor: false,
		},
		{
			name:      "with colors",
			useColors: true,
			text:      "Test",
			color:     colorRed,
			wantColor: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			formatter := &MarkdownFormatter{UseColors: tt.useColors}
			result := formatter.colorize(tt.text, tt.color)

			hasColor := strings.Contains(result, tt.color)
			if hasColor != tt.wantColor {
				t.Errorf("colorize() has color codes = %v, want %v", hasColor, tt.wantColor)
			}

			if !strings.Contains(result, tt.text) {
				t.Errorf("colorize() doesn't contain original text %q", tt.text)
			}
		})
	}
}

func TestMarkdownFormatter_FormatContext(t *testing.T) {
	formatter := &MarkdownFormatter{UseColors: false}

	context := map[string]interface{}{
		"schema":     "public",
		"table":      "users",
		"column":     "id",
		"pattern":    "^id_",
		"max_length": 64,
	}

	result := formatter.formatContext(context)

	// Should not include schema, table, column (they're in location)
	if strings.Contains(result, "schema") {
		t.Error("formatContext() should skip schema field")
	}

	if strings.Contains(result, "table=") {
		t.Error("formatContext() should skip table field")
	}

	if strings.Contains(result, "column=") {
		t.Error("formatContext() should skip column field")
	}

	// Should include other fields
	if !strings.Contains(result, "pattern") {
		t.Error("formatContext() should include pattern field")
	}

	if !strings.Contains(result, "max_length") {
		t.Error("formatContext() should include max_length field")
	}
}

func TestMarkdownFormatter_FormatViolation(t *testing.T) {
	formatter := &MarkdownFormatter{UseColors: false}

	violation := ValidationResult{
		RuleName: "test_rule",
		Level:    "error",
		Message:  "Test violation message",
		Location: "public.users.id",
		Passed:   false,
		Context: map[string]interface{}{
			"pattern": "^id_",
		},
	}

	result := formatter.formatViolation(violation, colorRed)

	if !strings.Contains(result, "test_rule") {
		t.Error("formatViolation() should include rule name")
	}

	if !strings.Contains(result, "Test violation message") {
		t.Error("formatViolation() should include message")
	}

	if !strings.Contains(result, "public.users.id") {
		t.Error("formatViolation() should include location")
	}

	if !strings.Contains(result, "Location:") {
		t.Error("formatViolation() should include Location label")
	}

	if !strings.Contains(result, "Message:") {
		t.Error("formatViolation() should include Message label")
	}
}

func TestReportFormatConstants(t *testing.T) {
	// Test that color constants are defined
	if colorReset == "" {
		t.Error("colorReset is not defined")
	}

	if colorRed == "" {
		t.Error("colorRed is not defined")
	}

	if colorYellow == "" {
		t.Error("colorYellow is not defined")
	}

	if colorGreen == "" {
		t.Error("colorGreen is not defined")
	}

	if colorBold == "" {
		t.Error("colorBold is not defined")
	}
}

169
pkg/inspector/rules.go
Normal file
@@ -0,0 +1,169 @@
package inspector

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

// Config represents the inspector rules configuration
type Config struct {
	Version string          `yaml:"version"`
	Rules   map[string]Rule `yaml:"rules"`
}

// Rule represents a single validation rule
type Rule struct {
	Enabled              string   `yaml:"enabled"`  // "enforce", "warn", "off"
	Function             string   `yaml:"function"` // validator function name
	Message              string   `yaml:"message"`
	Pattern              string   `yaml:"pattern,omitempty"`
	AllowedTypes         []string `yaml:"allowed_types,omitempty"`
	MaxLength            int      `yaml:"max_length,omitempty"`
	Case                 string   `yaml:"case,omitempty"`
	RequireIndex         bool     `yaml:"require_index,omitempty"`
	CheckTables          bool     `yaml:"check_tables,omitempty"`
	CheckColumns         bool     `yaml:"check_columns,omitempty"`
	RequireAutoIncrement bool     `yaml:"require_auto_increment,omitempty"`
}

// LoadConfig loads configuration from a YAML file.
// If the file doesn't exist, returns default configuration.
// If the file exists but is invalid, returns an error.
func LoadConfig(path string) (*Config, error) {
	// Check if file exists
	if _, err := os.Stat(path); os.IsNotExist(err) {
		// File doesn't exist, use defaults
		return GetDefaultConfig(), nil
	}

	// Read file
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// Parse YAML
	var config Config
	if err := yaml.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("failed to parse config YAML: %w", err)
	}

	return &config, nil
}

// GetDefaultConfig returns the default inspector configuration.
// All rules are enabled at "warn" level by default.
func GetDefaultConfig() *Config {
	return &Config{
		Version: "1.0",
		Rules: map[string]Rule{
			// Primary Key Rules
			"primary_key_naming": {
				Enabled:  "warn",
				Function: "primary_key_naming",
				Pattern:  "^id_",
				Message:  "Primary key columns should start with 'id_'",
			},
			"primary_key_datatype": {
				Enabled:      "warn",
				Function:     "primary_key_datatype",
				AllowedTypes: []string{"bigserial", "bigint", "int", "serial", "integer", "int4", "int8"},
				Message:      "Primary keys should use integer types (bigserial, bigint, int, serial)",
			},
			"primary_key_auto_increment": {
				Enabled:              "off",
				Function:             "primary_key_auto_increment",
				RequireAutoIncrement: true,
				Message:              "Primary key without auto-increment detected",
			},

			// Foreign Key Rules
			"foreign_key_column_naming": {
				Enabled:  "warn",
				Function: "foreign_key_column_naming",
				Pattern:  "^rid_",
				Message:  "Foreign key columns should start with 'rid_'",
			},
			"foreign_key_constraint_naming": {
				Enabled:  "warn",
				Function: "foreign_key_constraint_naming",
				Pattern:  "^fk_",
				Message:  "Foreign key constraint names should start with 'fk_'",
			},
			"foreign_key_index": {
				Enabled:      "warn",
				Function:     "foreign_key_index",
				RequireIndex: true,
				Message:      "Foreign key columns should have indexes for optimal performance",
			},

			// Naming Convention Rules
			"table_naming_case": {
				Enabled:  "warn",
				Function: "table_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Table names should be lowercase with underscores (snake_case)",
			},
			"column_naming_case": {
				Enabled:  "warn",
				Function: "column_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Column names should be lowercase with underscores (snake_case)",
			},

			// Length Rules
			"table_name_length": {
				Enabled:   "warn",
				Function:  "table_name_length",
				MaxLength: 64,
				Message:   "Table name exceeds recommended maximum length of 64 characters",
			},
			"column_name_length": {
				Enabled:   "warn",
				Function:  "column_name_length",
				MaxLength: 64,
				Message:   "Column name exceeds recommended maximum length of 64 characters",
			},

			// Reserved Keywords
			"reserved_keywords": {
				Enabled:      "warn",
				Function:     "reserved_words",
				CheckTables:  true,
				CheckColumns: true,
				Message:      "Using SQL reserved keywords as identifiers can cause issues",
			},

			// Schema Integrity Rules
			"missing_primary_key": {
				Enabled:  "warn",
				Function: "have_primary_key",
				Message:  "Table is missing a primary key",
			},
			"orphaned_foreign_key": {
				Enabled:  "warn",
				Function: "orphaned_foreign_key",
				Message:  "Foreign key references a non-existent table",
			},
			"circular_dependency": {
				Enabled:  "warn",
				Function: "circular_dependency",
				Message:  "Circular foreign key dependency detected",
			},
		},
	}
}

// IsEnabled returns true if the rule is enabled (either "enforce" or "warn")
func (r *Rule) IsEnabled() bool {
	return r.Enabled == "enforce" || r.Enabled == "warn"
}

// IsEnforced returns true if the rule is set to "enforce" level
func (r *Rule) IsEnforced() bool {
	return r.Enabled == "enforce"
}
249
pkg/inspector/rules_test.go
Normal file
249
pkg/inspector/rules_test.go
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestGetDefaultConfig(t *testing.T) {
|
||||||
|
config := GetDefaultConfig()
|
||||||
|
|
||||||
|
if config == nil {
|
||||||
|
t.Fatal("GetDefaultConfig() returned nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Version != "1.0" {
|
||||||
|
t.Errorf("GetDefaultConfig() Version = %q, want \"1.0\"", config.Version)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(config.Rules) == 0 {
|
||||||
|
t.Error("GetDefaultConfig() returned no rules")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that all expected rules are present
|
||||||
|
expectedRules := []string{
|
||||||
|
"primary_key_naming",
|
||||||
|
"primary_key_datatype",
|
||||||
|
"primary_key_auto_increment",
|
||||||
|
"foreign_key_column_naming",
|
||||||
|
"foreign_key_constraint_naming",
|
||||||
|
"foreign_key_index",
|
||||||
|
"table_naming_case",
|
||||||
|
"column_naming_case",
|
||||||
|
"table_name_length",
|
||||||
|
"column_name_length",
|
||||||
|
"reserved_keywords",
|
||||||
|
"missing_primary_key",
|
||||||
|
"orphaned_foreign_key",
|
||||||
|
"circular_dependency",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, ruleName := range expectedRules {
|
||||||
|
if _, exists := config.Rules[ruleName]; !exists {
|
||||||
|
t.Errorf("GetDefaultConfig() missing rule: %q", ruleName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadConfig_NonExistentFile(t *testing.T) {
|
||||||
|
// Try to load a non-existent file
|
||||||
|
config, err := LoadConfig("/path/to/nonexistent/file.yaml")
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("LoadConfig() with non-existent file returned error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should return default config
|
||||||
|
if config == nil {
|
||||||
|
t.Fatal("LoadConfig() returned nil config for non-existent file")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(config.Rules) == 0 {
|
||||||
|
t.Error("LoadConfig() returned config with no rules")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadConfig_ValidFile(t *testing.T) {
|
||||||
|
// Create a temporary config file
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
configPath := filepath.Join(tmpDir, "test-config.yaml")
|
||||||
|
|
||||||
|
configContent := `version: "1.0"
|
||||||
|
rules:
|
||||||
|
primary_key_naming:
|
||||||
|
enabled: "enforce"
|
||||||
|
function: "primary_key_naming"
|
||||||
|
pattern: "^pk_"
|
||||||
|
message: "Primary keys must start with pk_"
|
||||||
|
table_name_length:
|
||||||
|
enabled: "warn"
|
||||||
|
function: "table_name_length"
|
||||||
|
max_length: 50
|
||||||
|
message: "Table name too long"
|
||||||
|
`
|
||||||
|
|
||||||
|
err := os.WriteFile(configPath, []byte(configContent), 0644)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create test config file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
config, err := LoadConfig(configPath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("LoadConfig() returned error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.Version != "1.0" {
|
||||||
|
t.Errorf("LoadConfig() Version = %q, want \"1.0\"", config.Version)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(config.Rules) != 2 {
|
||||||
|
t.Errorf("LoadConfig() loaded %d rules, want 2", len(config.Rules))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check primary_key_naming rule
|
||||||
|
pkRule, exists := config.Rules["primary_key_naming"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("LoadConfig() missing primary_key_naming rule")
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkRule.Enabled != "enforce" {
|
||||||
|
t.Errorf("primary_key_naming.Enabled = %q, want \"enforce\"", pkRule.Enabled)
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkRule.Pattern != "^pk_" {
|
||||||
|
t.Errorf("primary_key_naming.Pattern = %q, want \"^pk_\"", pkRule.Pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check table_name_length rule
|
||||||
|
lengthRule, exists := config.Rules["table_name_length"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("LoadConfig() missing table_name_length rule")
|
||||||
|
}
|
||||||
|
|
||||||
|
if lengthRule.MaxLength != 50 {
|
||||||
|
t.Errorf("table_name_length.MaxLength = %d, want 50", lengthRule.MaxLength)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadConfig_InvalidYAML(t *testing.T) {
|
||||||
|
// Create a temporary invalid config file
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
configPath := filepath.Join(tmpDir, "invalid-config.yaml")
|
||||||
|
|
||||||
|
invalidContent := `invalid: yaml: content: {[}]`
|
||||||
|
|
||||||
|
err := os.WriteFile(configPath, []byte(invalidContent), 0644)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create test config file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = LoadConfig(configPath)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("LoadConfig() with invalid YAML did not return error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRuleIsEnabled(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "enforce is enabled",
|
||||||
|
rule: Rule{Enabled: "enforce"},
|
||||||
|
want: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "warn is enabled",
|
||||||
|
rule: Rule{Enabled: "warn"},
|
||||||
|
want: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "off is not enabled",
|
||||||
|
rule: Rule{Enabled: "off"},
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty is not enabled",
|
||||||
|
rule: Rule{Enabled: ""},
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if got := tt.rule.IsEnabled(); got != tt.want {
|
||||||
|
t.Errorf("Rule.IsEnabled() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRuleIsEnforced(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "enforce is enforced",
|
||||||
|
rule: Rule{Enabled: "enforce"},
|
||||||
|
want: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "warn is not enforced",
|
||||||
|
rule: Rule{Enabled: "warn"},
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "off is not enforced",
|
||||||
|
rule: Rule{Enabled: "off"},
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if got := tt.rule.IsEnforced(); got != tt.want {
|
||||||
|
t.Errorf("Rule.IsEnforced() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDefaultConfigRuleSettings(t *testing.T) {
|
||||||
|
config := GetDefaultConfig()
|
||||||
|
|
||||||
|
// Test specific rule settings
|
||||||
|
pkNamingRule := config.Rules["primary_key_naming"]
|
||||||
|
if pkNamingRule.Function != "primary_key_naming" {
|
||||||
|
t.Errorf("primary_key_naming.Function = %q, want \"primary_key_naming\"", pkNamingRule.Function)
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkNamingRule.Pattern != "^id_" {
|
||||||
|
t.Errorf("primary_key_naming.Pattern = %q, want \"^id_\"", pkNamingRule.Pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test datatype rule
|
||||||
|
pkDatatypeRule := config.Rules["primary_key_datatype"]
|
||||||
|
if len(pkDatatypeRule.AllowedTypes) == 0 {
|
||||||
|
t.Error("primary_key_datatype has no allowed types")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test length rule
|
||||||
|
tableLengthRule := config.Rules["table_name_length"]
|
||||||
|
if tableLengthRule.MaxLength != 64 {
|
||||||
|
t.Errorf("table_name_length.MaxLength = %d, want 64", tableLengthRule.MaxLength)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test reserved keywords rule
|
||||||
|
reservedRule := config.Rules["reserved_keywords"]
|
||||||
|
if !reservedRule.CheckTables {
|
||||||
|
t.Error("reserved_keywords.CheckTables should be true")
|
||||||
|
}
|
||||||
|
if !reservedRule.CheckColumns {
|
||||||
|
t.Error("reserved_keywords.CheckColumns should be true")
|
||||||
|
}
|
||||||
|
}
|
||||||
603
pkg/inspector/validators.go
Normal file
603
pkg/inspector/validators.go
Normal file
@@ -0,0 +1,603 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
|
||||||
|
)
|
||||||
|
|
||||||
|
// validatePrimaryKeyNaming checks that primary key column names match a pattern
|
||||||
|
func validatePrimaryKeyNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := pattern.MatchString(col.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validatePrimaryKeyDatatype checks that primary keys use approved data types
|
||||||
|
func validatePrimaryKeyDatatype(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
|
||||||
|
// Normalize type (remove size/precision)
|
||||||
|
normalizedType := normalizeDataType(col.Type)
|
||||||
|
passed := contains(rule.AllowedTypes, normalizedType)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"current_type": col.Type,
|
||||||
|
"allowed_types": rule.AllowedTypes,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validatePrimaryKeyAutoIncrement checks primary key auto-increment settings
|
||||||
|
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
|
||||||
|
// Check if auto-increment matches requirement
|
||||||
|
passed := col.AutoIncrement == rule.RequireAutoIncrement
|
||||||
|
|
||||||
|
if !passed {
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
false,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"has_auto_increment": col.AutoIncrement,
|
||||||
|
"require_auto_increment": rule.RequireAutoIncrement,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyColumnNaming checks that foreign key column names match a pattern
|
||||||
|
func validateForeignKeyColumnNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check foreign key constraints
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, colName := range constraint.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, colName)
|
||||||
|
passed := pattern.MatchString(colName)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": colName,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyConstraintNaming checks that foreign key constraint names match a pattern
|
||||||
|
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := pattern.MatchString(constraint.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyIndex checks that foreign key columns have indexes
|
||||||
|
func validateForeignKeyIndex(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
if !rule.RequireIndex {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Get all foreign key columns
|
||||||
|
fkColumns := make(map[string]bool)
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, col := range constraint.Columns {
|
||||||
|
fkColumns[col] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if each FK column has an index
|
||||||
|
for fkCol := range fkColumns {
|
||||||
|
hasIndex := false
|
||||||
|
|
||||||
|
// Check table indexes
|
||||||
|
for _, index := range table.Indexes {
|
||||||
|
// Index is good if FK column is the first column
|
||||||
|
if len(index.Columns) > 0 && index.Columns[0] == fkCol {
|
||||||
|
hasIndex = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, fkCol)
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
hasIndex,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": fkCol,
|
||||||
|
"has_index": hasIndex,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateTableNamingCase checks table name casing
|
||||||
|
func validateTableNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := pattern.MatchString(table.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"expected_case": rule.Case,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateColumnNamingCase checks column name casing
|
||||||
|
func validateColumnNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := pattern.MatchString(col.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"expected_case": rule.Case,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateTableNameLength checks table name length
|
||||||
|
func validateTableNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := len(table.Name) <= rule.MaxLength
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"length": len(table.Name),
|
||||||
|
"max_length": rule.MaxLength,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateColumnNameLength checks column name length
|
||||||
|
func validateColumnNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := len(col.Name) <= rule.MaxLength
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"length": len(col.Name),
|
||||||
|
"max_length": rule.MaxLength,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateReservedKeywords checks for reserved SQL keywords
|
||||||
|
func validateReservedKeywords(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build keyword map from PostgreSQL keywords
|
||||||
|
keywordSlice := pgsql.GetPostgresKeywords()
|
||||||
|
keywords := make(map[string]bool)
|
||||||
|
for _, kw := range keywordSlice {
|
||||||
|
keywords[strings.ToUpper(kw)] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check table name
|
||||||
|
if rule.CheckTables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := !keywords[strings.ToUpper(table.Name)]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"object_type": "table",
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check column names
|
||||||
|
if rule.CheckColumns {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := !keywords[strings.ToUpper(col.Name)]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"object_type": "column",
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateMissingPrimaryKey checks for tables without primary keys
|
||||||
|
func validateMissingPrimaryKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
hasPrimaryKey := false
|
||||||
|
|
||||||
|
// Check columns for primary key
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
hasPrimaryKey = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also check constraints
|
||||||
|
if !hasPrimaryKey {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.PrimaryKeyConstraint {
|
||||||
|
hasPrimaryKey = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
hasPrimaryKey,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateOrphanedForeignKey checks for foreign keys referencing non-existent tables
|
||||||
|
func validateOrphanedForeignKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build a map of existing tables for quick lookup
|
||||||
|
tableExists := make(map[string]bool)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
key := schema.Name + "." + table.Name
|
||||||
|
tableExists[key] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check all foreign key constraints
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
// Build referenced table key
|
||||||
|
refSchema := constraint.ReferencedSchema
|
||||||
|
if refSchema == "" {
|
||||||
|
refSchema = schema.Name
|
||||||
|
}
|
||||||
|
refKey := refSchema + "." + constraint.ReferencedTable
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := tableExists[refKey]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"referenced_schema": refSchema,
|
||||||
|
"referenced_table": constraint.ReferencedTable,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateCircularDependency checks for circular foreign key dependencies
|
||||||
|
func validateCircularDependency(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build dependency graph
|
||||||
|
dependencies := make(map[string][]string)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
tableKey := schema.Name + "." + table.Name
|
||||||
|
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
refSchema := constraint.ReferencedSchema
|
||||||
|
if refSchema == "" {
|
||||||
|
refSchema = schema.Name
|
||||||
|
}
|
||||||
|
refKey := refSchema + "." + constraint.ReferencedTable
|
||||||
|
|
||||||
|
dependencies[tableKey] = append(dependencies[tableKey], refKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for cycles using DFS
|
||||||
|
for tableKey := range dependencies {
|
||||||
|
visited := make(map[string]bool)
|
||||||
|
recStack := make(map[string]bool)
|
||||||
|
|
||||||
|
if hasCycle(tableKey, dependencies, visited, recStack) {
|
||||||
|
parts := strings.Split(tableKey, ".")
|
||||||
|
location := formatLocation(parts[0], parts[1], "")
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
false,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": parts[0],
|
||||||
|
"table": parts[1],
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper functions
|
||||||
|
|
||||||
|
// hasCycle performs DFS to detect cycles in dependency graph
|
||||||
|
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
|
||||||
|
visited[node] = true
|
||||||
|
recStack[node] = true
|
||||||
|
|
||||||
|
for _, neighbor := range graph[node] {
|
||||||
|
if !visited[neighbor] {
|
||||||
|
if hasCycle(neighbor, graph, visited, recStack) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
} else if recStack[neighbor] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
recStack[node] = false
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizeDataType removes size/precision from data type
|
||||||
|
func normalizeDataType(dataType string) string {
|
||||||
|
// Remove everything in parentheses
|
||||||
|
idx := strings.Index(dataType, "(")
|
||||||
|
if idx > 0 {
|
||||||
|
dataType = dataType[:idx]
|
||||||
|
}
|
||||||
|
return strings.ToLower(strings.TrimSpace(dataType))
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains checks if a string slice contains a value
|
||||||
|
func contains(slice []string, value string) bool {
|
||||||
|
for _, item := range slice {
|
||||||
|
if strings.EqualFold(item, value) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
837
pkg/inspector/validators_test.go
Normal file
837
pkg/inspector/validators_test.go
Normal file
@@ -0,0 +1,837 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Helper function to create test database
|
||||||
|
func createTestDatabase() *models.Database {
|
||||||
|
return &models.Database{
|
||||||
|
Name: "testdb",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Tables: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "users",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {
|
||||||
|
Name: "id",
|
||||||
|
Type: "bigserial",
|
||||||
|
IsPrimaryKey: true,
|
||||||
|
AutoIncrement: true,
|
||||||
|
},
|
||||||
|
"username": {
|
||||||
|
Name: "username",
|
||||||
|
Type: "varchar(50)",
|
||||||
|
NotNull: true,
|
||||||
|
IsPrimaryKey: false,
|
||||||
|
},
|
||||||
|
"rid_organization": {
|
||||||
|
Name: "rid_organization",
|
||||||
|
Type: "bigint",
|
||||||
|
NotNull: true,
|
||||||
|
IsPrimaryKey: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Constraints: map[string]*models.Constraint{
|
||||||
|
"fk_users_organization": {
|
||||||
|
Name: "fk_users_organization",
|
||||||
|
Type: models.ForeignKeyConstraint,
|
||||||
|
Columns: []string{"rid_organization"},
|
||||||
|
ReferencedTable: "organizations",
|
||||||
|
ReferencedSchema: "public",
|
||||||
|
ReferencedColumns: []string{"id"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Indexes: map[string]*models.Index{
|
||||||
|
"idx_rid_organization": {
|
||||||
|
Name: "idx_rid_organization",
|
||||||
|
Columns: []string{"rid_organization"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "organizations",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {
|
||||||
|
Name: "id",
|
||||||
|
Type: "bigserial",
|
||||||
|
IsPrimaryKey: true,
|
||||||
|
AutoIncrement: true,
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
Name: "name",
|
||||||
|
Type: "varchar(100)",
|
||||||
|
NotNull: true,
|
||||||
|
IsPrimaryKey: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidatePrimaryKeyNaming(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "matching pattern id",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^id$",
|
||||||
|
Message: "Primary key should be 'id'",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "non-matching pattern id_",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^id_",
|
||||||
|
Message: "Primary key should start with 'id_'",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validatePrimaryKeyNaming(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validatePrimaryKeyNaming() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validatePrimaryKeyNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidatePrimaryKeyDatatype(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "allowed type bigserial",
|
||||||
|
rule: Rule{
|
||||||
|
AllowedTypes: []string{"bigserial", "bigint", "int"},
|
||||||
|
Message: "Primary key should use integer types",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "disallowed type",
|
||||||
|
rule: Rule{
|
||||||
|
AllowedTypes: []string{"uuid"},
|
||||||
|
Message: "Primary key should use UUID",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validatePrimaryKeyDatatype(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validatePrimaryKeyDatatype() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validatePrimaryKeyDatatype() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidatePrimaryKeyAutoIncrement(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "require auto increment",
|
||||||
|
rule: Rule{
|
||||||
|
RequireAutoIncrement: true,
|
||||||
|
Message: "Primary key should have auto-increment",
|
||||||
|
},
|
||||||
|
wantLen: 0, // No violations - all PKs have auto-increment
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "disallow auto increment",
|
||||||
|
rule: Rule{
|
||||||
|
RequireAutoIncrement: false,
|
||||||
|
Message: "Primary key should not have auto-increment",
|
||||||
|
},
|
||||||
|
wantLen: 2, // 2 violations - both PKs have auto-increment
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validatePrimaryKeyAutoIncrement(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validatePrimaryKeyAutoIncrement() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateForeignKeyColumnNaming(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "matching pattern rid_",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^rid_",
|
||||||
|
Message: "Foreign key columns should start with 'rid_'",
|
||||||
|
},
|
||||||
|
wantLen: 1,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "non-matching pattern fk_",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^fk_",
|
||||||
|
Message: "Foreign key columns should start with 'fk_'",
|
||||||
|
},
|
||||||
|
wantLen: 1,
|
||||||
|
wantPass: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateForeignKeyColumnNaming(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateForeignKeyColumnNaming() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validateForeignKeyColumnNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateForeignKeyConstraintNaming(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "matching pattern fk_",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^fk_",
|
||||||
|
Message: "Foreign key constraints should start with 'fk_'",
|
||||||
|
},
|
||||||
|
wantLen: 1,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "non-matching pattern FK_",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^FK_",
|
||||||
|
Message: "Foreign key constraints should start with 'FK_'",
|
||||||
|
},
|
||||||
|
wantLen: 1,
|
||||||
|
wantPass: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateForeignKeyConstraintNaming(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateForeignKeyConstraintNaming() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validateForeignKeyConstraintNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateForeignKeyIndex(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "require index with index present",
|
||||||
|
rule: Rule{
|
||||||
|
RequireIndex: true,
|
||||||
|
Message: "Foreign key columns should have indexes",
|
||||||
|
},
|
||||||
|
wantLen: 1,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "no requirement",
|
||||||
|
rule: Rule{
|
||||||
|
RequireIndex: false,
|
||||||
|
Message: "Foreign key index check disabled",
|
||||||
|
},
|
||||||
|
wantLen: 0,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateForeignKeyIndex(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateForeignKeyIndex() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validateForeignKeyIndex() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateTableNamingCase(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "lowercase snake_case pattern",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^[a-z][a-z0-9_]*$",
|
||||||
|
Case: "lowercase",
|
||||||
|
Message: "Table names should be lowercase snake_case",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "uppercase pattern",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^[A-Z][A-Z0-9_]*$",
|
||||||
|
Case: "uppercase",
|
||||||
|
Message: "Table names should be uppercase",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateTableNamingCase(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateTableNamingCase() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
if len(results) > 0 && results[0].Passed != tt.wantPass {
|
||||||
|
t.Errorf("validateTableNamingCase() passed=%v, want %v", results[0].Passed, tt.wantPass)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateColumnNamingCase(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "lowercase snake_case pattern",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^[a-z][a-z0-9_]*$",
|
||||||
|
Case: "lowercase",
|
||||||
|
Message: "Column names should be lowercase snake_case",
|
||||||
|
},
|
||||||
|
wantLen: 5, // 5 total columns across both tables
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "camelCase pattern",
|
||||||
|
rule: Rule{
|
||||||
|
Pattern: "^[a-z][a-zA-Z0-9]*$",
|
||||||
|
Case: "camelCase",
|
||||||
|
Message: "Column names should be camelCase",
|
||||||
|
},
|
||||||
|
wantLen: 5,
|
||||||
|
wantPass: false, // rid_organization has underscore
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateColumnNamingCase(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateColumnNamingCase() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateTableNameLength(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "max length 64",
|
||||||
|
rule: Rule{
|
||||||
|
MaxLength: 64,
|
||||||
|
Message: "Table name too long",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "max length 5",
|
||||||
|
rule: Rule{
|
||||||
|
MaxLength: 5,
|
||||||
|
Message: "Table name too long",
|
||||||
|
},
|
||||||
|
wantLen: 2,
|
||||||
|
wantPass: false, // "users" is 5 chars (passes), "organizations" is 13 (fails)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateTableNameLength(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateTableNameLength() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateColumnNameLength(t *testing.T) {
|
||||||
|
db := createTestDatabase()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
wantPass bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "max length 64",
|
||||||
|
rule: Rule{
|
||||||
|
MaxLength: 64,
|
||||||
|
Message: "Column name too long",
|
||||||
|
},
|
||||||
|
wantLen: 5,
|
||||||
|
wantPass: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "max length 5",
|
||||||
|
rule: Rule{
|
||||||
|
MaxLength: 5,
|
||||||
|
Message: "Column name too long",
|
||||||
|
},
|
||||||
|
wantLen: 5,
|
||||||
|
wantPass: false, // Some columns exceed 5 chars
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateColumnNameLength(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateColumnNameLength() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateReservedKeywords(t *testing.T) {
|
||||||
|
// Create a database with reserved keywords
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "testdb",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Tables: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "user", // "user" is a reserved keyword
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {
|
||||||
|
Name: "id",
|
||||||
|
Type: "bigint",
|
||||||
|
IsPrimaryKey: true,
|
||||||
|
},
|
||||||
|
"select": { // "select" is a reserved keyword
|
||||||
|
Name: "select",
|
||||||
|
Type: "varchar(50)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
rule Rule
|
||||||
|
wantLen int
|
||||||
|
checkPasses bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "check tables only",
|
||||||
|
rule: Rule{
|
||||||
|
CheckTables: true,
|
||||||
|
CheckColumns: false,
|
||||||
|
Message: "Reserved keyword used",
|
||||||
|
},
|
||||||
|
wantLen: 1, // "user" table
|
||||||
|
checkPasses: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "check columns only",
|
||||||
|
rule: Rule{
|
||||||
|
CheckTables: false,
|
||||||
|
CheckColumns: true,
|
||||||
|
Message: "Reserved keyword used",
|
||||||
|
},
|
||||||
|
wantLen: 2, // "id", "select" columns (id passes, select fails)
|
||||||
|
checkPasses: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "check both",
|
||||||
|
rule: Rule{
|
||||||
|
CheckTables: true,
|
||||||
|
CheckColumns: true,
|
||||||
|
Message: "Reserved keyword used",
|
||||||
|
},
|
||||||
|
wantLen: 3, // "user" table + "id", "select" columns
|
||||||
|
checkPasses: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results := validateReservedKeywords(db, tt.rule, "test_rule")
|
||||||
|
if len(results) != tt.wantLen {
|
||||||
|
t.Errorf("validateReservedKeywords() returned %d results, want %d", len(results), tt.wantLen)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateMissingPrimaryKey(t *testing.T) {
|
||||||
|
// Create database with and without primary keys
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "testdb",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Tables: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "with_pk",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {
|
||||||
|
Name: "id",
|
||||||
|
Type: "bigint",
|
||||||
|
IsPrimaryKey: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "without_pk",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"name": {
|
||||||
|
Name: "name",
|
||||||
|
Type: "varchar(50)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
rule := Rule{
|
||||||
|
Message: "Table missing primary key",
|
||||||
|
}
|
||||||
|
|
||||||
|
results := validateMissingPrimaryKey(db, rule, "test_rule")
|
||||||
|
|
||||||
|
if len(results) != 2 {
|
||||||
|
t.Errorf("validateMissingPrimaryKey() returned %d results, want 2", len(results))
|
||||||
|
}
|
||||||
|
|
||||||
|
// First result should pass (with_pk has PK)
|
||||||
|
if results[0].Passed != true {
|
||||||
|
t.Errorf("validateMissingPrimaryKey() result[0].Passed=%v, want true", results[0].Passed)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second result should fail (without_pk missing PK)
|
||||||
|
if results[1].Passed != false {
|
||||||
|
t.Errorf("validateMissingPrimaryKey() result[1].Passed=%v, want false", results[1].Passed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateOrphanedForeignKey(t *testing.T) {
|
||||||
|
// Create database with orphaned FK
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "testdb",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Tables: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "users",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {
|
||||||
|
Name: "id",
|
||||||
|
Type: "bigint",
|
||||||
|
IsPrimaryKey: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Constraints: map[string]*models.Constraint{
|
||||||
|
"fk_nonexistent": {
|
||||||
|
Name: "fk_nonexistent",
|
||||||
|
Type: models.ForeignKeyConstraint,
|
||||||
|
Columns: []string{"rid_organization"},
|
||||||
|
ReferencedTable: "nonexistent_table",
|
||||||
|
ReferencedSchema: "public",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
rule := Rule{
|
||||||
|
Message: "Foreign key references non-existent table",
|
||||||
|
}
|
||||||
|
|
||||||
|
results := validateOrphanedForeignKey(db, rule, "test_rule")
|
||||||
|
|
||||||
|
if len(results) != 1 {
|
||||||
|
t.Errorf("validateOrphanedForeignKey() returned %d results, want 1", len(results))
|
||||||
|
}
|
||||||
|
|
||||||
|
if results[0].Passed != false {
|
||||||
|
t.Errorf("validateOrphanedForeignKey() passed=%v, want false", results[0].Passed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateCircularDependency(t *testing.T) {
|
||||||
|
// Create database with circular dependency
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "testdb",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Tables: []*models.Table{
|
||||||
|
{
|
||||||
|
Name: "table_a",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {Name: "id", Type: "bigint", IsPrimaryKey: true},
|
||||||
|
},
|
||||||
|
Constraints: map[string]*models.Constraint{
|
||||||
|
"fk_to_b": {
|
||||||
|
Name: "fk_to_b",
|
||||||
|
Type: models.ForeignKeyConstraint,
|
||||||
|
ReferencedTable: "table_b",
|
||||||
|
ReferencedSchema: "public",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "table_b",
|
||||||
|
Columns: map[string]*models.Column{
|
||||||
|
"id": {Name: "id", Type: "bigint", IsPrimaryKey: true},
|
||||||
|
},
|
||||||
|
Constraints: map[string]*models.Constraint{
|
||||||
|
"fk_to_a": {
|
||||||
|
Name: "fk_to_a",
|
||||||
|
Type: models.ForeignKeyConstraint,
|
||||||
|
ReferencedTable: "table_a",
|
||||||
|
ReferencedSchema: "public",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
rule := Rule{
|
||||||
|
Message: "Circular dependency detected",
|
||||||
|
}
|
||||||
|
|
||||||
|
results := validateCircularDependency(db, rule, "test_rule")
|
||||||
|
|
||||||
|
// Should detect circular dependency in both tables
|
||||||
|
if len(results) == 0 {
|
||||||
|
t.Error("validateCircularDependency() returned 0 results, expected circular dependency detection")
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, result := range results {
|
||||||
|
if result.Passed {
|
||||||
|
t.Error("validateCircularDependency() passed=true, want false for circular dependency")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeDataType(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{"varchar(50)", "varchar"},
|
||||||
|
{"decimal(10,2)", "decimal"},
|
||||||
|
{"int", "int"},
|
||||||
|
{"BIGINT", "bigint"},
|
||||||
|
{"VARCHAR(255)", "varchar"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.input, func(t *testing.T) {
|
||||||
|
result := normalizeDataType(tt.input)
|
||||||
|
if result != tt.expected {
|
||||||
|
t.Errorf("normalizeDataType(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestContains(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
slice []string
|
||||||
|
value string
|
||||||
|
expected bool
|
||||||
|
}{
|
||||||
|
{"found exact", []string{"foo", "bar", "baz"}, "bar", true},
|
||||||
|
{"not found", []string{"foo", "bar", "baz"}, "qux", false},
|
||||||
|
{"case insensitive match", []string{"foo", "Bar", "baz"}, "bar", true},
|
||||||
|
{"empty slice", []string{}, "foo", false},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
result := contains(tt.slice, tt.value)
|
||||||
|
if result != tt.expected {
|
||||||
|
t.Errorf("contains(%v, %q) = %v, want %v", tt.slice, tt.value, result, tt.expected)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHasCycle(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
graph map[string][]string
|
||||||
|
node string
|
||||||
|
expected bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "simple cycle",
|
||||||
|
graph: map[string][]string{
|
||||||
|
"A": {"B"},
|
||||||
|
"B": {"C"},
|
||||||
|
"C": {"A"},
|
||||||
|
},
|
||||||
|
node: "A",
|
||||||
|
expected: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "no cycle",
|
||||||
|
graph: map[string][]string{
|
||||||
|
"A": {"B"},
|
||||||
|
"B": {"C"},
|
||||||
|
"C": {},
|
||||||
|
},
|
||||||
|
node: "A",
|
||||||
|
expected: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "self cycle",
|
||||||
|
graph: map[string][]string{
|
||||||
|
"A": {"A"},
|
||||||
|
},
|
||||||
|
node: "A",
|
||||||
|
expected: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
visited := make(map[string]bool)
|
||||||
|
recStack := make(map[string]bool)
|
||||||
|
result := hasCycle(tt.node, tt.graph, visited, recStack)
|
||||||
|
if result != tt.expected {
|
||||||
|
t.Errorf("hasCycle() = %v, want %v", result, tt.expected)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFormatLocation(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
schema string
|
||||||
|
table string
|
||||||
|
column string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{"public", "users", "id", "public.users.id"},
|
||||||
|
{"public", "users", "", "public.users"},
|
||||||
|
{"public", "", "", "public"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.expected, func(t *testing.T) {
|
||||||
|
result := formatLocation(tt.schema, tt.table, tt.column)
|
||||||
|
if result != tt.expected {
|
||||||
|
t.Errorf("formatLocation(%q, %q, %q) = %q, want %q",
|
||||||
|
tt.schema, tt.table, tt.column, result, tt.expected)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
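The hasCycle helper exercised by TestHasCycle is not part of this diff. For orientation, here is a minimal sketch of the depth-first-search-with-recursion-stack shape the tests assume; this is an assumption for illustration, not the committed implementation.

// hasCycle reports whether a cycle is reachable from node, using DFS with a
// recursion stack (sketch only; the shipped helper may differ in detail).
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
    visited[node] = true
    recStack[node] = true
    for _, next := range graph[node] {
        if !visited[next] {
            if hasCycle(next, graph, visited, recStack) {
                return true
            }
        } else if recStack[next] {
            // next is on the current DFS path, so we closed a cycle
            return true
        }
    }
    recStack[node] = false
    return false
}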
627
pkg/merge/merge.go
Normal file
@@ -0,0 +1,627 @@
// Package merge provides utilities for merging database schemas.
// It allows combining schemas from multiple sources while avoiding duplicates,
// supporting only additive operations (no deletion or modification of existing items).
package merge

import (
    "fmt"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

// MergeResult represents the result of a merge operation
type MergeResult struct {
    SchemasAdded     int
    TablesAdded      int
    ColumnsAdded     int
    ConstraintsAdded int
    IndexesAdded     int
    RelationsAdded   int
    DomainsAdded     int
    EnumsAdded       int
    ViewsAdded       int
    SequencesAdded   int
}

// MergeOptions contains options for merge operations
type MergeOptions struct {
    SkipDomains    bool
    SkipRelations  bool
    SkipEnums      bool
    SkipViews      bool
    SkipSequences  bool
    SkipTableNames map[string]bool // Tables to skip during merge (keyed by table name)
}

// MergeDatabases merges the source database into the target database.
// Only adds missing items; existing items are not modified.
func MergeDatabases(target, source *models.Database, opts *MergeOptions) *MergeResult {
    if opts == nil {
        opts = &MergeOptions{}
    }

    result := &MergeResult{}

    if target == nil || source == nil {
        return result
    }

    // Merge schemas and their contents
    result.merge(target, source, opts)

    return result
}

func (r *MergeResult) merge(target, source *models.Database, opts *MergeOptions) {
    // Create maps of existing schemas for quick lookup
    existingSchemas := make(map[string]*models.Schema)
    for _, schema := range target.Schemas {
        existingSchemas[schema.SQLName()] = schema
    }

    // Merge schemas
    for _, srcSchema := range source.Schemas {
        schemaName := srcSchema.SQLName()
        if tgtSchema, exists := existingSchemas[schemaName]; exists {
            // Schema exists, merge its contents
            r.mergeSchemaContents(tgtSchema, srcSchema, opts)
        } else {
            // Schema doesn't exist, add it
            newSchema := cloneSchema(srcSchema)
            target.Schemas = append(target.Schemas, newSchema)
            r.SchemasAdded++
        }
    }

    // Merge domains if not skipped
    if !opts.SkipDomains {
        r.mergeDomains(target, source)
    }
}

func (r *MergeResult) mergeSchemaContents(target, source *models.Schema, opts *MergeOptions) {
    // Merge tables
    r.mergeTables(target, source, opts)

    // Merge views if not skipped
    if !opts.SkipViews {
        r.mergeViews(target, source)
    }

    // Merge sequences if not skipped
    if !opts.SkipSequences {
        r.mergeSequences(target, source)
    }

    // Merge enums if not skipped
    if !opts.SkipEnums {
        r.mergeEnums(target, source)
    }

    // Merge relations if not skipped
    if !opts.SkipRelations {
        r.mergeRelations(target, source)
    }
}

func (r *MergeResult) mergeTables(schema *models.Schema, source *models.Schema, opts *MergeOptions) {
    // Create map of existing tables
    existingTables := make(map[string]*models.Table)
    for _, table := range schema.Tables {
        existingTables[table.SQLName()] = table
    }

    // Merge tables
    for _, srcTable := range source.Tables {
        tableName := srcTable.SQLName()

        // Skip if table is in the skip list (case-insensitive)
        if opts != nil && opts.SkipTableNames != nil && opts.SkipTableNames[strings.ToLower(tableName)] {
            continue
        }

        if tgtTable, exists := existingTables[tableName]; exists {
            // Table exists, merge its columns, constraints, and indexes
            r.mergeColumns(tgtTable, srcTable)
            r.mergeConstraints(tgtTable, srcTable)
            r.mergeIndexes(tgtTable, srcTable)
        } else {
            // Table doesn't exist, add it
            newTable := cloneTable(srcTable)
            schema.Tables = append(schema.Tables, newTable)
            r.TablesAdded++
            // Count columns in the newly added table
            r.ColumnsAdded += len(newTable.Columns)
        }
    }
}

func (r *MergeResult) mergeColumns(table *models.Table, srcTable *models.Table) {
    // Create map of existing columns
    existingColumns := make(map[string]*models.Column)
    for colName := range table.Columns {
        existingColumns[colName] = table.Columns[colName]
    }

    // Merge columns
    for colName, srcCol := range srcTable.Columns {
        if _, exists := existingColumns[colName]; !exists {
            // Column doesn't exist, add it
            newCol := cloneColumn(srcCol)
            table.Columns[colName] = newCol
            r.ColumnsAdded++
        }
    }
}

func (r *MergeResult) mergeConstraints(table *models.Table, srcTable *models.Table) {
    // Initialize constraints map if nil
    if table.Constraints == nil {
        table.Constraints = make(map[string]*models.Constraint)
    }

    // Create map of existing constraints
    existingConstraints := make(map[string]*models.Constraint)
    for constName := range table.Constraints {
        existingConstraints[constName] = table.Constraints[constName]
    }

    // Merge constraints
    for constName, srcConst := range srcTable.Constraints {
        if _, exists := existingConstraints[constName]; !exists {
            // Constraint doesn't exist, add it
            newConst := cloneConstraint(srcConst)
            table.Constraints[constName] = newConst
            r.ConstraintsAdded++
        }
    }
}

func (r *MergeResult) mergeIndexes(table *models.Table, srcTable *models.Table) {
    // Initialize indexes map if nil
    if table.Indexes == nil {
        table.Indexes = make(map[string]*models.Index)
    }

    // Create map of existing indexes
    existingIndexes := make(map[string]*models.Index)
    for idxName := range table.Indexes {
        existingIndexes[idxName] = table.Indexes[idxName]
    }

    // Merge indexes
    for idxName, srcIdx := range srcTable.Indexes {
        if _, exists := existingIndexes[idxName]; !exists {
            // Index doesn't exist, add it
            newIdx := cloneIndex(srcIdx)
            table.Indexes[idxName] = newIdx
            r.IndexesAdded++
        }
    }
}

func (r *MergeResult) mergeViews(schema *models.Schema, source *models.Schema) {
    // Create map of existing views
    existingViews := make(map[string]*models.View)
    for _, view := range schema.Views {
        existingViews[view.SQLName()] = view
    }

    // Merge views
    for _, srcView := range source.Views {
        viewName := srcView.SQLName()
        if _, exists := existingViews[viewName]; !exists {
            // View doesn't exist, add it
            newView := cloneView(srcView)
            schema.Views = append(schema.Views, newView)
            r.ViewsAdded++
        }
    }
}

func (r *MergeResult) mergeSequences(schema *models.Schema, source *models.Schema) {
    // Create map of existing sequences
    existingSequences := make(map[string]*models.Sequence)
    for _, seq := range schema.Sequences {
        existingSequences[seq.SQLName()] = seq
    }

    // Merge sequences
    for _, srcSeq := range source.Sequences {
        seqName := srcSeq.SQLName()
        if _, exists := existingSequences[seqName]; !exists {
            // Sequence doesn't exist, add it
            newSeq := cloneSequence(srcSeq)
            schema.Sequences = append(schema.Sequences, newSeq)
            r.SequencesAdded++
        }
    }
}

func (r *MergeResult) mergeEnums(schema *models.Schema, source *models.Schema) {
    // Create map of existing enums
    existingEnums := make(map[string]*models.Enum)
    for _, enum := range schema.Enums {
        existingEnums[enum.SQLName()] = enum
    }

    // Merge enums
    for _, srcEnum := range source.Enums {
        enumName := srcEnum.SQLName()
        if _, exists := existingEnums[enumName]; !exists {
            // Enum doesn't exist, add it
            newEnum := cloneEnum(srcEnum)
            schema.Enums = append(schema.Enums, newEnum)
            r.EnumsAdded++
        }
    }
}

func (r *MergeResult) mergeRelations(schema *models.Schema, source *models.Schema) {
    // Create map of existing relations
    existingRelations := make(map[string]*models.Relationship)
    for _, rel := range schema.Relations {
        existingRelations[rel.SQLName()] = rel
    }

    // Merge relations
    for _, srcRel := range source.Relations {
        if _, exists := existingRelations[srcRel.SQLName()]; !exists {
            // Relation doesn't exist, add it
            newRel := cloneRelation(srcRel)
            schema.Relations = append(schema.Relations, newRel)
            r.RelationsAdded++
        }
    }
}

func (r *MergeResult) mergeDomains(target *models.Database, source *models.Database) {
    // Create map of existing domains
    existingDomains := make(map[string]*models.Domain)
    for _, domain := range target.Domains {
        existingDomains[domain.SQLName()] = domain
    }

    // Merge domains
    for _, srcDomain := range source.Domains {
        domainName := srcDomain.SQLName()
        if _, exists := existingDomains[domainName]; !exists {
            // Domain doesn't exist, add it
            newDomain := cloneDomain(srcDomain)
            target.Domains = append(target.Domains, newDomain)
            r.DomainsAdded++
        }
    }
}

// Clone functions to create deep copies of models

func cloneSchema(schema *models.Schema) *models.Schema {
    if schema == nil {
        return nil
    }
    newSchema := &models.Schema{
        Name:        schema.Name,
        Description: schema.Description,
        Owner:       schema.Owner,
        Comment:     schema.Comment,
        Sequence:    schema.Sequence,
        UpdatedAt:   schema.UpdatedAt,
        Tables:      make([]*models.Table, 0),
        Views:       make([]*models.View, 0),
        Sequences:   make([]*models.Sequence, 0),
        Enums:       make([]*models.Enum, 0),
        Relations:   make([]*models.Relationship, 0),
    }

    if schema.Permissions != nil {
        newSchema.Permissions = make(map[string]string)
        for k, v := range schema.Permissions {
            newSchema.Permissions[k] = v
        }
    }

    if schema.Metadata != nil {
        newSchema.Metadata = make(map[string]interface{})
        for k, v := range schema.Metadata {
            newSchema.Metadata[k] = v
        }
    }

    if schema.Scripts != nil {
        newSchema.Scripts = make([]*models.Script, len(schema.Scripts))
        copy(newSchema.Scripts, schema.Scripts)
    }

    // Clone tables
    for _, table := range schema.Tables {
        newSchema.Tables = append(newSchema.Tables, cloneTable(table))
    }

    // Clone views
    for _, view := range schema.Views {
        newSchema.Views = append(newSchema.Views, cloneView(view))
    }

    // Clone sequences
    for _, seq := range schema.Sequences {
        newSchema.Sequences = append(newSchema.Sequences, cloneSequence(seq))
    }

    // Clone enums
    for _, enum := range schema.Enums {
        newSchema.Enums = append(newSchema.Enums, cloneEnum(enum))
    }

    // Clone relations
    for _, rel := range schema.Relations {
        newSchema.Relations = append(newSchema.Relations, cloneRelation(rel))
    }

    return newSchema
}

func cloneTable(table *models.Table) *models.Table {
    if table == nil {
        return nil
    }
    newTable := &models.Table{
        Name:        table.Name,
        Description: table.Description,
        Schema:      table.Schema,
        Comment:     table.Comment,
        Sequence:    table.Sequence,
        UpdatedAt:   table.UpdatedAt,
        Columns:     make(map[string]*models.Column),
        Constraints: make(map[string]*models.Constraint),
        Indexes:     make(map[string]*models.Index),
    }

    if table.Metadata != nil {
        newTable.Metadata = make(map[string]interface{})
        for k, v := range table.Metadata {
            newTable.Metadata[k] = v
        }
    }

    // Clone columns
    for colName, col := range table.Columns {
        newTable.Columns[colName] = cloneColumn(col)
    }

    // Clone constraints
    for constName, constraint := range table.Constraints {
        newTable.Constraints[constName] = cloneConstraint(constraint)
    }

    // Clone indexes
    for idxName, index := range table.Indexes {
        newTable.Indexes[idxName] = cloneIndex(index)
    }

    return newTable
}

func cloneColumn(col *models.Column) *models.Column {
    if col == nil {
        return nil
    }
    newCol := &models.Column{
        Name:          col.Name,
        Type:          col.Type,
        Description:   col.Description,
        Comment:       col.Comment,
        IsPrimaryKey:  col.IsPrimaryKey,
        NotNull:       col.NotNull,
        Default:       col.Default,
        Precision:     col.Precision,
        Scale:         col.Scale,
        Length:        col.Length,
        Sequence:      col.Sequence,
        AutoIncrement: col.AutoIncrement,
        Collation:     col.Collation,
    }

    return newCol
}

func cloneConstraint(constraint *models.Constraint) *models.Constraint {
    if constraint == nil {
        return nil
    }
    newConstraint := &models.Constraint{
        Type:              constraint.Type,
        Columns:           make([]string, len(constraint.Columns)),
        ReferencedTable:   constraint.ReferencedTable,
        ReferencedSchema:  constraint.ReferencedSchema,
        ReferencedColumns: make([]string, len(constraint.ReferencedColumns)),
        OnUpdate:          constraint.OnUpdate,
        OnDelete:          constraint.OnDelete,
        Expression:        constraint.Expression,
        Name:              constraint.Name,
        Deferrable:        constraint.Deferrable,
        InitiallyDeferred: constraint.InitiallyDeferred,
        Sequence:          constraint.Sequence,
    }
    copy(newConstraint.Columns, constraint.Columns)
    copy(newConstraint.ReferencedColumns, constraint.ReferencedColumns)
    return newConstraint
}

func cloneIndex(index *models.Index) *models.Index {
    if index == nil {
        return nil
    }
    newIndex := &models.Index{
        Name:        index.Name,
        Description: index.Description,
        Table:       index.Table,
        Schema:      index.Schema,
        Columns:     make([]string, len(index.Columns)),
        Unique:      index.Unique,
        Type:        index.Type,
        Where:       index.Where,
        Concurrent:  index.Concurrent,
        Include:     make([]string, len(index.Include)),
        Comment:     index.Comment,
        Sequence:    index.Sequence,
    }
    copy(newIndex.Columns, index.Columns)
    copy(newIndex.Include, index.Include)
    return newIndex
}

func cloneView(view *models.View) *models.View {
    if view == nil {
        return nil
    }
    newView := &models.View{
        Name:        view.Name,
        Description: view.Description,
        Schema:      view.Schema,
        Definition:  view.Definition,
        Comment:     view.Comment,
        Sequence:    view.Sequence,
        Columns:     make(map[string]*models.Column),
    }

    if view.Metadata != nil {
        newView.Metadata = make(map[string]interface{})
        for k, v := range view.Metadata {
            newView.Metadata[k] = v
        }
    }

    // Clone columns
    for colName, col := range view.Columns {
        newView.Columns[colName] = cloneColumn(col)
    }

    return newView
}

func cloneSequence(seq *models.Sequence) *models.Sequence {
    if seq == nil {
        return nil
    }
    newSeq := &models.Sequence{
        Name:          seq.Name,
        Description:   seq.Description,
        Schema:        seq.Schema,
        StartValue:    seq.StartValue,
        MinValue:      seq.MinValue,
        MaxValue:      seq.MaxValue,
        IncrementBy:   seq.IncrementBy,
        CacheSize:     seq.CacheSize,
        Cycle:         seq.Cycle,
        OwnedByTable:  seq.OwnedByTable,
        OwnedByColumn: seq.OwnedByColumn,
        Comment:       seq.Comment,
        Sequence:      seq.Sequence,
    }
    return newSeq
}

func cloneEnum(enum *models.Enum) *models.Enum {
    if enum == nil {
        return nil
    }
    newEnum := &models.Enum{
        Name:   enum.Name,
        Values: make([]string, len(enum.Values)),
        Schema: enum.Schema,
    }
    copy(newEnum.Values, enum.Values)
    return newEnum
}

func cloneRelation(rel *models.Relationship) *models.Relationship {
    if rel == nil {
        return nil
    }
    newRel := &models.Relationship{
        Name:          rel.Name,
        Type:          rel.Type,
        FromTable:     rel.FromTable,
        FromSchema:    rel.FromSchema,
        FromColumns:   make([]string, len(rel.FromColumns)),
        ToTable:       rel.ToTable,
        ToSchema:      rel.ToSchema,
        ToColumns:     make([]string, len(rel.ToColumns)),
        ForeignKey:    rel.ForeignKey,
        ThroughTable:  rel.ThroughTable,
        ThroughSchema: rel.ThroughSchema,
        Description:   rel.Description,
        Sequence:      rel.Sequence,
    }

    if rel.Properties != nil {
        newRel.Properties = make(map[string]string)
        for k, v := range rel.Properties {
            newRel.Properties[k] = v
        }
    }

    copy(newRel.FromColumns, rel.FromColumns)
    copy(newRel.ToColumns, rel.ToColumns)
    return newRel
}

func cloneDomain(domain *models.Domain) *models.Domain {
    if domain == nil {
        return nil
    }
    newDomain := &models.Domain{
        Name:        domain.Name,
        Description: domain.Description,
        Comment:     domain.Comment,
        Sequence:    domain.Sequence,
        Tables:      make([]*models.DomainTable, len(domain.Tables)),
    }

    if domain.Metadata != nil {
        newDomain.Metadata = make(map[string]interface{})
        for k, v := range domain.Metadata {
            newDomain.Metadata[k] = v
        }
    }

    copy(newDomain.Tables, domain.Tables)
    return newDomain
}

// GetMergeSummary returns a human-readable summary of the merge result
func GetMergeSummary(result *MergeResult) string {
    if result == nil {
        return "No merge result available"
    }

    lines := []string{
        "=== Merge Summary ===",
        fmt.Sprintf("Schemas added: %d", result.SchemasAdded),
        fmt.Sprintf("Tables added: %d", result.TablesAdded),
        fmt.Sprintf("Columns added: %d", result.ColumnsAdded),
        fmt.Sprintf("Constraints added: %d", result.ConstraintsAdded),
        fmt.Sprintf("Indexes added: %d", result.IndexesAdded),
        fmt.Sprintf("Views added: %d", result.ViewsAdded),
        fmt.Sprintf("Sequences added: %d", result.SequencesAdded),
        fmt.Sprintf("Enums added: %d", result.EnumsAdded),
        fmt.Sprintf("Relations added: %d", result.RelationsAdded),
        fmt.Sprintf("Domains added: %d", result.DomainsAdded),
    }

    totalAdded := result.SchemasAdded + result.TablesAdded + result.ColumnsAdded +
        result.ConstraintsAdded + result.IndexesAdded +
        result.ViewsAdded + result.SequencesAdded + result.EnumsAdded +
        result.RelationsAdded + result.DomainsAdded

    lines = append(lines, fmt.Sprintf("Total items added: %d", totalAdded))

    summary := ""
    for _, line := range lines {
        summary += line + "\n"
    }

    return summary
}
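Taken together, the new package's surface is small: build two models.Database values, call MergeDatabases, and print the result. A minimal usage sketch follows, assuming the Init* constructors from pkg/models shown later in this diff; the database/schema/table names and the skipped "migrations" table are illustrative, not part of the change.

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/merge"
    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
    // Target and source would normally come from two different readers,
    // e.g. a live database introspection and a spec file.
    target := models.InitDatabase("app")
    target.Schemas = append(target.Schemas, models.InitSchema("public"))

    source := models.InitDatabase("app")
    srcSchema := models.InitSchema("public")
    srcSchema.Tables = append(srcSchema.Tables, models.InitTable("users", "public"))
    source.Schemas = append(source.Schemas, srcSchema)

    // Additive merge; "migrations" is a hypothetical bookkeeping table to skip.
    opts := &merge.MergeOptions{
        SkipTableNames: map[string]bool{"migrations": true},
    }
    result := merge.MergeDatabases(target, source, opts)

    fmt.Print(merge.GetMergeSummary(result))
}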
617
pkg/merge/merge_test.go
Normal file
@@ -0,0 +1,617 @@
package merge

import (
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func TestMergeDatabases_NilInputs(t *testing.T) {
    result := MergeDatabases(nil, nil, nil)
    if result == nil {
        t.Fatal("Expected non-nil result")
    }
    if result.SchemasAdded != 0 {
        t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
    }
}

func TestMergeDatabases_NewSchema(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {Name: "public"},
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {Name: "auth"},
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.SchemasAdded != 1 {
        t.Errorf("Expected 1 schema added, got %d", result.SchemasAdded)
    }
    if len(target.Schemas) != 2 {
        t.Errorf("Expected 2 schemas in target, got %d", len(target.Schemas))
    }
}

func TestMergeDatabases_ExistingSchema(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {Name: "public"},
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {Name: "public"},
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.SchemasAdded != 0 {
        t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
    }
    if len(target.Schemas) != 1 {
        t.Errorf("Expected 1 schema in target, got %d", len(target.Schemas))
    }
}

func TestMergeTables_NewTable(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "posts",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.TablesAdded != 1 {
        t.Errorf("Expected 1 table added, got %d", result.TablesAdded)
    }
    if len(target.Schemas[0].Tables) != 2 {
        t.Errorf("Expected 2 tables in target schema, got %d", len(target.Schemas[0].Tables))
    }
}

func TestMergeColumns_NewColumn(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:   "users",
                        Schema: "public",
                        Columns: map[string]*models.Column{
                            "id": {Name: "id", Type: "int"},
                        },
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:   "users",
                        Schema: "public",
                        Columns: map[string]*models.Column{
                            "email": {Name: "email", Type: "varchar"},
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.ColumnsAdded != 1 {
        t.Errorf("Expected 1 column added, got %d", result.ColumnsAdded)
    }
    if len(target.Schemas[0].Tables[0].Columns) != 2 {
        t.Errorf("Expected 2 columns in target table, got %d", len(target.Schemas[0].Tables[0].Columns))
    }
}

func TestMergeConstraints_NewConstraint(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:        "users",
                        Schema:      "public",
                        Columns:     map[string]*models.Column{},
                        Constraints: map[string]*models.Constraint{},
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Constraints: map[string]*models.Constraint{
                            "ukey_users_email": {
                                Type:    models.UniqueConstraint,
                                Columns: []string{"email"},
                                Name:    "ukey_users_email",
                            },
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.ConstraintsAdded != 1 {
        t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
    }
    if len(target.Schemas[0].Tables[0].Constraints) != 1 {
        t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
    }
}

func TestMergeConstraints_NilConstraintsMap(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:        "users",
                        Schema:      "public",
                        Columns:     map[string]*models.Column{},
                        Constraints: nil, // Nil map
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Constraints: map[string]*models.Constraint{
                            "ukey_users_email": {
                                Type:    models.UniqueConstraint,
                                Columns: []string{"email"},
                                Name:    "ukey_users_email",
                            },
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.ConstraintsAdded != 1 {
        t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
    }
    if target.Schemas[0].Tables[0].Constraints == nil {
        t.Error("Expected constraints map to be initialized")
    }
    if len(target.Schemas[0].Tables[0].Constraints) != 1 {
        t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
    }
}

func TestMergeIndexes_NewIndex(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Indexes: map[string]*models.Index{},
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Indexes: map[string]*models.Index{
                            "idx_users_email": {
                                Name:    "idx_users_email",
                                Columns: []string{"email"},
                            },
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.IndexesAdded != 1 {
        t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
    }
    if len(target.Schemas[0].Tables[0].Indexes) != 1 {
        t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
    }
}

func TestMergeIndexes_NilIndexesMap(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Indexes: nil, // Nil map
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                        Indexes: map[string]*models.Index{
                            "idx_users_email": {
                                Name:    "idx_users_email",
                                Columns: []string{"email"},
                            },
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.IndexesAdded != 1 {
        t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
    }
    if target.Schemas[0].Tables[0].Indexes == nil {
        t.Error("Expected indexes map to be initialized")
    }
    if len(target.Schemas[0].Tables[0].Indexes) != 1 {
        t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
    }
}

func TestMergeOptions_SkipTableNames(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "users",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                    },
                },
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:    "migrations",
                        Schema:  "public",
                        Columns: map[string]*models.Column{},
                    },
                },
            },
        },
    }

    opts := &MergeOptions{
        SkipTableNames: map[string]bool{
            "migrations": true,
        },
    }

    result := MergeDatabases(target, source, opts)
    if result.TablesAdded != 0 {
        t.Errorf("Expected 0 tables added (skipped), got %d", result.TablesAdded)
    }
    if len(target.Schemas[0].Tables) != 1 {
        t.Errorf("Expected 1 table in target schema, got %d", len(target.Schemas[0].Tables))
    }
}

func TestMergeViews_NewView(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name:  "public",
                Views: []*models.View{},
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Views: []*models.View{
                    {
                        Name:       "user_summary",
                        Schema:     "public",
                        Definition: "SELECT * FROM users",
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.ViewsAdded != 1 {
        t.Errorf("Expected 1 view added, got %d", result.ViewsAdded)
    }
    if len(target.Schemas[0].Views) != 1 {
        t.Errorf("Expected 1 view in target schema, got %d", len(target.Schemas[0].Views))
    }
}

func TestMergeEnums_NewEnum(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name:  "public",
                Enums: []*models.Enum{},
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Enums: []*models.Enum{
                    {
                        Name:   "user_role",
                        Schema: "public",
                        Values: []string{"admin", "user"},
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.EnumsAdded != 1 {
        t.Errorf("Expected 1 enum added, got %d", result.EnumsAdded)
    }
    if len(target.Schemas[0].Enums) != 1 {
        t.Errorf("Expected 1 enum in target schema, got %d", len(target.Schemas[0].Enums))
    }
}

func TestMergeDomains_NewDomain(t *testing.T) {
    target := &models.Database{
        Domains: []*models.Domain{},
    }
    source := &models.Database{
        Domains: []*models.Domain{
            {
                Name:        "auth",
                Description: "Authentication domain",
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.DomainsAdded != 1 {
        t.Errorf("Expected 1 domain added, got %d", result.DomainsAdded)
    }
    if len(target.Domains) != 1 {
        t.Errorf("Expected 1 domain in target, got %d", len(target.Domains))
    }
}

func TestMergeRelations_NewRelation(t *testing.T) {
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name:      "public",
                Relations: []*models.Relationship{},
            },
        },
    }
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Relations: []*models.Relationship{
                    {
                        Name:        "fk_posts_user",
                        Type:        models.OneToMany,
                        FromTable:   "posts",
                        FromColumns: []string{"user_id"},
                        ToTable:     "users",
                        ToColumns:   []string{"id"},
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)
    if result.RelationsAdded != 1 {
        t.Errorf("Expected 1 relation added, got %d", result.RelationsAdded)
    }
    if len(target.Schemas[0].Relations) != 1 {
        t.Errorf("Expected 1 relation in target schema, got %d", len(target.Schemas[0].Relations))
    }
}

func TestGetMergeSummary(t *testing.T) {
    result := &MergeResult{
        SchemasAdded:     1,
        TablesAdded:      2,
        ColumnsAdded:     5,
        ConstraintsAdded: 3,
        IndexesAdded:     2,
        ViewsAdded:       1,
    }

    summary := GetMergeSummary(result)
    if summary == "" {
        t.Error("Expected non-empty summary")
    }
    if len(summary) < 50 {
        t.Errorf("Summary seems too short: %s", summary)
    }
}

func TestGetMergeSummary_Nil(t *testing.T) {
    summary := GetMergeSummary(nil)
    if summary == "" {
        t.Error("Expected non-empty summary for nil result")
    }
}

func TestComplexMerge(t *testing.T) {
    // Target with existing structure
    target := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:   "users",
                        Schema: "public",
                        Columns: map[string]*models.Column{
                            "id": {Name: "id", Type: "int"},
                        },
                        Constraints: map[string]*models.Constraint{},
                        Indexes:     map[string]*models.Index{},
                    },
                },
            },
        },
    }

    // Source with new columns, constraints, and indexes
    source := &models.Database{
        Schemas: []*models.Schema{
            {
                Name: "public",
                Tables: []*models.Table{
                    {
                        Name:   "users",
                        Schema: "public",
                        Columns: map[string]*models.Column{
                            "email": {Name: "email", Type: "varchar"},
                            "guid":  {Name: "guid", Type: "uuid"},
                        },
                        Constraints: map[string]*models.Constraint{
                            "ukey_users_email": {
                                Type:    models.UniqueConstraint,
                                Columns: []string{"email"},
                                Name:    "ukey_users_email",
                            },
                            "ukey_users_guid": {
                                Type:    models.UniqueConstraint,
                                Columns: []string{"guid"},
                                Name:    "ukey_users_guid",
                            },
                        },
                        Indexes: map[string]*models.Index{
                            "idx_users_email": {
                                Name:    "idx_users_email",
                                Columns: []string{"email"},
                            },
                        },
                    },
                },
            },
        },
    }

    result := MergeDatabases(target, source, nil)

    // Verify counts
    if result.ColumnsAdded != 2 {
        t.Errorf("Expected 2 columns added, got %d", result.ColumnsAdded)
    }
    if result.ConstraintsAdded != 2 {
        t.Errorf("Expected 2 constraints added, got %d", result.ConstraintsAdded)
    }
    if result.IndexesAdded != 1 {
        t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
    }

    // Verify target has merged data
    table := target.Schemas[0].Tables[0]
    if len(table.Columns) != 3 {
        t.Errorf("Expected 3 columns in merged table, got %d", len(table.Columns))
    }
    if len(table.Constraints) != 2 {
        t.Errorf("Expected 2 constraints in merged table, got %d", len(table.Constraints))
    }
    if len(table.Indexes) != 1 {
        t.Errorf("Expected 1 index in merged table, got %d", len(table.Indexes))
    }

    // Verify specific constraint
    if _, exists := table.Constraints["ukey_users_guid"]; !exists {
        t.Error("Expected ukey_users_guid constraint to exist")
    }
}
@@ -4,7 +4,12 @@
|
|||||||
// intermediate representation for converting between various database schema formats.
|
// intermediate representation for converting between various database schema formats.
|
||||||
package models
|
package models
|
||||||
|
|
||||||
import "strings"
|
import (
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
// DatabaseType represents the type of database system.
|
// DatabaseType represents the type of database system.
|
||||||
type DatabaseType string
|
type DatabaseType string
|
||||||
@@ -21,10 +26,13 @@ type Database struct {
|
|||||||
Name string `json:"name" yaml:"name"`
|
Name string `json:"name" yaml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
Schemas []*Schema `json:"schemas" yaml:"schemas" xml:"schemas"`
|
Schemas []*Schema `json:"schemas" yaml:"schemas" xml:"schemas"`
|
||||||
|
Domains []*Domain `json:"domains,omitempty" yaml:"domains,omitempty" xml:"domains,omitempty"`
|
||||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
DatabaseType DatabaseType `json:"database_type,omitempty" yaml:"database_type,omitempty" xml:"database_type,omitempty"`
|
DatabaseType DatabaseType `json:"database_type,omitempty" yaml:"database_type,omitempty" xml:"database_type,omitempty"`
|
||||||
DatabaseVersion string `json:"database_version,omitempty" yaml:"database_version,omitempty" xml:"database_version,omitempty"`
|
DatabaseVersion string `json:"database_version,omitempty" yaml:"database_version,omitempty" xml:"database_version,omitempty"`
|
||||||
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
|
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
|
||||||
|
UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the database name in lowercase for SQL compatibility.
|
// SQLName returns the database name in lowercase for SQL compatibility.
|
||||||
@@ -32,6 +40,39 @@ func (d *Database) SQLName() string {
|
|||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
|
||||||
|
func (d *Database) UpdateDate() {
|
||||||
|
d.UpdatedAt = time.Now().Format(time.RFC3339)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Domain represents a logical business domain grouping multiple tables from potentially different schemas.
|
||||||
|
// Domains allow for organizing database tables by functional areas (e.g., authentication, user data, financial).
|
||||||
|
type Domain struct {
|
||||||
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
|
Tables []*DomainTable `json:"tables" yaml:"tables" xml:"tables"`
|
||||||
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
|
Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
|
||||||
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SQLName returns the domain name in lowercase for SQL compatibility.
|
||||||
|
func (d *Domain) SQLName() string {
|
||||||
|
return strings.ToLower(d.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DomainTable represents a reference to a specific table within a domain.
|
||||||
|
// It identifies the table by name and schema, allowing a single domain to include
|
||||||
|
// tables from multiple schemas.
|
||||||
|
type DomainTable struct {
|
||||||
|
TableName string `json:"table_name" yaml:"table_name" xml:"table_name"`
|
||||||
|
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
||||||
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
RefTable *Table `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
|
}
|
||||||
|
|
||||||
// Schema represents a database schema, which is a logical grouping of database objects
|
// Schema represents a database schema, which is a logical grouping of database objects
|
||||||
// such as tables, views, sequences, and relationships within a database.
|
// such as tables, views, sequences, and relationships within a database.
|
||||||
type Schema struct {
|
type Schema struct {
|
||||||
@@ -49,6 +90,16 @@ type Schema struct {
|
|||||||
RefDatabase *Database `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefDatabase *Database `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
Relations []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
|
Relations []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
|
||||||
Enums []*Enum `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
|
Enums []*Enum `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
|
||||||
|
UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdaUpdateDateted sets the UpdatedAt field to the current time in RFC3339 format.
|
||||||
|
func (d *Schema) UpdateDate() {
|
||||||
|
d.UpdatedAt = time.Now().Format(time.RFC3339)
|
||||||
|
if d.RefDatabase != nil {
|
||||||
|
d.RefDatabase.UpdateDate()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the schema name in lowercase for SQL compatibility.
|
// SQLName returns the schema name in lowercase for SQL compatibility.
|
||||||
@@ -71,6 +122,16 @@ type Table struct {
|
|||||||
Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
|
Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
|
UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
|
||||||
|
func (d *Table) UpdateDate() {
|
||||||
|
d.UpdatedAt = time.Now().Format(time.RFC3339)
|
||||||
|
if d.RefSchema != nil {
|
||||||
|
d.RefSchema.UpdateDate()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the table name in lowercase for SQL compatibility.
|
// SQLName returns the table name in lowercase for SQL compatibility.
|
||||||
@@ -111,6 +172,7 @@ type View struct {
|
|||||||
Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
|
Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the view name in lowercase for SQL compatibility.
|
// SQLName returns the view name in lowercase for SQL compatibility.
|
||||||
@@ -134,6 +196,7 @@ type Sequence struct {
|
|||||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the sequence name in lowercase for SQL compatibility.
|
// SQLName returns the sequence name in lowercase for SQL compatibility.
|
||||||
@@ -158,6 +221,7 @@ type Column struct {
|
|||||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
Collation string `json:"collation,omitempty" yaml:"collation,omitempty" xml:"collation,omitempty"`
|
Collation string `json:"collation,omitempty" yaml:"collation,omitempty" xml:"collation,omitempty"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the column name in lowercase for SQL compatibility.
|
// SQLName returns the column name in lowercase for SQL compatibility.
|
||||||
@@ -180,6 +244,7 @@ type Index struct {
|
|||||||
Include []string `json:"include,omitempty" yaml:"include,omitempty" xml:"include,omitempty"` // INCLUDE columns
|
Include []string `json:"include,omitempty" yaml:"include,omitempty" xml:"include,omitempty"` // INCLUDE columns
|
||||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the index name in lowercase for SQL compatibility.
|
// SQLName returns the index name in lowercase for SQL compatibility.
|
||||||
@@ -214,6 +279,7 @@ type Relationship struct {
|
|||||||
ThroughSchema string `json:"through_schema,omitempty" yaml:"through_schema,omitempty" xml:"through_schema,omitempty"`
|
ThroughSchema string `json:"through_schema,omitempty" yaml:"through_schema,omitempty" xml:"through_schema,omitempty"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the relationship name in lowercase for SQL compatibility.
|
// SQLName returns the relationship name in lowercase for SQL compatibility.
|
||||||
@@ -238,6 +304,7 @@ type Constraint struct {
|
|||||||
Deferrable bool `json:"deferrable,omitempty" yaml:"deferrable,omitempty" xml:"deferrable,omitempty"`
|
Deferrable bool `json:"deferrable,omitempty" yaml:"deferrable,omitempty" xml:"deferrable,omitempty"`
|
||||||
InitiallyDeferred bool `json:"initially_deferred,omitempty" yaml:"initially_deferred,omitempty" xml:"initially_deferred,omitempty"`
|
InitiallyDeferred bool `json:"initially_deferred,omitempty" yaml:"initially_deferred,omitempty" xml:"initially_deferred,omitempty"`
|
||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the constraint name in lowercase for SQL compatibility.
|
// SQLName returns the constraint name in lowercase for SQL compatibility.
|
||||||
@@ -253,6 +320,7 @@ type Enum struct {
|
|||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Values []string `json:"values" yaml:"values" xml:"values"`
|
Values []string `json:"values" yaml:"values" xml:"values"`
|
||||||
Schema string `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
|
Schema string `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
|
||||||
|
GUID string `json:"guid" yaml:"guid" xml:"guid"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the enum name in lowercase for SQL compatibility.
|
// SQLName returns the enum name in lowercase for SQL compatibility.
|
||||||
@@ -260,6 +328,16 @@ func (d *Enum) SQLName() string {
|
|||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// InitEnum initializes a new Enum with empty values slice
|
||||||
|
func InitEnum(name, schema string) *Enum {
|
||||||
|
return &Enum{
|
||||||
|
Name: name,
|
||||||
|
Schema: schema,
|
||||||
|
Values: make([]string, 0),
|
||||||
|
GUID: uuid.New().String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Supported constraint types.
|
// Supported constraint types.
|
||||||
const (
|
const (
|
||||||
PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
|
PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
|
||||||
@@ -281,6 +359,7 @@ type Script struct {
 	Version  string `json:"version,omitempty" yaml:"version,omitempty" xml:"version,omitempty"`
 	Priority int    `json:"priority,omitempty" yaml:"priority,omitempty" xml:"priority,omitempty"`
 	Sequence uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+	GUID     string `json:"guid" yaml:"guid" xml:"guid"`
 }
 
 // SQLName returns the script name in lowercase for SQL compatibility.
@@ -295,6 +374,8 @@ func InitDatabase(name string) *Database {
 	return &Database{
 		Name:    name,
 		Schemas: make([]*Schema, 0),
+		Domains: make([]*Domain, 0),
+		GUID:    uuid.New().String(),
 	}
 }
 
@@ -308,6 +389,7 @@ func InitSchema(name string) *Schema {
 		Permissions: make(map[string]string),
 		Metadata:    make(map[string]any),
 		Scripts:     make([]*Script, 0),
+		GUID:        uuid.New().String(),
 	}
 }
 
@@ -321,6 +403,7 @@ func InitTable(name, schema string) *Table {
 		Indexes:       make(map[string]*Index),
 		Relationships: make(map[string]*Relationship),
 		Metadata:      make(map[string]any),
+		GUID:          uuid.New().String(),
 	}
 }
 
@@ -330,6 +413,7 @@ func InitColumn(name, table, schema string) *Column {
 		Name:   name,
 		Table:  table,
 		Schema: schema,
+		GUID:   uuid.New().String(),
 	}
 }
 
@@ -341,6 +425,7 @@ func InitIndex(name, table, schema string) *Index {
 		Schema:  schema,
 		Columns: make([]string, 0),
 		Include: make([]string, 0),
+		GUID:    uuid.New().String(),
 	}
 }
 
@@ -353,6 +438,7 @@ func InitRelation(name, schema string) *Relationship {
 		Properties:  make(map[string]string),
 		FromColumns: make([]string, 0),
 		ToColumns:   make([]string, 0),
+		GUID:        uuid.New().String(),
 	}
 }
 
@@ -362,6 +448,7 @@ func InitRelationship(name string, relType RelationType) *Relationship {
 		Name:       name,
 		Type:       relType,
 		Properties: make(map[string]string),
+		GUID:       uuid.New().String(),
 	}
 }
 
@@ -372,6 +459,7 @@ func InitConstraint(name string, constraintType ConstraintType) *Constraint {
 		Type:              constraintType,
 		Columns:           make([]string, 0),
 		ReferencedColumns: make([]string, 0),
+		GUID:              uuid.New().String(),
 	}
 }
 
@@ -380,6 +468,7 @@ func InitScript(name string) *Script {
 	return &Script{
 		Name:     name,
 		RunAfter: make([]string, 0),
+		GUID:     uuid.New().String(),
 	}
 }
 
@@ -390,6 +479,7 @@ func InitView(name, schema string) *View {
 		Schema:   schema,
 		Columns:  make(map[string]*Column),
 		Metadata: make(map[string]any),
+		GUID:     uuid.New().String(),
 	}
 }
 
@@ -400,5 +490,25 @@ func InitSequence(name, schema string) *Sequence {
 		Schema:      schema,
 		IncrementBy: 1,
 		StartValue:  1,
+		GUID:        uuid.New().String(),
+	}
+}
+
+// InitDomain initializes a new Domain with empty slices and maps
+func InitDomain(name string) *Domain {
+	return &Domain{
+		Name:     name,
+		Tables:   make([]*DomainTable, 0),
+		Metadata: make(map[string]any),
+		GUID:     uuid.New().String(),
+	}
+}
+
+// InitDomainTable initializes a new DomainTable reference
+func InitDomainTable(tableName, schemaName string) *DomainTable {
+	return &DomainTable{
+		TableName:  tableName,
+		SchemaName: schemaName,
+		GUID:       uuid.New().String(),
 	}
 }
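
Taken together, these hunks give every model element a GUID that the Init* constructors populate at creation time. A minimal usage sketch in Go (assuming the module path git.warky.dev/wdevs/relspecgo seen in the reader imports below, and that uuid.New() comes from the google/uuid package):

	package main

	import (
		"fmt"

		"git.warky.dev/wdevs/relspecgo/pkg/models"
	)

	func main() {
		// Each constructor stamps a fresh GUID on the element it builds.
		db := models.InitDatabase("appdb")
		schema := models.InitSchema("public")
		table := models.InitTable("users", "public")

		schema.Tables = append(schema.Tables, table)
		db.Schemas = append(db.Schemas, schema)

		fmt.Println(db.GUID, schema.GUID, table.GUID) // three distinct UUIDs
	}
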
282  pkg/models/sorting.go  Normal file
@@ -0,0 +1,282 @@
package models

import (
	"sort"
	"strings"
)

// SortOrder represents the sort direction
type SortOrder bool

const (
	// Ascending sort order
	Ascending SortOrder = false
	// Descending sort order
	Descending SortOrder = true
)

// Schema Sorting

// SortSchemasByName sorts schemas by name
func SortSchemasByName(schemas []*Schema, desc bool) error {
	sort.SliceStable(schemas, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(schemas[i].Name), strings.ToLower(schemas[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortSchemasBySequence sorts schemas by sequence number
func SortSchemasBySequence(schemas []*Schema, desc bool) error {
	sort.SliceStable(schemas, func(i, j int) bool {
		if desc {
			return schemas[i].Sequence > schemas[j].Sequence
		}
		return schemas[i].Sequence < schemas[j].Sequence
	})
	return nil
}

// Table Sorting

// SortTablesByName sorts tables by name
func SortTablesByName(tables []*Table, desc bool) error {
	sort.SliceStable(tables, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(tables[i].Name), strings.ToLower(tables[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortTablesBySequence sorts tables by sequence number
func SortTablesBySequence(tables []*Table, desc bool) error {
	sort.SliceStable(tables, func(i, j int) bool {
		if desc {
			return tables[i].Sequence > tables[j].Sequence
		}
		return tables[i].Sequence < tables[j].Sequence
	})
	return nil
}

// Column Sorting

// SortColumnsMapByName converts a column map to a slice sorted by name
func SortColumnsMapByName(columns map[string]*Column, desc bool) []*Column {
	result := make([]*Column, 0, len(columns))
	for _, col := range columns {
		result = append(result, col)
	}
	_ = SortColumnsByName(result, desc)
	return result
}

// SortColumnsMapBySequence converts a column map to a slice sorted by sequence
func SortColumnsMapBySequence(columns map[string]*Column, desc bool) []*Column {
	result := make([]*Column, 0, len(columns))
	for _, col := range columns {
		result = append(result, col)
	}
	_ = SortColumnsBySequence(result, desc)
	return result
}

// SortColumnsByName sorts columns by name
func SortColumnsByName(columns []*Column, desc bool) error {
	sort.SliceStable(columns, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(columns[i].Name), strings.ToLower(columns[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortColumnsBySequence sorts columns by sequence number
func SortColumnsBySequence(columns []*Column, desc bool) error {
	sort.SliceStable(columns, func(i, j int) bool {
		if desc {
			return columns[i].Sequence > columns[j].Sequence
		}
		return columns[i].Sequence < columns[j].Sequence
	})
	return nil
}

// View Sorting

// SortViewsByName sorts views by name
func SortViewsByName(views []*View, desc bool) error {
	sort.SliceStable(views, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(views[i].Name), strings.ToLower(views[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortViewsBySequence sorts views by sequence number
func SortViewsBySequence(views []*View, desc bool) error {
	sort.SliceStable(views, func(i, j int) bool {
		if desc {
			return views[i].Sequence > views[j].Sequence
		}
		return views[i].Sequence < views[j].Sequence
	})
	return nil
}

// Sequence Sorting

// SortSequencesByName sorts sequences by name
func SortSequencesByName(sequences []*Sequence, desc bool) error {
	sort.SliceStable(sequences, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(sequences[i].Name), strings.ToLower(sequences[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortSequencesBySequence sorts sequences by sequence number
func SortSequencesBySequence(sequences []*Sequence, desc bool) error {
	sort.SliceStable(sequences, func(i, j int) bool {
		if desc {
			return sequences[i].Sequence > sequences[j].Sequence
		}
		return sequences[i].Sequence < sequences[j].Sequence
	})
	return nil
}

// Index Sorting

// SortIndexesMapByName converts an index map to a slice sorted by name
func SortIndexesMapByName(indexes map[string]*Index, desc bool) []*Index {
	result := make([]*Index, 0, len(indexes))
	for _, idx := range indexes {
		result = append(result, idx)
	}
	_ = SortIndexesByName(result, desc)
	return result
}

// SortIndexesMapBySequence converts an index map to a slice sorted by sequence
func SortIndexesMapBySequence(indexes map[string]*Index, desc bool) []*Index {
	result := make([]*Index, 0, len(indexes))
	for _, idx := range indexes {
		result = append(result, idx)
	}
	_ = SortIndexesBySequence(result, desc)
	return result
}

// SortIndexesByName sorts indexes by name
func SortIndexesByName(indexes []*Index, desc bool) error {
	sort.SliceStable(indexes, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(indexes[i].Name), strings.ToLower(indexes[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortIndexesBySequence sorts indexes by sequence number
func SortIndexesBySequence(indexes []*Index, desc bool) error {
	sort.SliceStable(indexes, func(i, j int) bool {
		if desc {
			return indexes[i].Sequence > indexes[j].Sequence
		}
		return indexes[i].Sequence < indexes[j].Sequence
	})
	return nil
}

// Constraint Sorting

// SortConstraintsMapByName converts a constraint map to a slice sorted by name
func SortConstraintsMapByName(constraints map[string]*Constraint, desc bool) []*Constraint {
	result := make([]*Constraint, 0, len(constraints))
	for _, c := range constraints {
		result = append(result, c)
	}
	_ = SortConstraintsByName(result, desc)
	return result
}

// SortConstraintsByName sorts constraints by name
func SortConstraintsByName(constraints []*Constraint, desc bool) error {
	sort.SliceStable(constraints, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(constraints[i].Name), strings.ToLower(constraints[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Relationship Sorting

// SortRelationshipsMapByName converts a relationship map to a slice sorted by name
func SortRelationshipsMapByName(relationships map[string]*Relationship, desc bool) []*Relationship {
	result := make([]*Relationship, 0, len(relationships))
	for _, r := range relationships {
		result = append(result, r)
	}
	_ = SortRelationshipsByName(result, desc)
	return result
}

// SortRelationshipsByName sorts relationships by name
func SortRelationshipsByName(relationships []*Relationship, desc bool) error {
	sort.SliceStable(relationships, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(relationships[i].Name), strings.ToLower(relationships[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Script Sorting

// SortScriptsByName sorts scripts by name
func SortScriptsByName(scripts []*Script, desc bool) error {
	sort.SliceStable(scripts, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(scripts[i].Name), strings.ToLower(scripts[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Enum Sorting

// SortEnumsByName sorts enums by name
func SortEnumsByName(enums []*Enum, desc bool) error {
	sort.SliceStable(enums, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(enums[i].Name), strings.ToLower(enums[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}
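
The Map-based helpers exist because Columns, Indexes, Constraints, and Relationships are stored in maps, which have no stable iteration order; the Map variants copy into a slice and delegate to the slice sorters. A short usage sketch with the functions defined above (names and signatures as in this file):

	// Deterministic output order for writers: tables by name, columns by sequence.
	_ = models.SortTablesByName(schema.Tables, false)
	for _, tbl := range schema.Tables {
		for _, col := range models.SortColumnsMapBySequence(tbl.Columns, false) {
			fmt.Println(tbl.Name, col.Sequence, col.Name)
		}
	}
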
@@ -4,31 +4,31 @@ import "strings"
 
 var GoToStdTypes = map[string]string{
 	"bool":            "boolean",
-	"int64":           "integer",
+	"int64":           "bigint",
 	"int":             "integer",
-	"int8":            "integer",
-	"int16":           "integer",
+	"int8":            "smallint",
+	"int16":           "smallint",
 	"int32":           "integer",
 	"uint":            "integer",
-	"uint8":           "integer",
-	"uint16":          "integer",
+	"uint8":           "smallint",
+	"uint16":          "smallint",
 	"uint32":          "integer",
-	"uint64":          "integer",
-	"uintptr":         "integer",
-	"znullint64":      "integer",
+	"uint64":          "bigint",
+	"uintptr":         "bigint",
+	"znullint64":      "bigint",
 	"znullint32":      "integer",
-	"znullbyte":       "integer",
+	"znullbyte":       "smallint",
 	"float64":         "double",
 	"float32":         "double",
 	"complex64":       "double",
 	"complex128":      "double",
 	"customfloat64":   "double",
-	"string":          "string",
-	"Pointer":         "integer",
+	"string":          "text",
+	"Pointer":         "bigint",
 	"[]byte":          "blob",
-	"customdate":      "string",
-	"customtime":      "string",
-	"customtimestamp": "string",
+	"customdate":      "date",
+	"customtime":      "time",
+	"customtimestamp": "timestamp",
 	"sqlfloat64":      "double",
 	"sqlfloat16":      "double",
 	"sqluuid":         "uuid",
@@ -36,9 +36,9 @@ var GoToStdTypes = map[string]string{
 	"sqljson":       "json",
 	"sqlint64":      "bigint",
 	"sqlint32":      "integer",
-	"sqlint16":      "integer",
+	"sqlint16":      "smallint",
 	"sqlbool":       "boolean",
-	"sqlstring":     "string",
+	"sqlstring":     "text",
 	"nullablejsonb": "jsonb",
 	"nullablejson":  "json",
 	"nullableuuid":  "uuid",
@@ -67,7 +67,7 @@ var GoToPGSQLTypes = map[string]string{
 	"float32":       "real",
 	"complex64":     "double precision",
 	"complex128":    "double precision",
-	"customfloat64": "double precisio",
+	"customfloat64": "double precision",
 	"string":        "text",
 	"Pointer":       "bigint",
 	"[]byte":        "bytea",
@@ -81,9 +81,9 @@ var GoToPGSQLTypes = map[string]string{
 	"sqljson":       "json",
 	"sqlint64":      "bigint",
 	"sqlint32":      "integer",
-	"sqlint16":      "integer",
+	"sqlint16":      "smallint",
 	"sqlbool":       "boolean",
-	"sqlstring":     "string",
+	"sqlstring":     "text",
 	"nullablejsonb": "jsonb",
 	"nullablejson":  "json",
 	"nullableuuid":  "uuid",
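
The remapping makes Go integer widths round-trip to width-accurate SQL types instead of flattening everything to integer, and moves string-ish custom types onto real date/time/text types. A quick check against the exported maps (these specific values are confirmed by the tests in the new file below):

	fmt.Println(pgsql.GoToStdTypes["int16"])      // "smallint" (was "integer")
	fmt.Println(pgsql.GoToStdTypes["int64"])      // "bigint"   (was "integer")
	fmt.Println(pgsql.GoToStdTypes["string"])     // "text"     (was "string")
	fmt.Println(pgsql.GoToStdTypes["customdate"]) // "date"     (was "string")
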
339  pkg/pgsql/datatypes_test.go  Normal file
@@ -0,0 +1,339 @@
package pgsql

import (
	"testing"
)

func TestValidSQLType(t *testing.T) {
	tests := []struct {
		name    string
		sqltype string
		want    bool
	}{
		// PostgreSQL types
		{"Valid PGSQL bigint", "bigint", true},
		{"Valid PGSQL integer", "integer", true},
		{"Valid PGSQL text", "text", true},
		{"Valid PGSQL boolean", "boolean", true},
		{"Valid PGSQL double precision", "double precision", true},
		{"Valid PGSQL bytea", "bytea", true},
		{"Valid PGSQL uuid", "uuid", true},
		{"Valid PGSQL jsonb", "jsonb", true},
		{"Valid PGSQL json", "json", true},
		{"Valid PGSQL timestamp", "timestamp", true},
		{"Valid PGSQL date", "date", true},
		{"Valid PGSQL time", "time", true},
		{"Valid PGSQL citext", "citext", true},

		// Standard types
		{"Valid std double", "double", true},
		{"Valid std blob", "blob", true},

		// Case insensitive
		{"Case insensitive BIGINT", "BIGINT", true},
		{"Case insensitive TeXt", "TeXt", true},
		{"Case insensitive BoOlEaN", "BoOlEaN", true},

		// Invalid types
		{"Invalid type", "invalidtype", false},
		{"Invalid type varchar", "varchar", false},
		{"Empty string", "", false},
		{"Random string", "foobar", false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ValidSQLType(tt.sqltype)
			if got != tt.want {
				t.Errorf("ValidSQLType(%q) = %v, want %v", tt.sqltype, got, tt.want)
			}
		})
	}
}

func TestGetSQLType(t *testing.T) {
	tests := []struct {
		name    string
		anytype string
		want    string
	}{
		// Go types to PostgreSQL types
		{"Go bool to boolean", "bool", "boolean"},
		{"Go int64 to bigint", "int64", "bigint"},
		{"Go int to integer", "int", "integer"},
		{"Go string to text", "string", "text"},
		{"Go float64 to double precision", "float64", "double precision"},
		{"Go float32 to real", "float32", "real"},
		{"Go []byte to bytea", "[]byte", "bytea"},

		// SQL types remain SQL types
		{"SQL bigint", "bigint", "bigint"},
		{"SQL integer", "integer", "integer"},
		{"SQL text", "text", "text"},
		{"SQL boolean", "boolean", "boolean"},
		{"SQL uuid", "uuid", "uuid"},
		{"SQL jsonb", "jsonb", "jsonb"},

		// Case insensitive Go types
		{"Case insensitive BOOL", "BOOL", "boolean"},
		{"Case insensitive InT64", "InT64", "bigint"},
		{"Case insensitive STRING", "STRING", "text"},

		// Case insensitive SQL types
		{"Case insensitive BIGINT", "BIGINT", "bigint"},
		{"Case insensitive TEXT", "TEXT", "text"},

		// Custom types
		{"Custom sqluuid", "sqluuid", "uuid"},
		{"Custom sqljsonb", "sqljsonb", "jsonb"},
		{"Custom sqlint64", "sqlint64", "bigint"},

		// Unknown types default to text
		{"Unknown type varchar", "varchar", "text"},
		{"Unknown type foobar", "foobar", "text"},
		{"Empty string", "", "text"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := GetSQLType(tt.anytype)
			if got != tt.want {
				t.Errorf("GetSQLType(%q) = %q, want %q", tt.anytype, got, tt.want)
			}
		})
	}
}

func TestConvertSQLType(t *testing.T) {
	tests := []struct {
		name    string
		anytype string
		want    string
	}{
		// Go types to PostgreSQL types
		{"Go bool to boolean", "bool", "boolean"},
		{"Go int64 to bigint", "int64", "bigint"},
		{"Go int to integer", "int", "integer"},
		{"Go string to text", "string", "text"},
		{"Go float64 to double precision", "float64", "double precision"},
		{"Go float32 to real", "float32", "real"},
		{"Go []byte to bytea", "[]byte", "bytea"},

		// SQL types remain SQL types
		{"SQL bigint", "bigint", "bigint"},
		{"SQL integer", "integer", "integer"},
		{"SQL text", "text", "text"},
		{"SQL boolean", "boolean", "boolean"},

		// Case insensitive
		{"Case insensitive BOOL", "BOOL", "boolean"},
		{"Case insensitive InT64", "InT64", "bigint"},

		// Unknown types remain unchanged (difference from GetSQLType)
		{"Unknown type varchar", "varchar", "varchar"},
		{"Unknown type foobar", "foobar", "foobar"},
		{"Empty string", "", ""},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ConvertSQLType(tt.anytype)
			if got != tt.want {
				t.Errorf("ConvertSQLType(%q) = %q, want %q", tt.anytype, got, tt.want)
			}
		})
	}
}

func TestIsGoType(t *testing.T) {
	tests := []struct {
		name     string
		typeName string
		want     bool
	}{
		// Go basic types
		{"Go bool", "bool", true},
		{"Go int64", "int64", true},
		{"Go int", "int", true},
		{"Go int32", "int32", true},
		{"Go int16", "int16", true},
		{"Go int8", "int8", true},
		{"Go uint", "uint", true},
		{"Go uint64", "uint64", true},
		{"Go uint32", "uint32", true},
		{"Go uint16", "uint16", true},
		{"Go uint8", "uint8", true},
		{"Go float64", "float64", true},
		{"Go float32", "float32", true},
		{"Go string", "string", true},
		{"Go []byte", "[]byte", true},

		// Go custom types
		{"Go complex64", "complex64", true},
		{"Go complex128", "complex128", true},
		{"Go uintptr", "uintptr", true},
		{"Go Pointer", "Pointer", true},

		// Custom SQL types
		{"Custom sqluuid", "sqluuid", true},
		{"Custom sqljsonb", "sqljsonb", true},
		{"Custom sqlint64", "sqlint64", true},
		{"Custom customdate", "customdate", true},
		{"Custom customtime", "customtime", true},

		// Case insensitive
		{"Case insensitive BOOL", "BOOL", true},
		{"Case insensitive InT64", "InT64", true},
		{"Case insensitive STRING", "STRING", true},

		// SQL types (not Go types)
		{"SQL bigint", "bigint", false},
		{"SQL integer", "integer", false},
		{"SQL text", "text", false},
		{"SQL boolean", "boolean", false},

		// Invalid types
		{"Invalid type", "invalidtype", false},
		{"Empty string", "", false},
		{"Random string", "foobar", false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := IsGoType(tt.typeName)
			if got != tt.want {
				t.Errorf("IsGoType(%q) = %v, want %v", tt.typeName, got, tt.want)
			}
		})
	}
}

func TestGetStdTypeFromGo(t *testing.T) {
	tests := []struct {
		name     string
		typeName string
		want     string
	}{
		// Go types to standard SQL types
		{"Go bool to boolean", "bool", "boolean"},
		{"Go int64 to bigint", "int64", "bigint"},
		{"Go int to integer", "int", "integer"},
		{"Go string to text", "string", "text"},
		{"Go float64 to double", "float64", "double"},
		{"Go float32 to double", "float32", "double"},
		{"Go []byte to blob", "[]byte", "blob"},
		{"Go int32 to integer", "int32", "integer"},
		{"Go int16 to smallint", "int16", "smallint"},

		// Custom types
		{"Custom sqluuid to uuid", "sqluuid", "uuid"},
		{"Custom sqljsonb to jsonb", "sqljsonb", "jsonb"},
		{"Custom sqlint64 to bigint", "sqlint64", "bigint"},
		{"Custom customdate to date", "customdate", "date"},

		// Case insensitive
		{"Case insensitive BOOL", "BOOL", "boolean"},
		{"Case insensitive InT64", "InT64", "bigint"},
		{"Case insensitive STRING", "STRING", "text"},

		// Non-Go types remain unchanged
		{"SQL bigint unchanged", "bigint", "bigint"},
		{"SQL integer unchanged", "integer", "integer"},
		{"Invalid type unchanged", "invalidtype", "invalidtype"},
		{"Empty string unchanged", "", ""},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := GetStdTypeFromGo(tt.typeName)
			if got != tt.want {
				t.Errorf("GetStdTypeFromGo(%q) = %q, want %q", tt.typeName, got, tt.want)
			}
		})
	}
}

func TestGoToStdTypesMap(t *testing.T) {
	// Test that the map contains expected entries
	expectedMappings := map[string]string{
		"bool":    "boolean",
		"int64":   "bigint",
		"int":     "integer",
		"string":  "text",
		"float64": "double",
		"[]byte":  "blob",
	}

	for goType, expectedStd := range expectedMappings {
		if stdType, ok := GoToStdTypes[goType]; !ok {
			t.Errorf("GoToStdTypes missing entry for %q", goType)
		} else if stdType != expectedStd {
			t.Errorf("GoToStdTypes[%q] = %q, want %q", goType, stdType, expectedStd)
		}
	}

	// Test that the map is not empty
	if len(GoToStdTypes) == 0 {
		t.Error("GoToStdTypes map is empty")
	}
}

func TestGoToPGSQLTypesMap(t *testing.T) {
	// Test that the map contains expected entries
	expectedMappings := map[string]string{
		"bool":    "boolean",
		"int64":   "bigint",
		"int":     "integer",
		"string":  "text",
		"float64": "double precision",
		"float32": "real",
		"[]byte":  "bytea",
	}

	for goType, expectedPG := range expectedMappings {
		if pgType, ok := GoToPGSQLTypes[goType]; !ok {
			t.Errorf("GoToPGSQLTypes missing entry for %q", goType)
		} else if pgType != expectedPG {
			t.Errorf("GoToPGSQLTypes[%q] = %q, want %q", goType, pgType, expectedPG)
		}
	}

	// Test that the map is not empty
	if len(GoToPGSQLTypes) == 0 {
		t.Error("GoToPGSQLTypes map is empty")
	}
}

func TestTypeConversionConsistency(t *testing.T) {
	// Test that GetSQLType and ConvertSQLType are consistent for known types
	knownGoTypes := []string{"bool", "int64", "int", "string", "float64", "[]byte"}

	for _, goType := range knownGoTypes {
		getSQLResult := GetSQLType(goType)
		convertResult := ConvertSQLType(goType)

		if getSQLResult != convertResult {
			t.Errorf("Inconsistent results for %q: GetSQLType=%q, ConvertSQLType=%q",
				goType, getSQLResult, convertResult)
		}
	}
}

func TestGetSQLTypeVsConvertSQLTypeDifference(t *testing.T) {
	// Test that GetSQLType returns "text" for unknown types
	// while ConvertSQLType returns the input unchanged
	unknownTypes := []string{"varchar", "char", "customtype", "unknowntype"}

	for _, unknown := range unknownTypes {
		getSQLResult := GetSQLType(unknown)
		convertResult := ConvertSQLType(unknown)

		if getSQLResult != "text" {
			t.Errorf("GetSQLType(%q) = %q, want %q", unknown, getSQLResult, "text")
		}

		if convertResult != unknown {
			t.Errorf("ConvertSQLType(%q) = %q, want %q", unknown, convertResult, unknown)
		}
	}
}
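
The last test pins down the one behavioral difference between the two lookups, which is the deciding factor when choosing a helper. A short sketch, with userInput standing in for any caller-supplied type name:

	userInput := "varchar"
	colType := pgsql.GetSQLType(userInput)     // "text": unknown types degrade to a safe default
	rawType := pgsql.ConvertSQLType(userInput) // "varchar": unknown types pass through unchanged
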
36  pkg/pgsql/doc.go  Normal file
@@ -0,0 +1,36 @@
// Package pgsql provides PostgreSQL-specific utilities and helpers.
//
// # Overview
//
// The pgsql package contains PostgreSQL-specific functionality including:
//   - SQL reserved keyword validation
//   - Data type mappings and conversions
//   - PostgreSQL-specific schema introspection helpers
//
// # Components
//
// keywords.go - SQL reserved keywords validation
//
// Provides functions to check if identifiers conflict with SQL reserved words
// and need quoting for safe usage in PostgreSQL queries.
//
// datatypes.go - PostgreSQL data type utilities
//
// Contains mappings between PostgreSQL data types and their equivalents in other
// systems, as well as type conversion and normalization functions.
//
// # Usage
//
//	// Check if identifier needs quoting
//	if pgsql.IsReservedKeyword("user") {
//		// Quote the identifier
//	}
//
//	// Normalize data type
//	normalizedType := pgsql.NormalizeDataType("varchar(255)")
//
// # Purpose
//
// This package supports the PostgreSQL reader and writer implementations by providing
// shared utilities for handling PostgreSQL-specific schema elements and constraints.
package pgsql
136  pkg/pgsql/keywords_test.go  Normal file
@@ -0,0 +1,136 @@
package pgsql

import (
	"testing"
)

func TestGetPostgresKeywords(t *testing.T) {
	keywords := GetPostgresKeywords()

	// Test that keywords are returned
	if len(keywords) == 0 {
		t.Fatal("Expected non-empty list of keywords")
	}

	// Test that we get all keywords from the map
	expectedCount := len(postgresKeywords)
	if len(keywords) != expectedCount {
		t.Errorf("Expected %d keywords, got %d", expectedCount, len(keywords))
	}

	// Test that all returned keywords exist in the map
	for _, keyword := range keywords {
		if !postgresKeywords[keyword] {
			t.Errorf("Keyword %q not found in postgresKeywords map", keyword)
		}
	}

	// Test that no duplicate keywords are returned
	seen := make(map[string]bool)
	for _, keyword := range keywords {
		if seen[keyword] {
			t.Errorf("Duplicate keyword found: %q", keyword)
		}
		seen[keyword] = true
	}
}

func TestPostgresKeywordsMap(t *testing.T) {
	tests := []struct {
		name    string
		keyword string
		want    bool
	}{
		{"SELECT keyword", "select", true},
		{"FROM keyword", "from", true},
		{"WHERE keyword", "where", true},
		{"TABLE keyword", "table", true},
		{"PRIMARY keyword", "primary", true},
		{"FOREIGN keyword", "foreign", true},
		{"CREATE keyword", "create", true},
		{"DROP keyword", "drop", true},
		{"ALTER keyword", "alter", true},
		{"INDEX keyword", "index", true},
		{"NOT keyword", "not", true},
		{"NULL keyword", "null", true},
		{"TRUE keyword", "true", true},
		{"FALSE keyword", "false", true},
		{"Non-keyword lowercase", "notakeyword", false},
		{"Non-keyword uppercase", "NOTAKEYWORD", false},
		{"Empty string", "", false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := postgresKeywords[tt.keyword]
			if got != tt.want {
				t.Errorf("postgresKeywords[%q] = %v, want %v", tt.keyword, got, tt.want)
			}
		})
	}
}

func TestPostgresKeywordsMapContent(t *testing.T) {
	// Test that the map contains expected common keywords
	commonKeywords := []string{
		"select", "insert", "update", "delete", "create", "drop", "alter",
		"table", "index", "view", "schema", "function", "procedure",
		"primary", "foreign", "key", "constraint", "unique", "check",
		"null", "not", "and", "or", "like", "in", "between",
		"join", "inner", "left", "right", "cross", "full", "outer",
		"where", "having", "group", "order", "limit", "offset",
		"union", "intersect", "except",
		"begin", "commit", "rollback", "transaction",
	}

	for _, keyword := range commonKeywords {
		if !postgresKeywords[keyword] {
			t.Errorf("Expected common keyword %q to be in postgresKeywords map", keyword)
		}
	}
}

func TestPostgresKeywordsMapSize(t *testing.T) {
	// PostgreSQL has a substantial list of reserved keywords
	// This test ensures the map has a reasonable number of entries
	minExpectedKeywords := 200 // PostgreSQL 13+ has 400+ reserved words

	if len(postgresKeywords) < minExpectedKeywords {
		t.Errorf("Expected at least %d keywords, got %d. The map may be incomplete.",
			minExpectedKeywords, len(postgresKeywords))
	}
}

func TestGetPostgresKeywordsConsistency(t *testing.T) {
	// Test that calling GetPostgresKeywords multiple times returns consistent results
	keywords1 := GetPostgresKeywords()
	keywords2 := GetPostgresKeywords()

	if len(keywords1) != len(keywords2) {
		t.Errorf("Inconsistent results: first call returned %d keywords, second call returned %d",
			len(keywords1), len(keywords2))
	}

	// Create a map from both results to compare
	map1 := make(map[string]bool)
	map2 := make(map[string]bool)

	for _, k := range keywords1 {
		map1[k] = true
	}
	for _, k := range keywords2 {
		map2[k] = true
	}

	// Check that both contain the same keywords
	for k := range map1 {
		if !map2[k] {
			t.Errorf("Keyword %q present in first call but not in second", k)
		}
	}
	for k := range map2 {
		if !map1[k] {
			t.Errorf("Keyword %q present in second call but not in first", k)
		}
	}
}
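
Since postgresKeywords stores keywords in lowercase (per TestPostgresKeywordsMap, "select" is present but "NOTAKEYWORD" is not), callers quoting identifiers should normalize case first. A sketch built only on the exported GetPostgresKeywords; whether any particular word such as "user" gets quoted depends on the map's actual contents:

	reserved := make(map[string]bool)
	for _, k := range pgsql.GetPostgresKeywords() {
		reserved[k] = true
	}

	quote := func(ident string) string {
		if reserved[strings.ToLower(ident)] {
			return `"` + ident + `"` // quote identifiers that collide with keywords
		}
		return ident
	}
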
@@ -632,6 +632,9 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
 		column.Name = parts[0]
 	}
 
+	// Track if we found explicit nullability markers
+	hasExplicitNullableMarker := false
+
 	// Parse tag attributes
 	for _, part := range parts[1:] {
 		kv := strings.SplitN(part, ":", 2)
@@ -649,6 +652,10 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
 			column.IsPrimaryKey = true
 		case "notnull":
 			column.NotNull = true
+			hasExplicitNullableMarker = true
+		case "nullzero":
+			column.NotNull = false
+			hasExplicitNullableMarker = true
 		case "autoincrement":
 			column.AutoIncrement = true
 		case "default":
@@ -664,17 +671,15 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
 
 	// Determine if nullable based on Go type and bun tags
 	// In Bun:
-	// - nullzero tag means the field is nullable (can be NULL in DB)
-	// - absence of nullzero means the field is NOT NULL
-	// - primitive types (int64, bool, string) are NOT NULL by default
-	column.NotNull = true
-	// Primary keys are always NOT NULL
-	if strings.Contains(bunTag, "nullzero") {
-		column.NotNull = false
-	} else {
+	// - explicit "notnull" tag means NOT NULL
+	// - explicit "nullzero" tag means nullable
+	// - absence of explicit markers: infer from Go type
+	if !hasExplicitNullableMarker {
+		// Infer from Go type if no explicit marker found
 		column.NotNull = !r.isNullableGoType(fieldType)
 	}
+
+	// Primary keys are always NOT NULL
 	if column.IsPrimaryKey {
 		column.NotNull = true
 	}
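
The new precedence is: an explicit notnull/nullzero tag wins, otherwise nullability is inferred from the Go type, and primary keys are forced NOT NULL last. A hypothetical bun-tagged struct illustrating the outcomes (assuming, as the inference branch suggests, that isNullableGoType treats pointer types as nullable):

	type User struct {
		ID    int64   `bun:"id,pk"`         // PK: forced NOT NULL regardless of tags
		Email string  `bun:"email,notnull"` // explicit notnull marker wins
		Bio   *string `bun:"bio,nullzero"`  // explicit nullzero marker: nullable
		Age   int64   `bun:"age"`           // no marker: int64 inferred NOT NULL
		Note  *string `bun:"note"`          // no marker: pointer inferred nullable
	}
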
@@ -4,7 +4,9 @@ import (
 	"bufio"
 	"fmt"
 	"os"
+	"path/filepath"
 	"regexp"
+	"sort"
 	"strings"
 
 	"git.warky.dev/wdevs/relspecgo/pkg/models"
@@ -24,11 +26,23 @@ func NewReader(options *readers.ReaderOptions) *Reader {
 }
 
 // ReadDatabase reads and parses DBML input, returning a Database model
+// If FilePath points to a directory, all .dbml files are loaded and merged
 func (r *Reader) ReadDatabase() (*models.Database, error) {
 	if r.options.FilePath == "" {
 		return nil, fmt.Errorf("file path is required for DBML reader")
 	}
 
+	// Check if path is a directory
+	info, err := os.Stat(r.options.FilePath)
+	if err != nil {
+		return nil, fmt.Errorf("failed to stat path: %w", err)
+	}
+
+	if info.IsDir() {
+		return r.readDirectoryDBML(r.options.FilePath)
+	}
+
+	// Single file - existing logic
 	content, err := os.ReadFile(r.options.FilePath)
 	if err != nil {
 		return nil, fmt.Errorf("failed to read file: %w", err)
@@ -67,15 +81,341 @@ func (r *Reader) ReadTable() (*models.Table, error) {
 	return schema.Tables[0], nil
 }
 
-// stripQuotes removes surrounding quotes from an identifier
+// readDirectoryDBML processes all .dbml files in a directory
+// Returns a merged Database model
+func (r *Reader) readDirectoryDBML(dirPath string) (*models.Database, error) {
+	// Discover and sort DBML files
+	files, err := r.discoverDBMLFiles(dirPath)
+	if err != nil {
+		return nil, fmt.Errorf("failed to discover DBML files: %w", err)
+	}
+
+	// If no files are found, return an empty database
+	if len(files) == 0 {
+		db := models.InitDatabase("database")
+		if r.options.Metadata != nil {
+			if name, ok := r.options.Metadata["name"].(string); ok {
+				db.Name = name
+			}
+		}
+		return db, nil
+	}
+
+	// Initialize database (will be merged with files)
+	var db *models.Database
+
+	// Process each file in sorted order
+	for _, filePath := range files {
+		content, err := os.ReadFile(filePath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to read file %s: %w", filePath, err)
+		}
+
+		fileDB, err := r.parseDBML(string(content))
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse file %s: %w", filePath, err)
+		}
+
+		// The first file initializes the database
+		if db == nil {
+			db = fileDB
+		} else {
+			// Subsequent files are merged
+			mergeDatabase(db, fileDB)
+		}
+	}
+
+	return db, nil
+}
+
+// splitIdentifier splits a dotted identifier while respecting quotes
+// Handles cases like: "schema.with.dots"."table"."column"
+func splitIdentifier(s string) []string {
+	var parts []string
+	var current strings.Builder
+	inQuote := false
+	quoteChar := byte(0)
+
+	for i := 0; i < len(s); i++ {
+		ch := s[i]
+
+		if !inQuote {
+			switch ch {
+			case '"', '\'':
+				inQuote = true
+				quoteChar = ch
+				current.WriteByte(ch)
+			case '.':
+				if current.Len() > 0 {
+					parts = append(parts, current.String())
+					current.Reset()
+				}
+			default:
+				current.WriteByte(ch)
+			}
+		} else {
+			current.WriteByte(ch)
+			if ch == quoteChar {
+				inQuote = false
+			}
+		}
+	}
+
+	if current.Len() > 0 {
+		parts = append(parts, current.String())
+	}
+
+	return parts
+}
+
+// stripQuotes removes surrounding quotes and comments from an identifier
 func stripQuotes(s string) string {
 	s = strings.TrimSpace(s)
+
+	// Remove DBML comments in brackets (e.g., [note: 'description'])
+	// This handles inline comments like: "table_name" [note: 'comment']
+	commentRegex := regexp.MustCompile(`\s*\[.*?\]\s*`)
+	s = commentRegex.ReplaceAllString(s, "")
+
+	// Trim again after removing comments
+	s = strings.TrimSpace(s)
+
+	// Remove surrounding quotes (double or single)
 	if len(s) >= 2 && ((s[0] == '"' && s[len(s)-1] == '"') || (s[0] == '\'' && s[len(s)-1] == '\'')) {
 		return s[1 : len(s)-1]
 	}
 	return s
 }
+
+// parseFilePrefix extracts a numeric prefix from a filename
+// Examples: "1_schema.dbml" -> (1, true), "tables.dbml" -> (0, false)
+func parseFilePrefix(filename string) (int, bool) {
+	base := filepath.Base(filename)
+	re := regexp.MustCompile(`^(\d+)[_-]`)
+	matches := re.FindStringSubmatch(base)
+	if len(matches) > 1 {
+		var prefix int
+		_, err := fmt.Sscanf(matches[1], "%d", &prefix)
+		if err == nil {
+			return prefix, true
+		}
+	}
+	return 0, false
+}
+
+// hasCommentedRefs scans file content for commented-out Ref statements
+// Returns true if the file contains lines like: // Ref: table.col > other.col
+func hasCommentedRefs(filePath string) (bool, error) {
+	content, err := os.ReadFile(filePath)
+	if err != nil {
+		return false, err
+	}
+
+	scanner := bufio.NewScanner(strings.NewReader(string(content)))
+	commentedRefRegex := regexp.MustCompile(`^\s*//.*Ref:\s+`)
+
+	for scanner.Scan() {
+		line := scanner.Text()
+		if commentedRefRegex.MatchString(line) {
+			return true, nil
+		}
+	}
+
+	return false, nil
+}
+
+// discoverDBMLFiles finds all .dbml files in a directory and returns them sorted
+func (r *Reader) discoverDBMLFiles(dirPath string) ([]string, error) {
+	pattern := filepath.Join(dirPath, "*.dbml")
+	files, err := filepath.Glob(pattern)
+	if err != nil {
+		return nil, fmt.Errorf("failed to glob .dbml files: %w", err)
+	}
+
+	return sortDBMLFiles(files), nil
+}
+
+// sortDBMLFiles sorts files by:
+// 1. Files without commented refs (by numeric prefix, then alphabetically)
+// 2. Files with commented refs (by numeric prefix, then alphabetically)
+func sortDBMLFiles(files []string) []string {
+	// Create a slice to hold file info for sorting
+	type fileInfo struct {
+		path         string
+		hasCommented bool
+		prefix       int
+		hasPrefix    bool
+		basename     string
+	}
+
+	fileInfos := make([]fileInfo, 0, len(files))
+
+	for _, file := range files {
+		hasCommented, err := hasCommentedRefs(file)
+		if err != nil {
+			// If we can't read the file, treat it as not having commented refs
+			hasCommented = false
+		}
+
+		prefix, hasPrefix := parseFilePrefix(file)
+		basename := filepath.Base(file)
+
+		fileInfos = append(fileInfos, fileInfo{
+			path:         file,
+			hasCommented: hasCommented,
+			prefix:       prefix,
+			hasPrefix:    hasPrefix,
+			basename:     basename,
+		})
+	}
+
+	// Sort by: hasCommented (false first), hasPrefix (true first), prefix, basename
+	sort.Slice(fileInfos, func(i, j int) bool {
+		// First, sort by commented refs (files without commented refs come first)
+		if fileInfos[i].hasCommented != fileInfos[j].hasCommented {
+			return !fileInfos[i].hasCommented
+		}
+
+		// Then by presence of prefix (files with a prefix come first)
+		if fileInfos[i].hasPrefix != fileInfos[j].hasPrefix {
+			return fileInfos[i].hasPrefix
+		}
+
+		// If both have a prefix, sort by prefix value
+		if fileInfos[i].hasPrefix && fileInfos[j].hasPrefix {
+			if fileInfos[i].prefix != fileInfos[j].prefix {
+				return fileInfos[i].prefix < fileInfos[j].prefix
+			}
+		}
+
+		// Finally, sort alphabetically by basename
+		return fileInfos[i].basename < fileInfos[j].basename
+	})
+
+	// Extract sorted paths
+	sortedFiles := make([]string, len(fileInfos))
+	for i, info := range fileInfos {
+		sortedFiles[i] = info.path
+	}
+
+	return sortedFiles
+}
+
+// mergeTable combines two table definitions
+// Merges: Columns (map), Constraints (map), Indexes (map), Relationships (map)
+// Uses the first non-empty Description
+func mergeTable(baseTable, fileTable *models.Table) {
+	// Merge columns (map naturally merges - later keys overwrite)
+	for key, col := range fileTable.Columns {
+		baseTable.Columns[key] = col
+	}
+
+	// Merge constraints
+	for key, constraint := range fileTable.Constraints {
+		baseTable.Constraints[key] = constraint
+	}
+
+	// Merge indexes
+	for key, index := range fileTable.Indexes {
+		baseTable.Indexes[key] = index
+	}
+
+	// Merge relationships
+	for key, rel := range fileTable.Relationships {
+		baseTable.Relationships[key] = rel
+	}
+
+	// Use the first non-empty description
+	if baseTable.Description == "" && fileTable.Description != "" {
+		baseTable.Description = fileTable.Description
+	}
+
+	// Merge metadata maps
+	if baseTable.Metadata == nil {
+		baseTable.Metadata = make(map[string]any)
+	}
+	for key, val := range fileTable.Metadata {
+		baseTable.Metadata[key] = val
+	}
+}
+
+// mergeSchema finds or creates a schema and merges tables
+func mergeSchema(baseDB *models.Database, fileSchema *models.Schema) {
+	// Find existing schema by name (normalize names by stripping quotes)
+	var existingSchema *models.Schema
+	fileSchemaName := stripQuotes(fileSchema.Name)
+	for _, schema := range baseDB.Schemas {
+		if stripQuotes(schema.Name) == fileSchemaName {
+			existingSchema = schema
+			break
+		}
+	}
+
+	// If the schema doesn't exist, add it and return
+	if existingSchema == nil {
+		baseDB.Schemas = append(baseDB.Schemas, fileSchema)
+		return
+	}
+
+	// Merge tables from fileSchema into existingSchema
+	for _, fileTable := range fileSchema.Tables {
+		// Find existing table by name (normalize names by stripping quotes)
+		var existingTable *models.Table
+		fileTableName := stripQuotes(fileTable.Name)
+		for _, table := range existingSchema.Tables {
+			if stripQuotes(table.Name) == fileTableName {
+				existingTable = table
+				break
+			}
+		}
+
+		// If the table doesn't exist, add it; otherwise merge the two definitions
+		if existingTable == nil {
+			existingSchema.Tables = append(existingSchema.Tables, fileTable)
+		} else {
+			mergeTable(existingTable, fileTable)
+		}
+	}
+
+	// Merge other schema properties
+	existingSchema.Views = append(existingSchema.Views, fileSchema.Views...)
+	existingSchema.Sequences = append(existingSchema.Sequences, fileSchema.Sequences...)
+	existingSchema.Scripts = append(existingSchema.Scripts, fileSchema.Scripts...)
+
+	// Merge permissions
+	if existingSchema.Permissions == nil {
+		existingSchema.Permissions = make(map[string]string)
+	}
+	for key, val := range fileSchema.Permissions {
+		existingSchema.Permissions[key] = val
+	}
+
+	// Merge metadata
+	if existingSchema.Metadata == nil {
+		existingSchema.Metadata = make(map[string]any)
+	}
+	for key, val := range fileSchema.Metadata {
+		existingSchema.Metadata[key] = val
+	}
+}
+
+// mergeDatabase merges schemas from fileDB into baseDB
+func mergeDatabase(baseDB, fileDB *models.Database) {
+	// Merge each schema from fileDB
+	for _, fileSchema := range fileDB.Schemas {
+		mergeSchema(baseDB, fileSchema)
+	}
+
+	// Merge domains
+	baseDB.Domains = append(baseDB.Domains, fileDB.Domains...)
+
+	// Use the first non-empty description
+	if baseDB.Description == "" && fileDB.Description != "" {
+		baseDB.Description = fileDB.Description
+	}
+}
 
 // parseDBML parses DBML content and returns a Database model
 func (r *Reader) parseDBML(content string) (*models.Database, error) {
 	db := models.InitDatabase("database")
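
With this hunk, pointing the reader at a directory loads every *.dbml file in the resolved order and merges them into one model. A usage sketch (the dbml package alias and the readers import path are assumptions; NewReader and the FilePath option are as shown above):

	opts := &readers.ReaderOptions{FilePath: "./schema"} // e.g. 1_tables.dbml, 2_views.dbml, refs.dbml
	r := dbml.NewReader(opts)
	db, err := r.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("merged %d schema(s)\n", len(db.Schemas))
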
@@ -109,7 +449,9 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
 		// Parse Table definition
 		if matches := tableRegex.FindStringSubmatch(line); matches != nil {
 			tableName := matches[1]
-			parts := strings.Split(tableName, ".")
+			// Strip comments/notes before parsing to avoid dots in notes
+			tableName = strings.TrimSpace(regexp.MustCompile(`\s*\[.*?\]\s*`).ReplaceAllString(tableName, ""))
+			parts := splitIdentifier(tableName)
+
 			if len(parts) == 2 {
 				currentSchema = stripQuotes(parts[0])
@@ -261,8 +603,10 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
 			column.Default = strings.Trim(defaultVal, "'\"")
 		} else if attr == "unique" {
 			// Create a unique constraint
+			// Clean the table name by removing leading underscores to avoid double underscores
+			cleanTableName := strings.TrimLeft(tableName, "_")
 			uniqueConstraint := models.InitConstraint(
-				fmt.Sprintf("uq_%s", columnName),
+				fmt.Sprintf("ukey_%s_%s", cleanTableName, columnName),
 				models.UniqueConstraint,
 			)
 			uniqueConstraint.Schema = schemaName
@@ -287,10 +631,10 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
|
|||||||
refOp := strings.TrimSpace(refStr)
|
refOp := strings.TrimSpace(refStr)
|
||||||
var isReverse bool
|
var isReverse bool
|
||||||
if strings.HasPrefix(refOp, "<") {
|
if strings.HasPrefix(refOp, "<") {
|
||||||
isReverse = column.IsPrimaryKey // < on PK means "is referenced by" (reverse)
|
// < means "is referenced by" - only makes sense on PK columns
|
||||||
} else if strings.HasPrefix(refOp, ">") {
|
isReverse = column.IsPrimaryKey
|
||||||
isReverse = !column.IsPrimaryKey // > on FK means reverse
|
|
||||||
}
|
}
|
||||||
|
// > means "references" - always a forward FK, never reverse
|
||||||
|
|
||||||
constraint = r.parseRef(refStr)
|
constraint = r.parseRef(refStr)
|
||||||
if constraint != nil {
|
if constraint != nil {
|
||||||
@@ -310,8 +654,8 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
|
|||||||
constraint.Table = tableName
|
constraint.Table = tableName
|
||||||
constraint.Columns = []string{columnName}
|
constraint.Columns = []string{columnName}
|
||||||
}
|
}
|
||||||
// Generate short constraint name based on the column
|
// Generate constraint name based on table and columns
|
||||||
constraint.Name = fmt.Sprintf("fk_%s", constraint.Columns[0])
|
constraint.Name = fmt.Sprintf("fk_%s_%s", constraint.Table, strings.Join(constraint.Columns, "_"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -332,29 +676,33 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
|
|||||||
// Format: (columns) [attributes] OR columnname [attributes]
|
// Format: (columns) [attributes] OR columnname [attributes]
|
||||||
var columns []string
|
var columns []string
|
||||||
|
|
||||||
if strings.Contains(line, "(") && strings.Contains(line, ")") {
|
// Find the attributes section to avoid parsing parentheses in notes/attributes
|
||||||
|
attrStart := strings.Index(line, "[")
|
||||||
|
columnPart := line
|
||||||
|
if attrStart > 0 {
|
||||||
|
columnPart = line[:attrStart]
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(columnPart, "(") && strings.Contains(columnPart, ")") {
|
||||||
// Multi-column format: (col1, col2) [attributes]
|
// Multi-column format: (col1, col2) [attributes]
|
||||||
colStart := strings.Index(line, "(")
|
colStart := strings.Index(columnPart, "(")
|
||||||
colEnd := strings.Index(line, ")")
|
colEnd := strings.Index(columnPart, ")")
|
||||||
if colStart >= colEnd {
|
if colStart >= colEnd {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
columnsStr := line[colStart+1 : colEnd]
|
columnsStr := columnPart[colStart+1 : colEnd]
|
||||||
for _, col := range strings.Split(columnsStr, ",") {
|
for _, col := range strings.Split(columnsStr, ",") {
|
||||||
columns = append(columns, stripQuotes(strings.TrimSpace(col)))
|
columns = append(columns, stripQuotes(strings.TrimSpace(col)))
|
||||||
}
|
}
|
||||||
} else if strings.Contains(line, "[") {
|
} else if attrStart > 0 {
|
||||||
// Single column format: columnname [attributes]
|
// Single column format: columnname [attributes]
|
||||||
// Extract column name before the bracket
|
// Extract column name before the bracket
|
||||||
idx := strings.Index(line, "[")
|
colName := strings.TrimSpace(columnPart)
|
||||||
if idx > 0 {
|
|
||||||
colName := strings.TrimSpace(line[:idx])
|
|
||||||
if colName != "" {
|
if colName != "" {
|
||||||
columns = []string{stripQuotes(colName)}
|
columns = []string{stripQuotes(colName)}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if len(columns) == 0 {
|
if len(columns) == 0 {
|
||||||
return nil
|
return nil
|
||||||
@@ -391,7 +739,11 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
|
|||||||
|
|
||||||
// Generate name if not provided
|
// Generate name if not provided
|
||||||
if index.Name == "" {
|
if index.Name == "" {
|
||||||
index.Name = fmt.Sprintf("idx_%s_%s", tableName, strings.Join(columns, "_"))
|
prefix := "idx"
|
||||||
|
if index.Unique {
|
||||||
|
prefix = "uidx"
|
||||||
|
}
|
||||||
|
index.Name = fmt.Sprintf("%s_%s_%s", prefix, tableName, strings.Join(columns, "_"))
|
||||||
}
|
}
|
||||||
|
|
||||||
return index
|
return index
|
||||||
@@ -451,10 +803,10 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate short constraint name based on the source column
|
// Generate constraint name based on table and columns
|
||||||
constraintName := fmt.Sprintf("fk_%s_%s", fromTable, toTable)
|
constraintName := fmt.Sprintf("fk_%s_%s", fromTable, strings.Join(fromColumns, "_"))
|
||||||
if len(fromColumns) > 0 {
|
if len(fromColumns) == 0 {
|
||||||
constraintName = fmt.Sprintf("fk_%s", fromColumns[0])
|
constraintName = fmt.Sprintf("fk_%s_%s", fromTable, toTable)
|
||||||
}
|
}
|
||||||
|
|
||||||
constraint := models.InitConstraint(
|
constraint := models.InitConstraint(
|
||||||
@@ -510,7 +862,7 @@ func (r *Reader) parseTableRef(ref string) (schema, table string, columns []stri
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Parse schema, table, and optionally column
|
// Parse schema, table, and optionally column
|
||||||
parts := strings.Split(strings.TrimSpace(ref), ".")
|
parts := splitIdentifier(strings.TrimSpace(ref))
|
||||||
if len(parts) == 3 {
|
if len(parts) == 3 {
|
||||||
// Format: "schema"."table"."column"
|
// Format: "schema"."table"."column"
|
||||||
schema = stripQuotes(parts[0])
|
schema = stripQuotes(parts[0])
|
||||||
|
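The hunks above replace plain `strings.Split(name, ".")` with `splitIdentifier`, which must ignore dots inside quoted identifiers such as `"my.schema"."users"`. Its definition is outside this excerpt; a minimal sketch under that assumption (requires the `strings` import; the shipped implementation may differ):

```go
// splitIdentifier splits a dotted identifier on dots that fall outside
// double quotes, so `"my.schema".users` yields two parts, not three.
// Hypothetical sketch, not the verbatim helper.
func splitIdentifier(s string) []string {
	var parts []string
	var cur strings.Builder
	inQuotes := false
	for _, r := range s {
		switch {
		case r == '"':
			inQuotes = !inQuotes
			cur.WriteRune(r)
		case r == '.' && !inQuotes:
			parts = append(parts, cur.String())
			cur.Reset()
		default:
			cur.WriteRune(r)
		}
	}
	return append(parts, cur.String())
}
```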
@@ -1,6 +1,7 @@
 package dbml

 import (
+	"os"
 	"path/filepath"
 	"testing"

@@ -517,3 +518,356 @@ func TestGetForeignKeys(t *testing.T) {
		t.Error("Expected foreign key constraint type")
	}
}

// Tests for multi-file directory loading

func TestReadDirectory_MultipleFiles(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	// Should have public schema
	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	var publicSchema *models.Schema
	for _, schema := range db.Schemas {
		if schema.Name == "public" {
			publicSchema = schema
			break
		}
	}

	if publicSchema == nil {
		t.Fatal("Public schema not found")
	}

	// Should have 3 tables: users, posts, comments
	if len(publicSchema.Tables) != 3 {
		t.Fatalf("Expected 3 tables, got %d", len(publicSchema.Tables))
	}

	// Find tables
	var usersTable, postsTable, commentsTable *models.Table
	for _, table := range publicSchema.Tables {
		switch table.Name {
		case "users":
			usersTable = table
		case "posts":
			postsTable = table
		case "comments":
			commentsTable = table
		}
	}

	if usersTable == nil {
		t.Fatal("Users table not found")
	}
	if postsTable == nil {
		t.Fatal("Posts table not found")
	}
	if commentsTable == nil {
		t.Fatal("Comments table not found")
	}

	// Verify users table has merged columns from 1_users.dbml and 3_add_columns.dbml
	expectedUserColumns := []string{"id", "email", "name", "created_at"}
	if len(usersTable.Columns) != len(expectedUserColumns) {
		t.Errorf("Expected %d columns in users table, got %d", len(expectedUserColumns), len(usersTable.Columns))
	}

	for _, colName := range expectedUserColumns {
		if _, exists := usersTable.Columns[colName]; !exists {
			t.Errorf("Expected column '%s' in users table", colName)
		}
	}

	// Verify posts table columns
	expectedPostColumns := []string{"id", "user_id", "title", "content", "created_at"}
	for _, colName := range expectedPostColumns {
		if _, exists := postsTable.Columns[colName]; !exists {
			t.Errorf("Expected column '%s' in posts table", colName)
		}
	}
}

func TestReadDirectory_TableMerging(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	// Find users table
	var usersTable *models.Table
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			if table.Name == "users" && schema.Name == "public" {
				usersTable = table
				break
			}
		}
	}

	if usersTable == nil {
		t.Fatal("Users table not found")
	}

	// Verify columns from file 1 (id, email)
	if _, exists := usersTable.Columns["id"]; !exists {
		t.Error("Column 'id' from 1_users.dbml not found")
	}
	if _, exists := usersTable.Columns["email"]; !exists {
		t.Error("Column 'email' from 1_users.dbml not found")
	}

	// Verify columns from file 3 (name, created_at)
	if _, exists := usersTable.Columns["name"]; !exists {
		t.Error("Column 'name' from 3_add_columns.dbml not found")
	}
	if _, exists := usersTable.Columns["created_at"]; !exists {
		t.Error("Column 'created_at' from 3_add_columns.dbml not found")
	}

	// Verify column properties from file 1
	emailCol := usersTable.Columns["email"]
	if !emailCol.NotNull {
		t.Error("Email column should be not null (from 1_users.dbml)")
	}
	if emailCol.Type != "varchar(255)" {
		t.Errorf("Expected email type 'varchar(255)', got '%s'", emailCol.Type)
	}
}

func TestReadDirectory_CommentedRefsLast(t *testing.T) {
	// This test verifies that files with commented refs are processed last
	// by checking that the file discovery returns them in the correct order
	dirPath := filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile")

	opts := &readers.ReaderOptions{
		FilePath: dirPath,
	}

	reader := NewReader(opts)
	files, err := reader.discoverDBMLFiles(dirPath)
	if err != nil {
		t.Fatalf("discoverDBMLFiles() error = %v", err)
	}

	if len(files) < 2 {
		t.Skip("Not enough files to test ordering")
	}

	// Check that 9_refs.dbml (which has commented refs) comes last
	lastFile := filepath.Base(files[len(files)-1])
	if lastFile != "9_refs.dbml" {
		t.Errorf("Expected last file to be '9_refs.dbml' (has commented refs), got '%s'", lastFile)
	}

	// Check that numbered files without commented refs come first
	firstFile := filepath.Base(files[0])
	if firstFile != "1_users.dbml" {
		t.Errorf("Expected first file to be '1_users.dbml', got '%s'", firstFile)
	}
}

func TestReadDirectory_EmptyDirectory(t *testing.T) {
	// Create a temporary empty directory
	tmpDir := filepath.Join("..", "..", "..", "tests", "assets", "dbml", "empty_test_dir")
	err := os.MkdirAll(tmpDir, 0755)
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tmpDir)

	opts := &readers.ReaderOptions{
		FilePath: tmpDir,
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() should not error on empty directory, got: %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	// Empty directory should return empty database
	if len(db.Schemas) != 0 {
		t.Errorf("Expected 0 schemas for empty directory, got %d", len(db.Schemas))
	}
}

func TestReadDatabase_BackwardCompat(t *testing.T) {
	// Test that single file loading still works
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "simple.dbml"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	schema := db.Schemas[0]
	if len(schema.Tables) != 1 {
		t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
	}

	table := schema.Tables[0]
	if table.Name != "users" {
		t.Errorf("Expected table name 'users', got '%s'", table.Name)
	}
}

func TestParseFilePrefix(t *testing.T) {
	tests := []struct {
		filename   string
		wantPrefix int
		wantHas    bool
	}{
		{"1_schema.dbml", 1, true},
		{"2_tables.dbml", 2, true},
		{"10_relationships.dbml", 10, true},
		{"99_data.dbml", 99, true},
		{"schema.dbml", 0, false},
		{"tables_no_prefix.dbml", 0, false},
		{"/path/to/1_file.dbml", 1, true},
		{"/path/to/file.dbml", 0, false},
		{"1-file.dbml", 1, true},
		{"2-another.dbml", 2, true},
	}

	for _, tt := range tests {
		t.Run(tt.filename, func(t *testing.T) {
			gotPrefix, gotHas := parseFilePrefix(tt.filename)
			if gotPrefix != tt.wantPrefix {
				t.Errorf("parseFilePrefix(%s) prefix = %d, want %d", tt.filename, gotPrefix, tt.wantPrefix)
			}
			if gotHas != tt.wantHas {
				t.Errorf("parseFilePrefix(%s) hasPrefix = %v, want %v", tt.filename, gotHas, tt.wantHas)
			}
		})
	}
}

func TestConstraintNaming(t *testing.T) {
	// Test that constraints are named with proper prefixes
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "complex.dbml"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	// Find the users and posts tables
	var usersTable *models.Table
	var postsTable *models.Table
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			if table.Name == "users" {
				usersTable = table
			} else if table.Name == "posts" {
				postsTable = table
			}
		}
	}

	if usersTable == nil {
		t.Fatal("Users table not found")
	}
	if postsTable == nil {
		t.Fatal("Posts table not found")
	}

	// Test unique constraint naming: ukey_table_column
	if _, exists := usersTable.Constraints["ukey_users_email"]; !exists {
		t.Error("Expected unique constraint 'ukey_users_email' not found")
		t.Logf("Available constraints: %v", getKeys(usersTable.Constraints))
	}

	if _, exists := postsTable.Constraints["ukey_posts_slug"]; !exists {
		t.Error("Expected unique constraint 'ukey_posts_slug' not found")
		t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
	}

	// Test foreign key naming: fk_table_column
	if _, exists := postsTable.Constraints["fk_posts_user_id"]; !exists {
		t.Error("Expected foreign key 'fk_posts_user_id' not found")
		t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
	}

	// Test unique index naming: uidx_table_columns
	if _, exists := postsTable.Indexes["uidx_posts_slug"]; !exists {
		t.Error("Expected unique index 'uidx_posts_slug' not found")
		t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
	}

	// Test regular index naming: idx_table_columns
	if _, exists := postsTable.Indexes["idx_posts_user_id_published"]; !exists {
		t.Error("Expected index 'idx_posts_user_id_published' not found")
		t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
	}
}

func getKeys[V any](m map[string]V) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	return keys
}

func TestHasCommentedRefs(t *testing.T) {
	// Test with the actual multifile test fixtures
	tests := []struct {
		filename string
		wantHas  bool
	}{
		{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "1_users.dbml"), false},
		{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "2_posts.dbml"), false},
		{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "3_add_columns.dbml"), false},
		{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "9_refs.dbml"), true},
	}

	for _, tt := range tests {
		t.Run(filepath.Base(tt.filename), func(t *testing.T) {
			gotHas, err := hasCommentedRefs(tt.filename)
			if err != nil {
				t.Fatalf("hasCommentedRefs() error = %v", err)
			}
			if gotHas != tt.wantHas {
				t.Errorf("hasCommentedRefs(%s) = %v, want %v", filepath.Base(tt.filename), gotHas, tt.wantHas)
			}
		})
	}
}
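The table-driven cases above pin down the contract of `parseFilePrefix` (a numeric prefix followed by `_` or `-`), whose body lives elsewhere in the reader. A sketch consistent with those cases, using `path/filepath` and `strconv` (hypothetical; the shipped implementation may differ):

```go
// parseFilePrefix extracts the leading numeric prefix from a DBML filename
// such as "1_users.dbml" or "2-posts.dbml", returning (prefix, true) on a
// match and (0, false) otherwise. Sketch inferred from TestParseFilePrefix.
func parseFilePrefix(path string) (int, bool) {
	base := filepath.Base(path)
	i := 0
	for i < len(base) && base[i] >= '0' && base[i] <= '9' {
		i++
	}
	// Require at least one digit followed by an underscore or hyphen
	if i == 0 || i >= len(base) || (base[i] != '_' && base[i] != '-') {
		return 0, false
	}
	n, err := strconv.Atoi(base[:i])
	if err != nil {
		return 0, false
	}
	return n, true
}
```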
@@ -79,6 +79,8 @@ func (r *Reader) convertToDatabase(dctx *models.DCTXDictionary) (*models.Databas
 	db := models.InitDatabase(dbName)
 	schema := models.InitSchema("public")

+	// Note: DCTX doesn't have a database GUID, but the schema can use the dictionary name if available
+
 	// Create GUID mappings for tables and keys
 	tableGuidMap := make(map[string]string)        // GUID -> table name
 	keyGuidMap := make(map[string]*models.DCTXKey) // GUID -> key definition

@@ -162,6 +164,10 @@ func (r *Reader) convertTable(dctxTable *models.DCTXTable) (*models.Table, map[s
 	tableName := r.sanitizeName(dctxTable.Name)
 	table := models.InitTable(tableName, "public")
 	table.Description = dctxTable.Description
+	// Assign GUID from DCTX table
+	if dctxTable.Guid != "" {
+		table.GUID = dctxTable.Guid
+	}

 	fieldGuidMap := make(map[string]string)

@@ -202,6 +208,10 @@ func (r *Reader) convertField(dctxField *models.DCTXField, tableName string) ([]

 	// Convert single field
 	column := models.InitColumn(r.sanitizeName(dctxField.Name), tableName, "public")
+	// Assign GUID from DCTX field
+	if dctxField.Guid != "" {
+		column.GUID = dctxField.Guid
+	}

 	// Map Clarion data types
 	dataType, length := r.mapDataType(dctxField.DataType, dctxField.Size)

@@ -346,6 +356,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
 	constraint.Table = table.Name
 	constraint.Schema = table.Schema
 	constraint.Columns = columns
+	// Assign GUID from DCTX key
+	if dctxKey.Guid != "" {
+		constraint.GUID = dctxKey.Guid
+	}

 	table.Constraints[constraint.Name] = constraint

@@ -366,6 +380,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
 	index.Columns = columns
 	index.Unique = dctxKey.Unique
 	index.Type = "btree"
+	// Assign GUID from DCTX key
+	if dctxKey.Guid != "" {
+		index.GUID = dctxKey.Guid
+	}

 	table.Indexes[index.Name] = index
 	return nil

@@ -460,6 +478,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
 	constraint.ReferencedColumns = pkColumns
 	constraint.OnDelete = r.mapReferentialAction(relation.Delete)
 	constraint.OnUpdate = r.mapReferentialAction(relation.Update)
+	// Assign GUID from DCTX relation
+	if relation.Guid != "" {
+		constraint.GUID = relation.Guid
+	}

 	foreignTable.Constraints[fkName] = constraint

@@ -473,6 +495,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
 	relationship.ForeignKey = fkName
 	relationship.Properties["on_delete"] = constraint.OnDelete
 	relationship.Properties["on_update"] = constraint.OnUpdate
+	// Assign GUID from DCTX relation
+	if relation.Guid != "" {
+		relationship.GUID = relation.Guid
+	}

 	foreignTable.Relationships[relationshipName] = relationship
 }
pkg/readers/doc.go (new file, 53 lines)
@@ -0,0 +1,53 @@
// Package readers provides interfaces and implementations for reading database schemas
// from various input formats and data sources.
//
// # Overview
//
// The readers package defines a common Reader interface that all format-specific readers
// implement. This allows RelSpec to read database schemas from multiple sources including:
//   - Live databases (PostgreSQL, SQLite)
//   - Schema definition files (DBML, DCTX, DrawDB, GraphQL)
//   - ORM model files (GORM, Bun, Drizzle, Prisma, TypeORM)
//   - Data interchange formats (JSON, YAML)
//
// # Architecture
//
// Each reader implementation is located in its own subpackage (e.g., pkg/readers/dbml,
// pkg/readers/pgsql) and implements the Reader interface, supporting three levels of
// granularity:
//   - ReadDatabase() - Read complete database with all schemas
//   - ReadSchema() - Read single schema with all tables
//   - ReadTable() - Read single table with all columns and metadata
//
// # Usage
//
// Readers are instantiated with ReaderOptions containing source-specific configuration:
//
//	// Read from file
//	reader := dbml.NewReader(&readers.ReaderOptions{
//		FilePath: "schema.dbml",
//	})
//	db, err := reader.ReadDatabase()
//
//	// Read from database
//	reader := pgsql.NewReader(&readers.ReaderOptions{
//		ConnectionString: "postgres://user:pass@localhost/mydb",
//	})
//	db, err := reader.ReadDatabase()
//
// # Supported Formats
//
//   - dbml: Database Markup Language files
//   - dctx: DCTX schema files
//   - drawdb: DrawDB JSON format
//   - graphql: GraphQL schema definition language
//   - json: JSON database schema
//   - yaml: YAML database schema
//   - gorm: Go GORM model structs
//   - bun: Go Bun model structs
//   - drizzle: TypeScript Drizzle ORM schemas
//   - prisma: Prisma schema language
//   - typeorm: TypeScript TypeORM entities
//   - pgsql: PostgreSQL live database introspection
//   - sqlite: SQLite database files
package readers
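The three granularity levels above define the contract every subpackage satisfies. A plausible shape for the interface, inferred from this doc comment and from the sqldir reader added later in this diff (the definition itself is not shown here):

```go
// Reader is the common contract implemented by each format-specific reader.
// Shape inferred from the doc comment above; not the verbatim source.
type Reader interface {
	ReadDatabase() (*models.Database, error) // complete database, all schemas
	ReadSchema() (*models.Schema, error)     // single schema, all tables
	ReadTable() (*models.Table, error)       // single table with metadata
}
```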
@@ -140,6 +140,32 @@ func (r *Reader) convertToDatabase(drawSchema *drawdb.DrawDBSchema) (*models.Dat
 		db.Schemas = append(db.Schemas, schema)
 	}

+	// Convert DrawDB subject areas to domains
+	for _, area := range drawSchema.SubjectAreas {
+		domain := models.InitDomain(area.Name)
+
+		// Find all tables that visually belong to this area
+		// A table belongs to an area if its position is within the area bounds
+		for _, drawTable := range drawSchema.Tables {
+			if drawTable.X >= area.X && drawTable.X <= (area.X+area.Width) &&
+				drawTable.Y >= area.Y && drawTable.Y <= (area.Y+area.Height) {
+
+				schemaName := drawTable.Schema
+				if schemaName == "" {
+					schemaName = "public"
+				}
+
+				domainTable := models.InitDomainTable(drawTable.Name, schemaName)
+				domain.Tables = append(domain.Tables, domainTable)
+			}
+		}
+
+		// Only add domain if it has tables
+		if len(domain.Tables) > 0 {
+			db.Domains = append(db.Domains, domain)
+		}
+	}
+
 	return db, nil
 }
@@ -241,11 +241,9 @@ func (r *Reader) parsePgEnum(line string, matches []string) *models.Enum {
 		}
 	}

-	return &models.Enum{
-		Name:   enumName,
-		Values: values,
-		Schema: "public",
-	}
+	enum := models.InitEnum(enumName, "public")
+	enum.Values = values
+	return enum
 }

 // parseTableBlock parses a complete pgTable definition block

@@ -260,11 +260,7 @@ func (r *Reader) parseType(typeName string, lines []string, schema *models.Schem
 }

 func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
-	enum := &models.Enum{
-		Name:   enumName,
-		Schema: schema.Name,
-		Values: make([]string, 0),
-	}
+	enum := models.InitEnum(enumName, schema.Name)

 	for _, line := range lines {
 		trimmed := strings.TrimSpace(line)
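Both hunks above (and the Prisma hunk further down) replace hand-rolled `models.Enum` literals with `models.InitEnum`. The constructor is not part of this diff, but the removed literals pin down its likely shape:

```go
// InitEnum as inferred from the struct literals it replaces in this diff.
// Sketch only; not the verbatim constructor from pkg/models.
func InitEnum(name, schema string) *Enum {
	return &Enum{
		Name:   name,
		Schema: schema,
		Values: make([]string, 0),
	}
}
```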
@@ -329,10 +329,10 @@ func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint)
 	relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)

 	relationship := models.InitRelationship(relationshipName, models.OneToMany)
-	relationship.FromTable = fk.ReferencedTable
-	relationship.FromSchema = fk.ReferencedSchema
-	relationship.ToTable = table.Name
-	relationship.ToSchema = table.Schema
+	relationship.FromTable = table.Name
+	relationship.FromSchema = table.Schema
+	relationship.ToTable = fk.ReferencedTable
+	relationship.ToSchema = fk.ReferencedSchema
 	relationship.ForeignKey = fk.Name

 	// Store constraint actions in properties
@@ -328,12 +328,12 @@ func TestDeriveRelationship(t *testing.T) {
 		t.Errorf("Expected relationship type %s, got %s", models.OneToMany, rel.Type)
 	}

-	if rel.FromTable != "users" {
-		t.Errorf("Expected FromTable 'users', got '%s'", rel.FromTable)
+	if rel.FromTable != "orders" {
+		t.Errorf("Expected FromTable 'orders', got '%s'", rel.FromTable)
 	}

-	if rel.ToTable != "orders" {
-		t.Errorf("Expected ToTable 'orders', got '%s'", rel.ToTable)
+	if rel.ToTable != "users" {
+		t.Errorf("Expected ToTable 'users', got '%s'", rel.ToTable)
 	}

 	if rel.ForeignKey != "fk_orders_user_id" {
@@ -128,11 +128,7 @@ func (r *Reader) parsePrisma(content string) (*models.Database, error) {
 		if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
 			currentBlock = "enum"
 			enumName := matches[1]
-			currentEnum = &models.Enum{
-				Name:   enumName,
-				Schema: "public",
-				Values: make([]string, 0),
-			}
+			currentEnum = models.InitEnum(enumName, "public")
 			blockContent = []string{}
 			continue
 		}
pkg/readers/sqldir/README.md (new file, 162 lines)
@@ -0,0 +1,162 @@
# SQL Directory Reader

The SQL Directory Reader (`sqldir`) reads SQL scripts from a directory structure and populates the `Scripts` field of a `Schema`. It supports recursive directory scanning and extracts priority, sequence, and name information from filenames.

## File Naming Convention

Scripts must follow this naming pattern (supports both underscores and hyphens as separators):

```
{priority}_{sequence}_{name}.{sql|pgsql}
{priority}-{sequence}-{name}.{sql|pgsql}
```

### Components

- **priority**: Integer (0-9999) - Defines execution order (lower executes first)
- **sequence**: Integer (0-9999) - Defines order within the same priority level
- **separator**: Underscore `_` or hyphen `-` (can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens allowed)
- **extension**: `.sql` or `.pgsql`

### Examples

```
migrations/
├── 1_001_create_schema.sql          # Priority 1, Sequence 1 (underscore format)
├── 1-002-create-users-table.sql     # Priority 1, Sequence 2 (hyphen format)
├── 1_003_create_posts_table.pgsql   # Priority 1, Sequence 3 (underscore format)
├── 2-001-add-indexes.sql            # Priority 2, Sequence 1 (hyphen format)
├── 2_002_add_constraints.sql        # Priority 2, Sequence 2 (underscore format)
├── 10-10-create-newid.pgsql         # Priority 10, Sequence 10 (hyphen format)
└── subdirectory/
    └── 3_001_seed_data.sql          # Priority 3, Sequence 1 (subdirs supported)
```

**Execution Order**: 1→2→3→4→5→7→6 (sorted by Priority ascending, then Sequence ascending; the priority-3 seed script in the subdirectory runs before the priority-10 script, despite appearing later in the tree)

**Both formats can be mixed** in the same directory - the reader handles both seamlessly.

### Invalid Filenames (Ignored)

- `migration.sql` - Missing priority/sequence
- `1_create_users.sql` - Missing sequence
- `create_users.sql` - Missing priority/sequence
- `1_001_test.txt` - Wrong extension
- `readme.md` - Not a SQL file

## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
)

reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "/path/to/migrations",
	Metadata: map[string]any{
		"schema_name":   "public", // Optional, defaults to "public"
		"database_name": "myapp",  // Optional, defaults to "database"
	},
})

// Read all scripts
database, err := reader.ReadDatabase()
if err != nil {
	log.Fatal(err)
}

// Access scripts
for _, schema := range database.Schemas {
	for _, script := range schema.Scripts {
		fmt.Printf("Script: %s (P:%d S:%d)\n",
			script.Name, script.Priority, script.Sequence)
		fmt.Printf("SQL: %s\n", script.SQL)
	}
}
```

### Read Schema Only

```go
schema, err := reader.ReadSchema()
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Found %d scripts\n", len(schema.Scripts))
```

## Features

- **Recursive Directory Scanning**: Automatically scans all subdirectories
- **Symlink Skipping**: Symbolic links are automatically skipped (prevents loops and duplicates)
- **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
- **Flexible Naming**: Extracts metadata from filename patterns
- **Error Handling**: Validates directory existence and file accessibility
- **Schema Integration**: Scripts are added to the standard RelSpec `Schema` model

## Script Model

Each script is stored as a `models.Script`:

```go
type Script struct {
	Name        string // Extracted from filename (e.g., "create_users")
	Description string // Auto-generated description with file path
	SQL         string // Complete SQL content from file
	Priority    int    // Execution priority from filename
	Sequence    uint   // Execution sequence from filename
	// ... other fields available but not populated by this reader
}
```

## Integration with SQL Executor

The SQL Directory Reader is designed to work seamlessly with the SQL Executor Writer:

```go
// Read scripts
reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "./migrations",
})
db, _ := reader.ReadDatabase()

// Execute scripts
writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://localhost/mydb",
	},
})
writer.WriteDatabase(db) // Executes in Priority→Sequence order
```

See `pkg/writers/sqlexec/README.md` for more details on script execution.

## Error Handling

The reader will return errors for:
- Non-existent directory paths
- Inaccessible directories or files
- Invalid file permissions
- File read failures

Files that don't match the naming pattern are silently ignored (not treated as errors).

## Testing

Run tests:
```bash
go test ./pkg/readers/sqldir/
```

Tests include:
- Valid file parsing (underscore and hyphen formats)
- Recursive directory scanning
- Symlink skipping
- Invalid filename handling
- Empty directory handling
- Error conditions
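The execution-order guarantee above is applied when the scripts are run, not when they are read. A minimal sketch of the Priority-then-Sequence comparison (hypothetical helper; the actual ordering lives in the sqlexec writer, which this diff does not include):

```go
// sortScripts orders scripts by Priority ascending, then Sequence ascending.
// Illustrative only; assumes the models.Script fields shown in the README.
func sortScripts(scripts []*models.Script) {
	sort.Slice(scripts, func(i, j int) bool {
		if scripts[i].Priority != scripts[j].Priority {
			return scripts[i].Priority < scripts[j].Priority
		}
		return scripts[i].Sequence < scripts[j].Sequence
	})
}
```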
pkg/readers/sqldir/example_test.go (new file, 127 lines)
@@ -0,0 +1,127 @@
package sqldir_test

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

// Example demonstrates how to read SQL scripts from a directory and execute them
func Example() {
	// Step 1: Read SQL scripts from a directory
	// Directory structure example:
	//   migrations/
	//     1_001_create_schema.sql
	//     1_002_create_users_table.sql
	//     1_003_create_posts_table.pgsql
	//     2_001_add_indexes.sql
	//     2_002_seed_data.sql

	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
		Metadata: map[string]any{
			"schema_name":   "public",
			"database_name": "myapp",
		},
	})

	// Read the database schema with scripts
	database, err := reader.ReadDatabase()
	if err != nil {
		log.Fatalf("Failed to read scripts: %v", err)
	}

	fmt.Printf("Read %d schemas\n", len(database.Schemas))
	fmt.Printf("Found %d scripts in schema '%s'\n",
		len(database.Schemas[0].Scripts),
		database.Schemas[0].Name)

	// Step 2: Execute the scripts against a PostgreSQL database
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://user:password@localhost:5432/myapp?sslmode=disable",
		},
	})

	// Execute all scripts in Priority then Sequence order
	if err := writer.WriteDatabase(database); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("All scripts executed successfully!")
}

// Example_withSingleSchema shows how to read and execute scripts for a single schema
func Example_withSingleSchema() {
	// Read scripts
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
	})

	schema, err := reader.ReadSchema()
	if err != nil {
		log.Fatalf("Failed to read schema: %v", err)
	}

	// Execute scripts
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/testdb",
		},
	})

	if err := writer.WriteSchema(schema); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("Schema scripts executed successfully!")
}

// Example_fileNamingConvention shows the expected file naming pattern
func Example_fileNamingConvention() {
	// File naming pattern: {priority}_{sequence}_{name}.sql or .pgsql
	// OR: {priority}-{sequence}-{name}.sql or .pgsql
	//
	// Both underscore (_) and hyphen (-) separators are supported and can be mixed.
	//
	// Components:
	//   - priority: Integer (0-9999) - Scripts with lower priority execute first
	//   - sequence: Integer (0-9999) - Within same priority, lower sequence executes first
	//   - separator: Underscore (_) or hyphen (-)
	//   - name: Descriptive name (alphanumeric, underscores, hyphens)
	//   - extension: .sql or .pgsql
	//
	// Examples (underscore format):
	//   ✓ 1_001_create_users.sql   (Priority=1, Sequence=1)
	//   ✓ 1_002_create_posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2_001_add_indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10_100_migration.sql     (Priority=10, Sequence=100)
	//
	// Examples (hyphen format):
	//   ✓ 1-001-create-users.sql   (Priority=1, Sequence=1)
	//   ✓ 1-002-create-posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2-001-add-indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10-10-create-newid.pgsql (Priority=10, Sequence=10)
	//
	// Mixed format (both in same directory):
	//   ✓ 1_001_create_users.sql  (underscore format)
	//   ✓ 1-002-create-posts.sql  (hyphen format)
	//   ✓ 2_001_add_indexes.sql   (underscore format)
	//
	// Execution order for mixed examples:
	//   1. 1_001_create_users.sql  (Priority 1, Sequence 1)
	//   2. 1-002-create-posts.sql  (Priority 1, Sequence 2)
	//   3. 2_001_add_indexes.sql   (Priority 2, Sequence 1)
	//
	// Invalid filenames (will be ignored):
	//   ✗ migration.sql          (missing priority/sequence)
	//   ✗ 1_create_users.sql     (missing sequence)
	//   ✗ create_users.sql       (missing priority/sequence)
	//   ✗ 1_001_create_users.txt (wrong extension)

	fmt.Println("See comments for file naming conventions")
}
pkg/readers/sqldir/reader.go (new file, 178 lines)
@@ -0,0 +1,178 @@
package sqldir

import (
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"strconv"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for SQL script directories
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new SQL directory reader
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads all SQL scripts from a directory into a Database
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("directory path is required")
	}

	// Check if directory exists
	info, err := os.Stat(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to access directory: %w", err)
	}
	if !info.IsDir() {
		return nil, fmt.Errorf("path is not a directory: %s", r.options.FilePath)
	}

	// Read scripts from directory
	scripts, err := r.readScripts()
	if err != nil {
		return nil, fmt.Errorf("failed to read scripts: %w", err)
	}

	// Get schema name from metadata or use default
	schemaName := "public"
	if name, ok := r.options.Metadata["schema_name"].(string); ok && name != "" {
		schemaName = name
	}

	// Create schema with scripts
	schema := &models.Schema{
		Name:    schemaName,
		Scripts: scripts,
	}

	// Get database name from metadata or use default
	dbName := "database"
	if name, ok := r.options.Metadata["database_name"].(string); ok && name != "" {
		dbName = name
	}

	// Create database with schema
	database := &models.Database{
		Name:    dbName,
		Schemas: []*models.Schema{schema},
	}

	// Set back-reference
	schema.RefDatabase = database

	return database, nil
}

// ReadSchema reads all SQL scripts from a directory into a Schema
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schema found")
	}
	return db.Schemas[0], nil
}

// ReadTable is not applicable for SQL script directories
func (r *Reader) ReadTable() (*models.Table, error) {
	return nil, fmt.Errorf("ReadTable is not supported for SQL script directories")
}

// readScripts recursively scans the directory for SQL files and parses them into Script models
func (r *Reader) readScripts() ([]*models.Script, error) {
	var scripts []*models.Script

	// Regular expression to parse filename: {priority}{sep}{sequence}{sep}{name}.sql or .pgsql
	// Separator can be underscore (_) or hyphen (-)
	// Example: 1_001_create_users.sql -> priority=1, sequence=001, name=create_users
	// Example: 2_005_add_indexes.pgsql -> priority=2, sequence=005, name=add_indexes
	// Example: 10-10-create-newid.pgsql -> priority=10, sequence=10, name=create-newid
	pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)

	err := filepath.WalkDir(r.options.FilePath, func(path string, d os.DirEntry, err error) error {
		if err != nil {
			return err
		}

		// Don't process directories as files (WalkDir still descends into them recursively)
		if d.IsDir() {
			return nil
		}

		// Skip symlinks
		info, err := d.Info()
		if err != nil {
			return err
		}
		if info.Mode()&os.ModeSymlink != 0 {
			return nil
		}

		// Get filename
		filename := d.Name()

		// Match against pattern
		matches := pattern.FindStringSubmatch(filename)
		if matches == nil {
			// Skip files that don't match the pattern
			return nil
		}

		// Parse priority
		priority, err := strconv.Atoi(matches[1])
		if err != nil {
			return fmt.Errorf("invalid priority in filename %s: %w", filename, err)
		}

		// Parse sequence
		sequence, err := strconv.ParseUint(matches[2], 10, 64)
		if err != nil {
			return fmt.Errorf("invalid sequence in filename %s: %w", filename, err)
		}

		// Extract name
		name := matches[3]

		// Read SQL content
		content, err := os.ReadFile(path)
		if err != nil {
			return fmt.Errorf("failed to read file %s: %w", path, err)
		}

		// Get relative path from base directory
		relPath, err := filepath.Rel(r.options.FilePath, path)
		if err != nil {
			relPath = path
		}

		// Create Script model
		script := models.InitScript(name)
		script.Description = fmt.Sprintf("SQL script from %s", relPath)
		script.SQL = string(content)
		script.Priority = priority
		script.Sequence = uint(sequence)

		scripts = append(scripts, script)

		return nil
	})

	if err != nil {
		return nil, err
	}

	return scripts, nil
}
pkg/readers/sqldir/reader_test.go (new file, 437 lines)
@@ -0,0 +1,437 @@
package sqldir

import (
	"os"
	"path/filepath"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test SQL files with both underscore and hyphen separators
	testFiles := map[string]string{
		"1_001_create_users.sql":   "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT);",
		"1_002_create_posts.sql":   "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT);",
		"2_001_add_indexes.sql":    "CREATE INDEX idx_posts_user_id ON posts(user_id);",
		"1_003_seed_data.pgsql":    "INSERT INTO users (name) VALUES ('Alice'), ('Bob');",
		"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL PRIMARY KEY);",
		"2-005-add-column.sql":     "ALTER TABLE users ADD COLUMN email TEXT;",
	}

	for filename, content := range testFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create subdirectory with additional script
	subDir := filepath.Join(tempDir, "migrations")
	if err := os.MkdirAll(subDir, 0755); err != nil {
		t.Fatalf("Failed to create subdirectory: %v", err)
	}
	subFile := filepath.Join(subDir, "3_001_add_column.sql")
	if err := os.WriteFile(subFile, []byte("ALTER TABLE users ADD COLUMN email TEXT;"), 0644); err != nil {
		t.Fatalf("Failed to create subdirectory file: %v", err)
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
		Metadata: map[string]any{
			"schema_name":   "test_schema",
			"database_name": "test_db",
		},
	})

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	// Verify database
	if db.Name != "test_db" {
		t.Errorf("Expected database name 'test_db', got '%s'", db.Name)
	}

	if len(db.Schemas) != 1 {
		t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
	}

	schema := db.Schemas[0]
	if schema.Name != "test_schema" {
		t.Errorf("Expected schema name 'test_schema', got '%s'", schema.Name)
	}

	// Verify scripts (should be 7 total: 4 underscore + 2 hyphen + 1 subdirectory)
	if len(schema.Scripts) != 7 {
		t.Fatalf("Expected 7 scripts, got %d", len(schema.Scripts))
	}

	// Verify script details
	expectedScripts := []struct {
		name     string
		priority int
		sequence uint
	}{
		{"create_users", 1, 1},
		{"create_posts", 1, 2},
		{"seed_data", 1, 3},
		{"add_indexes", 2, 1},
		{"add-column", 2, 5},
		{"add_column", 3, 1},
		{"create-newid", 10, 10},
	}

	scriptMap := make(map[string]*struct {
		priority int
		sequence uint
		sql      string
	})
	for _, script := range schema.Scripts {
		scriptMap[script.Name] = &struct {
			priority int
			sequence uint
			sql      string
		}{
			priority: script.Priority,
			sequence: script.Sequence,
			sql:      script.SQL,
		}
	}

	for _, expected := range expectedScripts {
		script, exists := scriptMap[expected.name]
		if !exists {
			t.Errorf("Expected script '%s' not found", expected.name)
			continue
		}
		if script.priority != expected.priority {
			t.Errorf("Script '%s': expected priority %d, got %d",
				expected.name, expected.priority, script.priority)
		}
		if script.sequence != expected.sequence {
			t.Errorf("Script '%s': expected sequence %d, got %d",
				expected.name, expected.sequence, script.sequence)
		}
		if script.sql == "" {
			t.Errorf("Script '%s': SQL content is empty", expected.name)
		}
	}
}

func TestReader_ReadSchema(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create test SQL file
	testFile := filepath.Join(tempDir, "1_001_test.sql")
	if err := os.WriteFile(testFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}

	// Create reader
	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	// Read schema
	schema, err := reader.ReadSchema()
	if err != nil {
		t.Fatalf("ReadSchema failed: %v", err)
	}

	// Verify schema
	if schema.Name != "public" {
		t.Errorf("Expected default schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Scripts) != 1 {
		t.Fatalf("Expected 1 script, got %d", len(schema.Scripts))
	}
}

func TestReader_InvalidDirectory(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{
		FilePath: "/nonexistent/directory",
	})

	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for nonexistent directory, got nil")
	}
}

func TestReader_EmptyDirectory(t *testing.T) {
	// Create temporary empty directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	if len(db.Schemas[0].Scripts) != 0 {
		t.Errorf("Expected 0 scripts in empty directory, got %d", len(db.Schemas[0].Scripts))
	}
}

func TestReader_InvalidFilename(t *testing.T) {
	// Create temporary test directory
	tempDir, err := os.MkdirTemp("", "sqldir-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)

	// Create files with various invalid patterns
	invalidFiles := []string{
		"invalid.sql",           // No priority/sequence
		"1_test.sql",            // Missing sequence
		"test_1_2.sql",          // Wrong order
		"a_001_test.sql",        // Non-numeric priority
		"1_abc_test.sql",        // Non-numeric sequence
		"1_001_test.txt",        // Wrong extension
		"1_001_test.sql.backup", // Wrong extension
	}

	for _, filename := range invalidFiles {
		filePath := filepath.Join(tempDir, filename)
		if err := os.WriteFile(filePath, []byte("SELECT 1;"), 0644); err != nil {
			t.Fatalf("Failed to create test file %s: %v", filename, err)
		}
	}

	// Create one valid file
	validFile := filepath.Join(tempDir, "1_001_valid.sql")
	if err := os.WriteFile(validFile, []byte("SELECT 1;"), 0644); err != nil {
		t.Fatalf("Failed to create valid file: %v", err)
	}

	reader := NewReader(&readers.ReaderOptions{
		FilePath: tempDir,
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}

	// Should only have the valid file
	if len(db.Schemas[0].Scripts) != 1 {
		t.Errorf("Expected 1 script (invalid files should be skipped), got %d", len(db.Schemas[0].Scripts))
	}

	if db.Schemas[0].Scripts[0].Name != "valid" {
		t.Errorf("Expected script name 'valid', got '%s'", db.Schemas[0].Scripts[0].Name)
	}
}

func TestReader_ReadTable(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{})

	_, err := reader.ReadTable()
	if err == nil {
		t.Error("Expected error for ReadTable (not supported), got nil")
	}
}

func TestReader_HyphenFormat(t *testing.T) {
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-hyphen-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create test files with hyphen separators
|
||||||
|
testFiles := map[string]string{
|
||||||
|
"1-001-create-table.sql": "CREATE TABLE test (id INT);",
|
||||||
|
"1-002-insert-data.pgsql": "INSERT INTO test VALUES (1);",
|
||||||
|
"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL);",
|
||||||
|
"2-005-add-index.sql": "CREATE INDEX idx_test ON test(id);",
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content := range testFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create reader
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read database
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) != 4 {
|
||||||
|
t.Fatalf("Expected 4 scripts, got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify specific hyphen-formatted scripts
|
||||||
|
expectedScripts := map[string]struct {
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
}{
|
||||||
|
"create-table": {1, 1},
|
||||||
|
"insert-data": {1, 2},
|
||||||
|
"add-index": {2, 5},
|
||||||
|
"create-newid": {10, 10},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
expected, exists := expectedScripts[script.Name]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Unexpected script: %s", script.Name)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if script.Priority != expected.priority {
|
||||||
|
t.Errorf("Script '%s': expected priority %d, got %d",
|
||||||
|
script.Name, expected.priority, script.Priority)
|
||||||
|
}
|
||||||
|
if script.Sequence != expected.sequence {
|
||||||
|
t.Errorf("Script '%s': expected sequence %d, got %d",
|
||||||
|
script.Name, expected.sequence, script.Sequence)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_MixedFormat(t *testing.T) {
|
||||||
|
// Test that both underscore and hyphen formats can be mixed
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-mixed-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
testFiles := map[string]string{
|
||||||
|
"1_001_underscore.sql": "SELECT 1;",
|
||||||
|
"1-002-hyphen.sql": "SELECT 2;",
|
||||||
|
"2_003_underscore.sql": "SELECT 3;",
|
||||||
|
"2-004-hyphen.sql": "SELECT 4;",
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content := range testFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) != 4 {
|
||||||
|
t.Fatalf("Expected 4 scripts (mixed format), got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify both formats are parsed correctly
|
||||||
|
names := make(map[string]bool)
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
names[script.Name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedNames := []string{"underscore", "hyphen", "underscore", "hyphen"}
|
||||||
|
for _, name := range expectedNames {
|
||||||
|
if !names[name] {
|
||||||
|
t.Errorf("Expected script name '%s' not found", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_SkipSymlinks(t *testing.T) {
|
||||||
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-symlink-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create a real SQL file
|
||||||
|
realFile := filepath.Join(tempDir, "1_001_real_file.sql")
|
||||||
|
if err := os.WriteFile(realFile, []byte("SELECT 1;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create real file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create another file to link to
|
||||||
|
targetFile := filepath.Join(tempDir, "2_001_target.sql")
|
||||||
|
if err := os.WriteFile(targetFile, []byte("SELECT 2;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create target file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a symlink to the target file (this should be skipped)
|
||||||
|
symlinkFile := filepath.Join(tempDir, "3_001_symlink.sql")
|
||||||
|
if err := os.Symlink(targetFile, symlinkFile); err != nil {
|
||||||
|
// Skip test on systems that don't support symlinks (e.g., Windows without admin)
|
||||||
|
t.Skipf("Symlink creation not supported: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create reader
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read database
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
|
||||||
|
// Should only have 2 scripts (real_file and target), symlink should be skipped
|
||||||
|
if len(schema.Scripts) != 2 {
|
||||||
|
t.Errorf("Expected 2 scripts (symlink should be skipped), got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the scripts are the real files, not the symlink
|
||||||
|
scriptNames := make(map[string]bool)
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
scriptNames[script.Name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if !scriptNames["real_file"] {
|
||||||
|
t.Error("Expected 'real_file' script to be present")
|
||||||
|
}
|
||||||
|
if !scriptNames["target"] {
|
||||||
|
t.Error("Expected 'target' script to be present")
|
||||||
|
}
|
||||||
|
if scriptNames["symlink"] {
|
||||||
|
t.Error("Symlink script should have been skipped but was found")
|
||||||
|
}
|
||||||
|
}
|
||||||
75
pkg/readers/sqlite/README.md
Normal file
75
pkg/readers/sqlite/README.md
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
# SQLite Reader
|
||||||
|
|
||||||
|
Reads database schema from SQLite database files.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Using file path
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "path/to/database.db",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := sqlite.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
// Or using connection string
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
ConnectionString: "path/to/database.db",
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Reads tables with columns and data types
|
||||||
|
- Reads views with definitions
|
||||||
|
- Reads primary keys
|
||||||
|
- Reads foreign keys with CASCADE actions
|
||||||
|
- Reads indexes (non-auto-generated)
|
||||||
|
- Maps SQLite types to canonical types
|
||||||
|
- Derives relationships from foreign keys
|
||||||
|
|
||||||
|
## SQLite Specifics
|
||||||
|
|
||||||
|
- SQLite doesn't support schemas, creates single "main" schema
|
||||||
|
- Uses pure Go driver (modernc.org/sqlite) - no CGo required
|
||||||
|
- Supports both file path and connection string
|
||||||
|
- Auto-increment detection for INTEGER PRIMARY KEY columns
|
||||||
|
- Foreign keys require `PRAGMA foreign_keys = ON` to be set
|
||||||
|
|
||||||
|
## Example Schema
|
||||||
|
|
||||||
|
```sql
|
||||||
|
PRAGMA foreign_keys = ON;
|
||||||
|
|
||||||
|
CREATE TABLE users (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
username VARCHAR(50) NOT NULL UNIQUE,
|
||||||
|
email VARCHAR(100) NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE posts (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id INTEGER NOT NULL,
|
||||||
|
title VARCHAR(200) NOT NULL,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Mappings
|
||||||
|
|
||||||
|
| SQLite Type | Canonical Type |
|
||||||
|
|-------------|---------------|
|
||||||
|
| INTEGER, INT | int |
|
||||||
|
| BIGINT | int64 |
|
||||||
|
| REAL, DOUBLE | float64 |
|
||||||
|
| TEXT, VARCHAR | string |
|
||||||
|
| BLOB | bytea |
|
||||||
|
| BOOLEAN | bool |
|
||||||
|
| DATE | date |
|
||||||
|
| DATETIME, TIMESTAMP | timestamp |
|
||||||
306
pkg/readers/sqlite/queries.go
Normal file
306
pkg/readers/sqlite/queries.go
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
package sqlite
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// queryTables retrieves all tables from the SQLite database
|
||||||
|
func (r *Reader) queryTables() ([]*models.Table, error) {
|
||||||
|
query := `
|
||||||
|
SELECT name
|
||||||
|
FROM sqlite_master
|
||||||
|
WHERE type = 'table'
|
||||||
|
AND name NOT LIKE 'sqlite_%'
|
||||||
|
ORDER BY name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
tables := make([]*models.Table, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var tableName string
|
||||||
|
|
||||||
|
if err := rows.Scan(&tableName); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
table := models.InitTable(tableName, "main")
|
||||||
|
tables = append(tables, table)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tables, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryViews retrieves all views from the SQLite database
|
||||||
|
func (r *Reader) queryViews() ([]*models.View, error) {
|
||||||
|
query := `
|
||||||
|
SELECT name, sql
|
||||||
|
FROM sqlite_master
|
||||||
|
WHERE type = 'view'
|
||||||
|
ORDER BY name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
views := make([]*models.View, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var viewName string
|
||||||
|
var sql *string
|
||||||
|
|
||||||
|
if err := rows.Scan(&viewName, &sql); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
view := models.InitView(viewName, "main")
|
||||||
|
if sql != nil {
|
||||||
|
view.Definition = *sql
|
||||||
|
}
|
||||||
|
|
||||||
|
views = append(views, view)
|
||||||
|
}
|
||||||
|
|
||||||
|
return views, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryColumns retrieves all columns for a given table or view
|
||||||
|
func (r *Reader) queryColumns(tableName string) (map[string]*models.Column, error) {
|
||||||
|
query := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
columns := make(map[string]*models.Column)
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var cid int
|
||||||
|
var name, dataType string
|
||||||
|
var notNull, pk int
|
||||||
|
var defaultValue *string
|
||||||
|
|
||||||
|
if err := rows.Scan(&cid, &name, &dataType, ¬Null, &defaultValue, &pk); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
column := models.InitColumn(name, tableName, "main")
|
||||||
|
column.Type = r.mapDataType(strings.ToUpper(dataType))
|
||||||
|
column.NotNull = (notNull == 1)
|
||||||
|
column.IsPrimaryKey = (pk > 0)
|
||||||
|
column.Sequence = uint(cid + 1)
|
||||||
|
|
||||||
|
if defaultValue != nil {
|
||||||
|
column.Default = *defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for autoincrement (SQLite uses INTEGER PRIMARY KEY AUTOINCREMENT)
|
||||||
|
if pk > 0 && strings.EqualFold(dataType, "INTEGER") {
|
||||||
|
column.AutoIncrement = r.isAutoIncrement(tableName, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
columns[name] = column
|
||||||
|
}
|
||||||
|
|
||||||
|
return columns, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// isAutoIncrement checks if a column is autoincrement
|
||||||
|
func (r *Reader) isAutoIncrement(tableName, columnName string) bool {
|
||||||
|
// Check sqlite_sequence table or parse CREATE TABLE statement
|
||||||
|
query := `
|
||||||
|
SELECT sql
|
||||||
|
FROM sqlite_master
|
||||||
|
WHERE type = 'table' AND name = ?
|
||||||
|
`
|
||||||
|
|
||||||
|
var sql string
|
||||||
|
err := r.db.QueryRowContext(r.ctx, query, tableName).Scan(&sql)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the SQL contains AUTOINCREMENT for this column
|
||||||
|
return strings.Contains(strings.ToUpper(sql), strings.ToUpper(columnName)+" INTEGER PRIMARY KEY AUTOINCREMENT") ||
|
||||||
|
strings.Contains(strings.ToUpper(sql), strings.ToUpper(columnName)+" INTEGER AUTOINCREMENT")
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryPrimaryKey retrieves the primary key constraint for a table
|
||||||
|
func (r *Reader) queryPrimaryKey(tableName string) (*models.Constraint, error) {
|
||||||
|
query := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var pkColumns []string
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var cid int
|
||||||
|
var name, dataType string
|
||||||
|
var notNull, pk int
|
||||||
|
var defaultValue *string
|
||||||
|
|
||||||
|
if err := rows.Scan(&cid, &name, &dataType, ¬Null, &defaultValue, &pk); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if pk > 0 {
|
||||||
|
pkColumns = append(pkColumns, name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(pkColumns) == 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create primary key constraint
|
||||||
|
constraintName := fmt.Sprintf("%s_pkey", tableName)
|
||||||
|
constraint := models.InitConstraint(constraintName, models.PrimaryKeyConstraint)
|
||||||
|
constraint.Schema = "main"
|
||||||
|
constraint.Table = tableName
|
||||||
|
constraint.Columns = pkColumns
|
||||||
|
|
||||||
|
return constraint, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryForeignKeys retrieves all foreign key constraints for a table
|
||||||
|
func (r *Reader) queryForeignKeys(tableName string) ([]*models.Constraint, error) {
|
||||||
|
query := fmt.Sprintf("PRAGMA foreign_key_list(%s)", tableName)
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
// Group foreign keys by id (since composite FKs have multiple rows)
|
||||||
|
fkMap := make(map[int]*models.Constraint)
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var id, seq int
|
||||||
|
var referencedTable, fromColumn, toColumn string
|
||||||
|
var onUpdate, onDelete, match string
|
||||||
|
|
||||||
|
if err := rows.Scan(&id, &seq, &referencedTable, &fromColumn, &toColumn, &onUpdate, &onDelete, &match); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists := fkMap[id]; !exists {
|
||||||
|
constraintName := fmt.Sprintf("%s_%s_fkey", tableName, referencedTable)
|
||||||
|
if id > 0 {
|
||||||
|
constraintName = fmt.Sprintf("%s_%s_fkey_%d", tableName, referencedTable, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
|
||||||
|
constraint.Schema = "main"
|
||||||
|
constraint.Table = tableName
|
||||||
|
constraint.ReferencedSchema = "main"
|
||||||
|
constraint.ReferencedTable = referencedTable
|
||||||
|
constraint.OnUpdate = onUpdate
|
||||||
|
constraint.OnDelete = onDelete
|
||||||
|
constraint.Columns = []string{}
|
||||||
|
constraint.ReferencedColumns = []string{}
|
||||||
|
|
||||||
|
fkMap[id] = constraint
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add column to the constraint
|
||||||
|
fkMap[id].Columns = append(fkMap[id].Columns, fromColumn)
|
||||||
|
fkMap[id].ReferencedColumns = append(fkMap[id].ReferencedColumns, toColumn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert map to slice
|
||||||
|
foreignKeys := make([]*models.Constraint, 0, len(fkMap))
|
||||||
|
for _, fk := range fkMap {
|
||||||
|
foreignKeys = append(foreignKeys, fk)
|
||||||
|
}
|
||||||
|
|
||||||
|
return foreignKeys, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryIndexes retrieves all indexes for a table
|
||||||
|
func (r *Reader) queryIndexes(tableName string) ([]*models.Index, error) {
|
||||||
|
query := fmt.Sprintf("PRAGMA index_list(%s)", tableName)
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
indexes := make([]*models.Index, 0)
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var seq int
|
||||||
|
var name string
|
||||||
|
var unique int
|
||||||
|
var origin string
|
||||||
|
var partial int
|
||||||
|
|
||||||
|
if err := rows.Scan(&seq, &name, &unique, &origin, &partial); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip auto-generated indexes (origin = 'pk' for primary keys, etc.)
|
||||||
|
// origin: c = CREATE INDEX, u = UNIQUE constraint, pk = PRIMARY KEY
|
||||||
|
if origin == "pk" || origin == "u" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
index := models.InitIndex(name, tableName, "main")
|
||||||
|
index.Unique = (unique == 1)
|
||||||
|
|
||||||
|
// Get index columns
|
||||||
|
columns, err := r.queryIndexColumns(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
index.Columns = columns
|
||||||
|
|
||||||
|
indexes = append(indexes, index)
|
||||||
|
}
|
||||||
|
|
||||||
|
return indexes, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryIndexColumns retrieves the columns for a specific index
|
||||||
|
func (r *Reader) queryIndexColumns(indexName string) ([]string, error) {
|
||||||
|
query := fmt.Sprintf("PRAGMA index_info(%s)", indexName)
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(r.ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
columns := make([]string, 0)
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var seqno, cid int
|
||||||
|
var name *string
|
||||||
|
|
||||||
|
if err := rows.Scan(&seqno, &cid, &name); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if name != nil {
|
||||||
|
columns = append(columns, *name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return columns, rows.Err()
|
||||||
|
}
|
||||||
261
pkg/readers/sqlite/reader.go
Normal file
261
pkg/readers/sqlite/reader.go
Normal file
@@ -0,0 +1,261 @@
|
|||||||
|
package sqlite
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
_ "modernc.org/sqlite" // SQLite driver
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reader implements the readers.Reader interface for SQLite databases
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
db *sql.DB
|
||||||
|
ctx context.Context
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader creates a new SQLite reader
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
ctx: context.Background(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDatabase reads the entire database schema from SQLite
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
// Validate file path or connection string
|
||||||
|
dbPath := r.options.FilePath
|
||||||
|
if dbPath == "" && r.options.ConnectionString != "" {
|
||||||
|
dbPath = r.options.ConnectionString
|
||||||
|
}
|
||||||
|
if dbPath == "" {
|
||||||
|
return nil, fmt.Errorf("file path or connection string is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Connect to the database
|
||||||
|
if err := r.connect(dbPath); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to connect: %w", err)
|
||||||
|
}
|
||||||
|
defer r.close()
|
||||||
|
|
||||||
|
// Get database name from file path
|
||||||
|
dbName := filepath.Base(dbPath)
|
||||||
|
if dbName == "" {
|
||||||
|
dbName = "sqlite"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize database model
|
||||||
|
db := models.InitDatabase(dbName)
|
||||||
|
db.DatabaseType = models.SqlLiteDatabaseType
|
||||||
|
db.SourceFormat = "sqlite"
|
||||||
|
|
||||||
|
// Get SQLite version
|
||||||
|
var version string
|
||||||
|
err := r.db.QueryRowContext(r.ctx, "SELECT sqlite_version()").Scan(&version)
|
||||||
|
if err == nil {
|
||||||
|
db.DatabaseVersion = version
|
||||||
|
}
|
||||||
|
|
||||||
|
// SQLite doesn't have schemas, so we create a single "main" schema
|
||||||
|
schema := models.InitSchema("main")
|
||||||
|
schema.RefDatabase = db
|
||||||
|
|
||||||
|
// Query tables
|
||||||
|
tables, err := r.queryTables()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query tables: %w", err)
|
||||||
|
}
|
||||||
|
schema.Tables = tables
|
||||||
|
|
||||||
|
// Query views
|
||||||
|
views, err := r.queryViews()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query views: %w", err)
|
||||||
|
}
|
||||||
|
schema.Views = views
|
||||||
|
|
||||||
|
// Query columns for tables and views
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
columns, err := r.queryColumns(table.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query columns for table %s: %w", table.Name, err)
|
||||||
|
}
|
||||||
|
table.Columns = columns
|
||||||
|
table.RefSchema = schema
|
||||||
|
|
||||||
|
// Query primary key
|
||||||
|
pk, err := r.queryPrimaryKey(table.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query primary key for table %s: %w", table.Name, err)
|
||||||
|
}
|
||||||
|
if pk != nil {
|
||||||
|
table.Constraints[pk.Name] = pk
|
||||||
|
// Mark columns as primary key and not null
|
||||||
|
for _, colName := range pk.Columns {
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
col.IsPrimaryKey = true
|
||||||
|
col.NotNull = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query foreign keys
|
||||||
|
foreignKeys, err := r.queryForeignKeys(table.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query foreign keys for table %s: %w", table.Name, err)
|
||||||
|
}
|
||||||
|
for _, fk := range foreignKeys {
|
||||||
|
table.Constraints[fk.Name] = fk
|
||||||
|
// Derive relationship from foreign key
|
||||||
|
r.deriveRelationship(table, fk)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query indexes
|
||||||
|
indexes, err := r.queryIndexes(table.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query indexes for table %s: %w", table.Name, err)
|
||||||
|
}
|
||||||
|
for _, idx := range indexes {
|
||||||
|
table.Indexes[idx.Name] = idx
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query columns for views
|
||||||
|
for _, view := range schema.Views {
|
||||||
|
columns, err := r.queryColumns(view.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query columns for view %s: %w", view.Name, err)
|
||||||
|
}
|
||||||
|
view.Columns = columns
|
||||||
|
view.RefSchema = schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add schema to database
|
||||||
|
db.Schemas = append(db.Schemas, schema)
|
||||||
|
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadSchema reads a single schema (returns the main schema from the database)
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schemas found in database")
|
||||||
|
}
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTable reads a single table (returns the first table from the schema)
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
schema, err := r.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(schema.Tables) == 0 {
|
||||||
|
return nil, fmt.Errorf("no tables found in schema")
|
||||||
|
}
|
||||||
|
return schema.Tables[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// connect establishes a connection to the SQLite database
|
||||||
|
func (r *Reader) connect(dbPath string) error {
|
||||||
|
db, err := sql.Open("sqlite", dbPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
r.db = db
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// close closes the database connection
|
||||||
|
func (r *Reader) close() {
|
||||||
|
if r.db != nil {
|
||||||
|
r.db.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// mapDataType maps SQLite data types to canonical types
|
||||||
|
func (r *Reader) mapDataType(sqliteType string) string {
|
||||||
|
// SQLite has a flexible type system, but we map common types
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"INTEGER": "int",
|
||||||
|
"INT": "int",
|
||||||
|
"TINYINT": "int8",
|
||||||
|
"SMALLINT": "int16",
|
||||||
|
"MEDIUMINT": "int",
|
||||||
|
"BIGINT": "int64",
|
||||||
|
"UNSIGNED BIG INT": "uint64",
|
||||||
|
"INT2": "int16",
|
||||||
|
"INT8": "int64",
|
||||||
|
"REAL": "float64",
|
||||||
|
"DOUBLE": "float64",
|
||||||
|
"DOUBLE PRECISION": "float64",
|
||||||
|
"FLOAT": "float32",
|
||||||
|
"NUMERIC": "decimal",
|
||||||
|
"DECIMAL": "decimal",
|
||||||
|
"BOOLEAN": "bool",
|
||||||
|
"BOOL": "bool",
|
||||||
|
"DATE": "date",
|
||||||
|
"DATETIME": "timestamp",
|
||||||
|
"TIMESTAMP": "timestamp",
|
||||||
|
"TEXT": "string",
|
||||||
|
"VARCHAR": "string",
|
||||||
|
"CHAR": "string",
|
||||||
|
"CHARACTER": "string",
|
||||||
|
"VARYING CHARACTER": "string",
|
||||||
|
"NCHAR": "string",
|
||||||
|
"NVARCHAR": "string",
|
||||||
|
"CLOB": "text",
|
||||||
|
"BLOB": "bytea",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try exact match first
|
||||||
|
if mapped, exists := typeMap[sqliteType]; exists {
|
||||||
|
return mapped
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try case-insensitive match for common types
|
||||||
|
sqliteTypeUpper := sqliteType
|
||||||
|
if len(sqliteType) > 0 {
|
||||||
|
// Extract base type (e.g., "VARCHAR(255)" -> "VARCHAR")
|
||||||
|
for baseType := range typeMap {
|
||||||
|
if len(sqliteTypeUpper) >= len(baseType) && sqliteTypeUpper[:len(baseType)] == baseType {
|
||||||
|
return typeMap[baseType]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to string for unknown types
|
||||||
|
return "string"
|
||||||
|
}
|
||||||
|
|
||||||
|
// deriveRelationship creates a relationship from a foreign key constraint
|
||||||
|
func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint) {
|
||||||
|
relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)
|
||||||
|
|
||||||
|
relationship := models.InitRelationship(relationshipName, models.OneToMany)
|
||||||
|
relationship.FromTable = table.Name
|
||||||
|
relationship.FromSchema = table.Schema
|
||||||
|
relationship.ToTable = fk.ReferencedTable
|
||||||
|
relationship.ToSchema = fk.ReferencedSchema
|
||||||
|
relationship.ForeignKey = fk.Name
|
||||||
|
|
||||||
|
// Store constraint actions in properties
|
||||||
|
if fk.OnDelete != "" {
|
||||||
|
relationship.Properties["on_delete"] = fk.OnDelete
|
||||||
|
}
|
||||||
|
if fk.OnUpdate != "" {
|
||||||
|
relationship.Properties["on_update"] = fk.OnUpdate
|
||||||
|
}
|
||||||
|
|
||||||
|
table.Relationships[relationshipName] = relationship
|
||||||
|
}
|
||||||
334
pkg/readers/sqlite/reader_test.go
Normal file
334
pkg/readers/sqlite/reader_test.go
Normal file
@@ -0,0 +1,334 @@
|
|||||||
|
package sqlite
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupTestDatabase creates a temporary SQLite database with test data
|
||||||
|
func setupTestDatabase(t *testing.T) string {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
dbPath := filepath.Join(tmpDir, "test.db")
|
||||||
|
|
||||||
|
db, err := sql.Open("sqlite", dbPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
// Create test schema
|
||||||
|
schema := `
|
||||||
|
PRAGMA foreign_keys = ON;
|
||||||
|
|
||||||
|
CREATE TABLE users (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
username VARCHAR(50) NOT NULL UNIQUE,
|
||||||
|
email VARCHAR(100) NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE posts (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id INTEGER NOT NULL,
|
||||||
|
title VARCHAR(200) NOT NULL,
|
||||||
|
content TEXT,
|
||||||
|
published BOOLEAN DEFAULT 0,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE comments (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
post_id INTEGER NOT NULL,
|
||||||
|
user_id INTEGER NOT NULL,
|
||||||
|
comment TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_posts_user_id ON posts(user_id);
|
||||||
|
CREATE INDEX idx_comments_post_id ON comments(post_id);
|
||||||
|
CREATE UNIQUE INDEX idx_users_email ON users(email);
|
||||||
|
|
||||||
|
CREATE VIEW user_post_count AS
|
||||||
|
SELECT u.id, u.username, COUNT(p.id) as post_count
|
||||||
|
FROM users u
|
||||||
|
LEFT JOIN posts p ON u.id = p.user_id
|
||||||
|
GROUP BY u.id, u.username;
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err = db.Exec(schema)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
return dbPath
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, db)
|
||||||
|
|
||||||
|
// Check database metadata
|
||||||
|
assert.Equal(t, "test.db", db.Name)
|
||||||
|
assert.Equal(t, models.SqlLiteDatabaseType, db.DatabaseType)
|
||||||
|
assert.Equal(t, "sqlite", db.SourceFormat)
|
||||||
|
assert.NotEmpty(t, db.DatabaseVersion)
|
||||||
|
|
||||||
|
// Check schemas (SQLite should have a single "main" schema)
|
||||||
|
require.Len(t, db.Schemas, 1)
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
assert.Equal(t, "main", schema.Name)
|
||||||
|
|
||||||
|
// Check tables
|
||||||
|
assert.Len(t, schema.Tables, 3)
|
||||||
|
tableNames := make([]string, len(schema.Tables))
|
||||||
|
for i, table := range schema.Tables {
|
||||||
|
tableNames[i] = table.Name
|
||||||
|
}
|
||||||
|
assert.Contains(t, tableNames, "users")
|
||||||
|
assert.Contains(t, tableNames, "posts")
|
||||||
|
assert.Contains(t, tableNames, "comments")
|
||||||
|
|
||||||
|
// Check views
|
||||||
|
assert.Len(t, schema.Views, 1)
|
||||||
|
assert.Equal(t, "user_post_count", schema.Views[0].Name)
|
||||||
|
assert.NotEmpty(t, schema.Views[0].Definition)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable_Users(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, db)
|
||||||
|
|
||||||
|
// Find users table
|
||||||
|
var usersTable *models.Table
|
||||||
|
for _, table := range db.Schemas[0].Tables {
|
||||||
|
if table.Name == "users" {
|
||||||
|
usersTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NotNil(t, usersTable)
|
||||||
|
assert.Equal(t, "users", usersTable.Name)
|
||||||
|
assert.Equal(t, "main", usersTable.Schema)
|
||||||
|
|
||||||
|
// Check columns
|
||||||
|
assert.Len(t, usersTable.Columns, 4)
|
||||||
|
|
||||||
|
// Check id column
|
||||||
|
idCol, exists := usersTable.Columns["id"]
|
||||||
|
require.True(t, exists)
|
||||||
|
assert.Equal(t, "int", idCol.Type)
|
||||||
|
assert.True(t, idCol.IsPrimaryKey)
|
||||||
|
assert.True(t, idCol.AutoIncrement)
|
||||||
|
assert.True(t, idCol.NotNull)
|
||||||
|
|
||||||
|
// Check username column
|
||||||
|
usernameCol, exists := usersTable.Columns["username"]
|
||||||
|
require.True(t, exists)
|
||||||
|
assert.Equal(t, "string", usernameCol.Type)
|
||||||
|
assert.True(t, usernameCol.NotNull)
|
||||||
|
assert.False(t, usernameCol.IsPrimaryKey)
|
||||||
|
|
||||||
|
// Check email column
|
||||||
|
emailCol, exists := usersTable.Columns["email"]
|
||||||
|
require.True(t, exists)
|
||||||
|
assert.Equal(t, "string", emailCol.Type)
|
||||||
|
assert.True(t, emailCol.NotNull)
|
||||||
|
|
||||||
|
// Check primary key constraint
|
||||||
|
assert.Len(t, usersTable.Constraints, 1)
|
||||||
|
pkConstraint, exists := usersTable.Constraints["users_pkey"]
|
||||||
|
require.True(t, exists)
|
||||||
|
assert.Equal(t, models.PrimaryKeyConstraint, pkConstraint.Type)
|
||||||
|
assert.Equal(t, []string{"id"}, pkConstraint.Columns)
|
||||||
|
|
||||||
|
// Check indexes (should have unique index on email and username)
|
||||||
|
assert.GreaterOrEqual(t, len(usersTable.Indexes), 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable_Posts(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, db)
|
||||||
|
|
||||||
|
// Find posts table
|
||||||
|
var postsTable *models.Table
|
||||||
|
for _, table := range db.Schemas[0].Tables {
|
||||||
|
if table.Name == "posts" {
|
||||||
|
postsTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NotNil(t, postsTable)
|
||||||
|
|
||||||
|
// Check columns
|
||||||
|
assert.Len(t, postsTable.Columns, 6)
|
||||||
|
|
||||||
|
// Check foreign key constraint
|
||||||
|
hasForeignKey := false
|
||||||
|
for _, constraint := range postsTable.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
hasForeignKey = true
|
||||||
|
assert.Equal(t, "users", constraint.ReferencedTable)
|
||||||
|
assert.Equal(t, "CASCADE", constraint.OnDelete)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.True(t, hasForeignKey, "Posts table should have a foreign key constraint")
|
||||||
|
|
||||||
|
// Check relationships
|
||||||
|
assert.GreaterOrEqual(t, len(postsTable.Relationships), 1)
|
||||||
|
|
||||||
|
// Check indexes
|
||||||
|
hasUserIdIndex := false
|
||||||
|
for _, index := range postsTable.Indexes {
|
||||||
|
if index.Name == "idx_posts_user_id" {
|
||||||
|
hasUserIdIndex = true
|
||||||
|
assert.Contains(t, index.Columns, "user_id")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.True(t, hasUserIdIndex, "Posts table should have idx_posts_user_id index")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable_Comments(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, db)
|
||||||
|
|
||||||
|
// Find comments table
|
||||||
|
var commentsTable *models.Table
|
||||||
|
for _, table := range db.Schemas[0].Tables {
|
||||||
|
if table.Name == "comments" {
|
||||||
|
commentsTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NotNil(t, commentsTable)
|
||||||
|
|
||||||
|
// Check foreign key constraints (should have 2)
|
||||||
|
fkCount := 0
|
||||||
|
for _, constraint := range commentsTable.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
fkCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.Equal(t, 2, fkCount, "Comments table should have 2 foreign key constraints")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, schema)
|
||||||
|
assert.Equal(t, "main", schema.Name)
|
||||||
|
assert.Len(t, schema.Tables, 3)
|
||||||
|
assert.Len(t, schema.Views, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, table)
|
||||||
|
assert.NotEmpty(t, table.Name)
|
||||||
|
assert.NotEmpty(t, table.Columns)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ConnectionString(t *testing.T) {
|
||||||
|
dbPath := setupTestDatabase(t)
|
||||||
|
defer os.Remove(dbPath)
|
||||||
|
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
ConnectionString: dbPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, db)
|
||||||
|
assert.Len(t, db.Schemas, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_InvalidPath(t *testing.T) {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/path/to/database.db",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_MissingPath(t *testing.T) {
|
||||||
|
options := &readers.ReaderOptions{}
|
||||||
|
|
||||||
|
reader := NewReader(options)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "file path or connection string is required")
|
||||||
|
}
|
||||||
36
pkg/reflectutil/doc.go
Normal file
36
pkg/reflectutil/doc.go
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
// Package reflectutil provides reflection utilities for analyzing Go code structures.
|
||||||
|
//
|
||||||
|
// # Overview
|
||||||
|
//
|
||||||
|
// The reflectutil package offers helper functions for working with Go's reflection
|
||||||
|
// capabilities, particularly for parsing Go struct definitions and extracting type
|
||||||
|
// information. This is used by readers that parse ORM model files.
|
||||||
|
//
|
||||||
|
// # Features
|
||||||
|
//
|
||||||
|
// - Struct tag parsing and extraction
|
||||||
|
// - Type information analysis
|
||||||
|
// - Field metadata extraction
|
||||||
|
// - ORM tag interpretation (GORM, Bun, etc.)
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// This package is primarily used internally by readers like GORM and Bun to parse
|
||||||
|
// Go struct definitions and convert them to database schema models.
|
||||||
|
//
|
||||||
|
// // Example: Parse struct tags
|
||||||
|
// tags := reflectutil.ParseStructTags(field)
|
||||||
|
// columnName := tags.Get("db")
|
||||||
|
//
|
||||||
|
// # Supported ORM Tags
|
||||||
|
//
|
||||||
|
// The package understands tag conventions from:
|
||||||
|
// - GORM (gorm tag)
|
||||||
|
// - Bun (bun tag)
|
||||||
|
// - Standard database/sql (db tag)
|
||||||
|
//
|
||||||
|
// # Purpose
|
||||||
|
//
|
||||||
|
// This package enables RelSpec to read existing ORM models and convert them to
|
||||||
|
// a unified schema representation for transformation to other formats.
|
||||||
|
package reflectutil
|
||||||
326
pkg/reflectutil/helpers.go
Normal file
326
pkg/reflectutil/helpers.go
Normal file
@@ -0,0 +1,326 @@
|
|||||||
|
package reflectutil
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Deref dereferences pointers until it reaches a non-pointer value
|
||||||
|
// Returns the dereferenced value and true if successful, or the original value and false if nil
|
||||||
|
func Deref(v reflect.Value) (reflect.Value, bool) {
|
||||||
|
for v.Kind() == reflect.Ptr {
|
||||||
|
if v.IsNil() {
|
||||||
|
return v, false
|
||||||
|
}
|
||||||
|
v = v.Elem()
|
||||||
|
}
|
||||||
|
return v, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// DerefInterface dereferences an interface{} until it reaches a non-pointer value
|
||||||
|
func DerefInterface(i interface{}) reflect.Value {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, _ = Deref(v)
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetFieldValue extracts a field value from a struct, map, or pointer
|
||||||
|
// Returns nil if the field doesn't exist or can't be accessed
|
||||||
|
func GetFieldValue(item interface{}, field string) interface{} {
|
||||||
|
v := reflect.ValueOf(item)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
fieldVal := v.FieldByName(field)
|
||||||
|
if fieldVal.IsValid() {
|
||||||
|
return fieldVal.Interface()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
|
||||||
|
case reflect.Map:
|
||||||
|
keyVal := reflect.ValueOf(field)
|
||||||
|
mapVal := v.MapIndex(keyVal)
|
||||||
|
if mapVal.IsValid() {
|
||||||
|
return mapVal.Interface()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsSliceOrArray checks if an interface{} is a slice or array
|
||||||
|
func IsSliceOrArray(i interface{}) bool {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
k := v.Kind()
|
||||||
|
return k == reflect.Slice || k == reflect.Array
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsMap checks if an interface{} is a map
|
||||||
|
func IsMap(i interface{}) bool {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return v.Kind() == reflect.Map
|
||||||
|
}
|
||||||
|
|
||||||
|
// SliceLen returns the length of a slice/array, or 0 if not a slice/array
|
||||||
|
func SliceLen(i interface{}) int {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return v.Len()
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapLen returns the length of a map, or 0 if not a map
|
||||||
|
func MapLen(i interface{}) int {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
if v.Kind() != reflect.Map {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return v.Len()
|
||||||
|
}
|
||||||
|
|
||||||
|
// SliceToInterfaces converts a slice/array to []interface{}
|
||||||
|
// Returns empty slice if not a slice/array
|
||||||
|
func SliceToInterfaces(i interface{}) []interface{} {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]interface{}, v.Len())
|
||||||
|
for i := 0; i < v.Len(); i++ {
|
||||||
|
result[i] = v.Index(i).Interface()
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapKeys returns all keys from a map as []interface{}
|
||||||
|
// Returns empty slice if not a map
|
||||||
|
func MapKeys(i interface{}) []interface{} {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Map {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
keys := v.MapKeys()
|
||||||
|
result := make([]interface{}, len(keys))
|
||||||
|
for i, key := range keys {
|
||||||
|
result[i] = key.Interface()
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapValues returns all values from a map as []interface{}
|
||||||
|
// Returns empty slice if not a map
|
||||||
|
func MapValues(i interface{}) []interface{} {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Map {
|
||||||
|
return []interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]interface{}, 0, v.Len())
|
||||||
|
iter := v.MapRange()
|
||||||
|
for iter.Next() {
|
||||||
|
result = append(result, iter.Value().Interface())
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapGet safely gets a value from a map by key
|
||||||
|
// Returns nil if key doesn't exist or not a map
|
||||||
|
func MapGet(m interface{}, key interface{}) interface{} {
|
||||||
|
v := reflect.ValueOf(m)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Map {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
keyVal := reflect.ValueOf(key)
|
||||||
|
mapVal := v.MapIndex(keyVal)
|
||||||
|
if mapVal.IsValid() {
|
||||||
|
return mapVal.Interface()
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SliceIndex safely gets an element from a slice/array by index
|
||||||
|
// Returns nil if index out of bounds or not a slice/array
|
||||||
|
func SliceIndex(slice interface{}, index int) interface{} {
|
||||||
|
v := reflect.ValueOf(slice)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if index < 0 || index >= v.Len() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return v.Index(index).Interface()
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompareValues compares two values for sorting
|
||||||
|
// Returns -1 if a < b, 0 if a == b, 1 if a > b
|
||||||
|
func CompareValues(a, b interface{}) int {
|
||||||
|
if a == nil && b == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
if a == nil {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
if b == nil {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
va := reflect.ValueOf(a)
|
||||||
|
vb := reflect.ValueOf(b)
|
||||||
|
|
||||||
|
// Handle different types
|
||||||
|
switch va.Kind() {
|
||||||
|
case reflect.String:
|
||||||
|
if vb.Kind() == reflect.String {
|
||||||
|
as := va.String()
|
||||||
|
bs := vb.String()
|
||||||
|
if as < bs {
|
||||||
|
return -1
|
||||||
|
} else if as > bs {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
if vb.Kind() >= reflect.Int && vb.Kind() <= reflect.Int64 {
|
||||||
|
ai := va.Int()
|
||||||
|
bi := vb.Int()
|
||||||
|
if ai < bi {
|
||||||
|
return -1
|
||||||
|
} else if ai > bi {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||||
|
if vb.Kind() >= reflect.Uint && vb.Kind() <= reflect.Uint64 {
|
||||||
|
au := va.Uint()
|
||||||
|
bu := vb.Uint()
|
||||||
|
if au < bu {
|
||||||
|
return -1
|
||||||
|
} else if au > bu {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
if vb.Kind() == reflect.Float32 || vb.Kind() == reflect.Float64 {
|
||||||
|
af := va.Float()
|
||||||
|
bf := vb.Float()
|
||||||
|
if af < bf {
|
||||||
|
return -1
|
||||||
|
} else if af > bf {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetNestedValue gets a nested value using dot notation path
|
||||||
|
// Example: GetNestedValue(obj, "database.schema.table")
|
||||||
|
func GetNestedValue(m interface{}, path string) interface{} {
|
||||||
|
if path == "" {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
parts := strings.Split(path, ".")
|
||||||
|
current := m
|
||||||
|
|
||||||
|
for _, part := range parts {
|
||||||
|
if current == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
v := reflect.ValueOf(current)
|
||||||
|
v, ok := Deref(v)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Map:
|
||||||
|
keyVal := reflect.ValueOf(part)
|
||||||
|
mapVal := v.MapIndex(keyVal)
|
||||||
|
if !mapVal.IsValid() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
current = mapVal.Interface()
|
||||||
|
|
||||||
|
case reflect.Struct:
|
||||||
|
fieldVal := v.FieldByName(part)
|
||||||
|
if !fieldVal.IsValid() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
current = fieldVal.Interface()
|
||||||
|
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return current
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeepEqual performs a deep equality check between two values
|
||||||
|
func DeepEqual(a, b interface{}) bool {
|
||||||
|
return reflect.DeepEqual(a, b)
|
||||||
|
}
|
||||||
490
pkg/reflectutil/helpers_test.go
Normal file
490
pkg/reflectutil/helpers_test.go
Normal file
@@ -0,0 +1,490 @@
|
|||||||
|
package reflectutil
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
type testStruct struct {
|
||||||
|
Name string
|
||||||
|
Age int
|
||||||
|
Active bool
|
||||||
|
Nested *nestedStruct
|
||||||
|
Private string
|
||||||
|
}
|
||||||
|
|
||||||
|
type nestedStruct struct {
|
||||||
|
Value string
|
||||||
|
Count int
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDeref(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
wantValid bool
|
||||||
|
wantKind reflect.Kind
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "non-pointer int",
|
||||||
|
input: 42,
|
||||||
|
wantValid: true,
|
||||||
|
wantKind: reflect.Int,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "single pointer",
|
||||||
|
input: ptrInt(42),
|
||||||
|
wantValid: true,
|
||||||
|
wantKind: reflect.Int,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "double pointer",
|
||||||
|
input: ptrPtr(ptrInt(42)),
|
||||||
|
wantValid: true,
|
||||||
|
wantKind: reflect.Int,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "nil pointer",
|
||||||
|
input: (*int)(nil),
|
||||||
|
wantValid: false,
|
||||||
|
wantKind: reflect.Ptr,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "string",
|
||||||
|
input: "test",
|
||||||
|
wantValid: true,
|
||||||
|
wantKind: reflect.String,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "struct",
|
||||||
|
input: testStruct{Name: "test"},
|
||||||
|
wantValid: true,
|
||||||
|
wantKind: reflect.Struct,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
v := reflect.ValueOf(tt.input)
|
||||||
|
got, valid := Deref(v)
|
||||||
|
|
||||||
|
if valid != tt.wantValid {
|
||||||
|
t.Errorf("Deref() valid = %v, want %v", valid, tt.wantValid)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got.Kind() != tt.wantKind {
|
||||||
|
t.Errorf("Deref() kind = %v, want %v", got.Kind(), tt.wantKind)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDerefInterface(t *testing.T) {
|
||||||
|
i := 42
|
||||||
|
pi := &i
|
||||||
|
ppi := &pi
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
wantKind reflect.Kind
|
||||||
|
}{
|
||||||
|
{"int", 42, reflect.Int},
|
||||||
|
{"pointer to int", &i, reflect.Int},
|
||||||
|
{"double pointer to int", ppi, reflect.Int},
|
||||||
|
{"string", "test", reflect.String},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := DerefInterface(tt.input)
|
||||||
|
if got.Kind() != tt.wantKind {
|
||||||
|
t.Errorf("DerefInterface() kind = %v, want %v", got.Kind(), tt.wantKind)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetFieldValue(t *testing.T) {
|
||||||
|
ts := testStruct{
|
||||||
|
Name: "John",
|
||||||
|
Age: 30,
|
||||||
|
Active: true,
|
||||||
|
Nested: &nestedStruct{Value: "nested", Count: 5},
|
||||||
|
}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
item interface{}
|
||||||
|
field string
|
||||||
|
want interface{}
|
||||||
|
}{
|
||||||
|
{"struct field Name", ts, "Name", "John"},
|
||||||
|
{"struct field Age", ts, "Age", 30},
|
||||||
|
{"struct field Active", ts, "Active", true},
|
||||||
|
{"struct non-existent field", ts, "NonExistent", nil},
|
||||||
|
{"pointer to struct", &ts, "Name", "John"},
|
||||||
|
{"map string key", map[string]string{"key": "value"}, "key", "value"},
|
||||||
|
{"map int key", map[string]int{"count": 42}, "count", 42},
|
||||||
|
{"map non-existent key", map[string]string{"key": "value"}, "missing", nil},
|
||||||
|
{"nil pointer", (*testStruct)(nil), "Name", nil},
|
||||||
|
{"non-struct non-map", 42, "field", nil},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := GetFieldValue(tt.item, tt.field)
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("GetFieldValue() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsSliceOrArray(t *testing.T) {
|
||||||
|
arr := [3]int{1, 2, 3}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{"slice", []int{1, 2, 3}, true},
|
||||||
|
{"array", arr, true},
|
||||||
|
{"pointer to slice", &[]int{1, 2, 3}, true},
|
||||||
|
{"string", "test", false},
|
||||||
|
{"int", 42, false},
|
||||||
|
{"map", map[string]int{}, false},
|
||||||
|
{"nil slice", ([]int)(nil), true}, // nil slice is still Kind==Slice
|
||||||
|
{"nil pointer", (*[]int)(nil), false},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := IsSliceOrArray(tt.input)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("IsSliceOrArray() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsMap(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{"map[string]int", map[string]int{"a": 1}, true},
|
||||||
|
{"map[int]string", map[int]string{1: "a"}, true},
|
||||||
|
{"pointer to map", &map[string]int{"a": 1}, true},
|
||||||
|
{"slice", []int{1, 2, 3}, false},
|
||||||
|
{"string", "test", false},
|
||||||
|
{"int", 42, false},
|
||||||
|
{"nil map", (map[string]int)(nil), true}, // nil map is still Kind==Map
|
||||||
|
{"nil pointer", (*map[string]int)(nil), false},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := IsMap(tt.input)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("IsMap() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSliceLen(t *testing.T) {
|
||||||
|
arr := [3]int{1, 2, 3}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want int
|
||||||
|
}{
|
||||||
|
{"slice length 3", []int{1, 2, 3}, 3},
|
||||||
|
{"empty slice", []int{}, 0},
|
||||||
|
{"array length 3", arr, 3},
|
||||||
|
{"pointer to slice", &[]int{1, 2, 3}, 3},
|
||||||
|
{"not a slice", "test", 0},
|
||||||
|
{"int", 42, 0},
|
||||||
|
{"nil slice", ([]int)(nil), 0},
|
||||||
|
{"nil pointer", (*[]int)(nil), 0},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := SliceLen(tt.input)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("SliceLen() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapLen(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want int
|
||||||
|
}{
|
||||||
|
{"map length 2", map[string]int{"a": 1, "b": 2}, 2},
|
||||||
|
{"empty map", map[string]int{}, 0},
|
||||||
|
{"pointer to map", &map[string]int{"a": 1}, 1},
|
||||||
|
{"not a map", []int{1, 2, 3}, 0},
|
||||||
|
{"string", "test", 0},
|
||||||
|
{"nil map", (map[string]int)(nil), 0},
|
||||||
|
{"nil pointer", (*map[string]int)(nil), 0},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := MapLen(tt.input)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("MapLen() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSliceToInterfaces(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want []interface{}
|
||||||
|
}{
|
||||||
|
{"int slice", []int{1, 2, 3}, []interface{}{1, 2, 3}},
|
||||||
|
{"string slice", []string{"a", "b"}, []interface{}{"a", "b"}},
|
||||||
|
{"empty slice", []int{}, []interface{}{}},
|
||||||
|
{"pointer to slice", &[]int{1, 2}, []interface{}{1, 2}},
|
||||||
|
{"not a slice", "test", []interface{}{}},
|
||||||
|
{"nil slice", ([]int)(nil), []interface{}{}},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := SliceToInterfaces(tt.input)
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("SliceToInterfaces() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapKeys(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want []interface{}
|
||||||
|
}{
|
||||||
|
{"map with keys", map[string]int{"a": 1, "b": 2}, []interface{}{"a", "b"}},
|
||||||
|
{"empty map", map[string]int{}, []interface{}{}},
|
||||||
|
{"not a map", []int{1, 2, 3}, []interface{}{}},
|
||||||
|
{"nil map", (map[string]int)(nil), []interface{}{}},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := MapKeys(tt.input)
|
||||||
|
if len(got) != len(tt.want) {
|
||||||
|
t.Errorf("MapKeys() length = %v, want %v", len(got), len(tt.want))
|
||||||
|
}
|
||||||
|
// For maps, order is not guaranteed, so just check length
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapValues(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
want int // length of values
|
||||||
|
}{
|
||||||
|
{"map with values", map[string]int{"a": 1, "b": 2}, 2},
|
||||||
|
{"empty map", map[string]int{}, 0},
|
||||||
|
{"not a map", []int{1, 2, 3}, 0},
|
||||||
|
{"nil map", (map[string]int)(nil), 0},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := MapValues(tt.input)
|
||||||
|
if len(got) != tt.want {
|
||||||
|
t.Errorf("MapValues() length = %v, want %v", len(got), tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapGet(t *testing.T) {
|
||||||
|
m := map[string]int{"a": 1, "b": 2}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
key interface{}
|
||||||
|
want interface{}
|
||||||
|
}{
|
||||||
|
{"existing key", m, "a", 1},
|
||||||
|
{"existing key b", m, "b", 2},
|
||||||
|
{"non-existing key", m, "c", nil},
|
||||||
|
{"pointer to map", &m, "a", 1},
|
||||||
|
{"not a map", []int{1, 2}, 0, nil},
|
||||||
|
{"nil map", (map[string]int)(nil), "a", nil},
|
||||||
|
{"nil pointer", (*map[string]int)(nil), "a", nil},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := MapGet(tt.input, tt.key)
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("MapGet() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSliceIndex(t *testing.T) {
|
||||||
|
s := []int{10, 20, 30}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
slice interface{}
|
||||||
|
index int
|
||||||
|
want interface{}
|
||||||
|
}{
|
||||||
|
{"index 0", s, 0, 10},
|
||||||
|
{"index 1", s, 1, 20},
|
||||||
|
{"index 2", s, 2, 30},
|
||||||
|
{"negative index", s, -1, nil},
|
||||||
|
{"out of bounds", s, 5, nil},
|
||||||
|
{"pointer to slice", &s, 1, 20},
|
||||||
|
{"not a slice", "test", 0, nil},
|
||||||
|
{"nil slice", ([]int)(nil), 0, nil},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := SliceIndex(tt.slice, tt.index)
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("SliceIndex() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCompareValues(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
a interface{}
|
||||||
|
b interface{}
|
||||||
|
want int
|
||||||
|
}{
|
||||||
|
{"both nil", nil, nil, 0},
|
||||||
|
{"a nil", nil, 5, -1},
|
||||||
|
{"b nil", 5, nil, 1},
|
||||||
|
{"equal strings", "abc", "abc", 0},
|
||||||
|
{"a less than b strings", "abc", "xyz", -1},
|
||||||
|
{"a greater than b strings", "xyz", "abc", 1},
|
||||||
|
{"equal ints", 5, 5, 0},
|
||||||
|
{"a less than b ints", 3, 7, -1},
|
||||||
|
{"a greater than b ints", 10, 5, 1},
|
||||||
|
{"equal floats", 3.14, 3.14, 0},
|
||||||
|
{"a less than b floats", 2.5, 5.5, -1},
|
||||||
|
{"a greater than b floats", 10.5, 5.5, 1},
|
||||||
|
{"equal uints", uint(5), uint(5), 0},
|
||||||
|
{"different types", "abc", 123, 0},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := CompareValues(tt.a, tt.b)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("CompareValues(%v, %v) = %v, want %v", tt.a, tt.b, got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetNestedValue(t *testing.T) {
|
||||||
|
nested := map[string]interface{}{
|
||||||
|
"level1": map[string]interface{}{
|
||||||
|
"level2": map[string]interface{}{
|
||||||
|
"value": "deep",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
ts := testStruct{
|
||||||
|
Name: "John",
|
||||||
|
Nested: &nestedStruct{
|
||||||
|
Value: "nested value",
|
||||||
|
Count: 42,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input interface{}
|
||||||
|
path string
|
||||||
|
want interface{}
|
||||||
|
}{
|
||||||
|
{"empty path", nested, "", nested},
|
||||||
|
{"single level map", nested, "level1", nested["level1"]},
|
||||||
|
{"nested map", nested, "level1.level2", map[string]interface{}{"value": "deep"}},
|
||||||
|
{"deep nested map", nested, "level1.level2.value", "deep"},
|
||||||
|
{"struct field", ts, "Name", "John"},
|
||||||
|
{"nested struct field", ts, "Nested", ts.Nested},
|
||||||
|
{"non-existent path", nested, "missing.path", nil},
|
||||||
|
{"nil input", nil, "path", nil},
|
||||||
|
{"partial missing path", nested, "level1.missing", nil},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := GetNestedValue(tt.input, tt.path)
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("GetNestedValue() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDeepEqual(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
a interface{}
|
||||||
|
b interface{}
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{"equal ints", 42, 42, true},
|
||||||
|
{"different ints", 42, 43, false},
|
||||||
|
{"equal strings", "test", "test", true},
|
||||||
|
{"different strings", "test", "other", false},
|
||||||
|
{"equal slices", []int{1, 2, 3}, []int{1, 2, 3}, true},
|
||||||
|
{"different slices", []int{1, 2, 3}, []int{1, 2, 4}, false},
|
||||||
|
{"equal maps", map[string]int{"a": 1}, map[string]int{"a": 1}, true},
|
||||||
|
{"different maps", map[string]int{"a": 1}, map[string]int{"a": 2}, false},
|
||||||
|
{"both nil", nil, nil, true},
|
||||||
|
{"one nil", nil, 42, false},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := DeepEqual(tt.a, tt.b)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("DeepEqual(%v, %v) = %v, want %v", tt.a, tt.b, got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper functions
|
||||||
|
func ptrInt(i int) *int {
|
||||||
|
return &i
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrPtr(p *int) **int {
|
||||||
|
return &p
|
||||||
|
}
|
||||||
34
pkg/transform/doc.go
Normal file
34
pkg/transform/doc.go
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
// Package transform provides validation and transformation utilities for database models.
|
||||||
|
//
|
||||||
|
// # Overview
|
||||||
|
//
|
||||||
|
// The transform package contains a Transformer type that provides methods for validating
|
||||||
|
// and normalizing database schemas. It ensures schema correctness and consistency across
|
||||||
|
// different format conversions.
|
||||||
|
//
|
||||||
|
// # Features
|
||||||
|
//
|
||||||
|
// - Database validation (structure and naming conventions)
|
||||||
|
// - Schema validation (completeness and integrity)
|
||||||
|
// - Table validation (column definitions and constraints)
|
||||||
|
// - Data type normalization
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// transformer := transform.NewTransformer()
|
||||||
|
// err := transformer.ValidateDatabase(db)
|
||||||
|
// if err != nil {
|
||||||
|
// log.Fatal("Invalid database schema:", err)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// # Validation Scope
|
||||||
|
//
|
||||||
|
// The transformer validates:
|
||||||
|
// - Required fields presence
|
||||||
|
// - Naming convention adherence
|
||||||
|
// - Data type compatibility
|
||||||
|
// - Constraint consistency
|
||||||
|
// - Relationship integrity
|
||||||
|
//
|
||||||
|
// Note: Some validation methods are currently stubs and will be implemented as needed.
|
||||||
|
package transform
|
||||||
95
pkg/ui/column_dataops.go
Normal file
95
pkg/ui/column_dataops.go
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import "git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
|
||||||
|
// Column data operations - business logic for column management
|
||||||
|
|
||||||
|
// CreateColumn creates a new column and adds it to a table
|
||||||
|
func (se *SchemaEditor) CreateColumn(schemaIndex, tableIndex int, name, dataType string, isPrimaryKey, isNotNull bool) *models.Column {
|
||||||
|
table := se.GetTable(schemaIndex, tableIndex)
|
||||||
|
if table == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Columns == nil {
|
||||||
|
table.Columns = make(map[string]*models.Column)
|
||||||
|
}
|
||||||
|
|
||||||
|
newColumn := &models.Column{
|
||||||
|
Name: name,
|
||||||
|
Type: dataType,
|
||||||
|
IsPrimaryKey: isPrimaryKey,
|
||||||
|
NotNull: isNotNull,
|
||||||
|
}
|
||||||
|
table.UpdateDate()
|
||||||
|
table.Columns[name] = newColumn
|
||||||
|
return newColumn
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateColumn updates an existing column's properties
|
||||||
|
func (se *SchemaEditor) UpdateColumn(schemaIndex, tableIndex int, oldName, newName, dataType string, isPrimaryKey, isNotNull bool, defaultValue interface{}, description string) bool {
|
||||||
|
table := se.GetTable(schemaIndex, tableIndex)
|
||||||
|
if table == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
column, exists := table.Columns[oldName]
|
||||||
|
if !exists {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
table.UpdateDate()
|
||||||
|
|
||||||
|
// If name changed, remove old entry and create new one
|
||||||
|
if oldName != newName {
|
||||||
|
delete(table.Columns, oldName)
|
||||||
|
column.Name = newName
|
||||||
|
table.Columns[newName] = column
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update properties
|
||||||
|
column.Type = dataType
|
||||||
|
column.IsPrimaryKey = isPrimaryKey
|
||||||
|
column.NotNull = isNotNull
|
||||||
|
column.Default = defaultValue
|
||||||
|
column.Description = description
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteColumn removes a column from a table
|
||||||
|
func (se *SchemaEditor) DeleteColumn(schemaIndex, tableIndex int, columnName string) bool {
|
||||||
|
table := se.GetTable(schemaIndex, tableIndex)
|
||||||
|
if table == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists := table.Columns[columnName]; !exists {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
table.UpdateDate()
|
||||||
|
|
||||||
|
delete(table.Columns, columnName)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetColumn returns a column by name
|
||||||
|
func (se *SchemaEditor) GetColumn(schemaIndex, tableIndex int, columnName string) *models.Column {
|
||||||
|
table := se.GetTable(schemaIndex, tableIndex)
|
||||||
|
if table == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return table.Columns[columnName]
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAllColumns returns all columns in a table
|
||||||
|
func (se *SchemaEditor) GetAllColumns(schemaIndex, tableIndex int) map[string]*models.Column {
|
||||||
|
table := se.GetTable(schemaIndex, tableIndex)
|
||||||
|
if table == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return table.Columns
|
||||||
|
}
|
||||||
214
pkg/ui/column_screens.go
Normal file
214
pkg/ui/column_screens.go
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// showColumnEditor shows editor for a specific column
|
||||||
|
func (se *SchemaEditor) showColumnEditor(schemaIndex, tableIndex, colIndex int, column *models.Column) {
|
||||||
|
form := tview.NewForm()
|
||||||
|
|
||||||
|
// Store original name to handle renames
|
||||||
|
originalName := column.Name
|
||||||
|
|
||||||
|
// Local variables to collect changes
|
||||||
|
newName := column.Name
|
||||||
|
newType := column.Type
|
||||||
|
newIsPK := column.IsPrimaryKey
|
||||||
|
newIsNotNull := column.NotNull
|
||||||
|
newDefault := column.Default
|
||||||
|
newDescription := column.Description
|
||||||
|
newGUID := column.GUID
|
||||||
|
|
||||||
|
// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
|
||||||
|
columnTypes := []string{
|
||||||
|
// Numeric Types
|
||||||
|
"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
|
||||||
|
"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
|
||||||
|
// Character Types
|
||||||
|
"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
|
||||||
|
// Boolean
|
||||||
|
"BOOLEAN", "BOOL", "BIT",
|
||||||
|
// Date/Time Types
|
||||||
|
"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
|
||||||
|
"DATETIME", "DATETIME2", "DATEFIRST",
|
||||||
|
// UUID and JSON
|
||||||
|
"UUID", "GUID", "JSON", "JSONB",
|
||||||
|
// Binary Types
|
||||||
|
"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
|
||||||
|
// PostgreSQL Special Types
|
||||||
|
"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
|
||||||
|
"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
|
||||||
|
// Array Types
|
||||||
|
"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
|
||||||
|
// MySQL Specific
|
||||||
|
"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
|
||||||
|
// SQL Server Specific
|
||||||
|
"MONEY", "SMALLMONEY", "SQL_VARIANT",
|
||||||
|
}
|
||||||
|
selectedTypeIndex := 0
|
||||||
|
|
||||||
|
// Add existing type if not already in the list
|
||||||
|
typeExists := false
|
||||||
|
for i, opt := range columnTypes {
|
||||||
|
if opt == column.Type {
|
||||||
|
selectedTypeIndex = i
|
||||||
|
typeExists = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !typeExists && column.Type != "" {
|
||||||
|
columnTypes = append(columnTypes, column.Type)
|
||||||
|
selectedTypeIndex = len(columnTypes) - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
form.AddInputField("Column Name", column.Name, 40, nil, func(value string) {
|
||||||
|
newName = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddDropDown("Type", columnTypes, selectedTypeIndex, func(option string, index int) {
|
||||||
|
newType = option
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddCheckbox("Primary Key", column.IsPrimaryKey, func(checked bool) {
|
||||||
|
newIsPK = checked
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddCheckbox("Not Null", column.NotNull, func(checked bool) {
|
||||||
|
newIsNotNull = checked
|
||||||
|
})
|
||||||
|
|
||||||
|
defaultStr := ""
|
||||||
|
if column.Default != nil {
|
||||||
|
defaultStr = fmt.Sprintf("%v", column.Default)
|
||||||
|
}
|
||||||
|
form.AddInputField("Default Value", defaultStr, 40, nil, func(value string) {
|
||||||
|
newDefault = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddTextArea("Description", column.Description, 40, 5, 0, func(value string) {
|
||||||
|
newDescription = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("GUID", column.GUID, 40, nil, func(value string) {
|
||||||
|
newGUID = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Save", func() {
|
||||||
|
// Apply changes using dataops
|
||||||
|
se.UpdateColumn(schemaIndex, tableIndex, originalName, newName, newType, newIsPK, newIsNotNull, newDefault, newDescription)
|
||||||
|
se.db.Schemas[schemaIndex].Tables[tableIndex].Columns[newName].GUID = newGUID
|
||||||
|
|
||||||
|
se.pages.RemovePage("column-editor")
|
||||||
|
se.pages.SwitchToPage("table-editor")
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Delete", func() {
|
||||||
|
se.showDeleteColumnConfirm(schemaIndex, tableIndex, originalName)
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Back", func() {
|
||||||
|
// Discard changes - don't apply them
|
||||||
|
se.pages.RemovePage("column-editor")
|
||||||
|
se.pages.SwitchToPage("table-editor")
|
||||||
|
})
|
||||||
|
|
||||||
|
form.SetBorder(true).SetTitle(" Edit Column ").SetTitleAlign(tview.AlignLeft)
|
||||||
|
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.showExitConfirmation("column-editor", "table-editor")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("column-editor", form, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showNewColumnDialog shows dialog to create a new column
|
||||||
|
func (se *SchemaEditor) showNewColumnDialog(schemaIndex, tableIndex int) {
|
||||||
|
form := tview.NewForm()
|
||||||
|
|
||||||
|
columnName := ""
|
||||||
|
dataType := "VARCHAR(255)"
|
||||||
|
|
||||||
|
// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
|
||||||
|
columnTypes := []string{
|
||||||
|
// Numeric Types
|
||||||
|
"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
|
||||||
|
"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
|
||||||
|
// Character Types
|
||||||
|
"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
|
||||||
|
// Boolean
|
||||||
|
"BOOLEAN", "BOOL", "BIT",
|
||||||
|
// Date/Time Types
|
||||||
|
"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
|
||||||
|
"DATETIME", "DATETIME2", "DATEFIRST",
|
||||||
|
// UUID and JSON
|
||||||
|
"UUID", "GUID", "JSON", "JSONB",
|
||||||
|
// Binary Types
|
||||||
|
"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
|
||||||
|
// PostgreSQL Special Types
|
||||||
|
"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
|
||||||
|
"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
|
||||||
|
// Array Types
|
||||||
|
"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
|
||||||
|
// MySQL Specific
|
||||||
|
"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
|
||||||
|
// SQL Server Specific
|
||||||
|
"MONEY", "SMALLMONEY", "SQL_VARIANT",
|
||||||
|
}
|
||||||
|
selectedTypeIndex := 0
|
||||||
|
|
||||||
|
form.AddInputField("Column Name", "", 40, nil, func(value string) {
|
||||||
|
columnName = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddDropDown("Data Type", columnTypes, selectedTypeIndex, func(option string, index int) {
|
||||||
|
dataType = option
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddCheckbox("Primary Key", false, nil)
|
||||||
|
form.AddCheckbox("Not Null", false, nil)
|
||||||
|
form.AddCheckbox("Unique", false, nil)
|
||||||
|
|
||||||
|
form.AddButton("Save", func() {
|
||||||
|
if columnName == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get form values
|
||||||
|
isPK := form.GetFormItemByLabel("Primary Key").(*tview.Checkbox).IsChecked()
|
||||||
|
isNotNull := form.GetFormItemByLabel("Not Null").(*tview.Checkbox).IsChecked()
|
||||||
|
|
||||||
|
se.CreateColumn(schemaIndex, tableIndex, columnName, dataType, isPK, isNotNull)
|
||||||
|
|
||||||
|
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
|
||||||
|
se.pages.RemovePage("new-column")
|
||||||
|
se.pages.RemovePage("table-editor")
|
||||||
|
se.showTableEditor(schemaIndex, tableIndex, table)
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Back", func() {
|
||||||
|
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
|
||||||
|
se.pages.RemovePage("new-column")
|
||||||
|
se.pages.RemovePage("table-editor")
|
||||||
|
se.showTableEditor(schemaIndex, tableIndex, table)
|
||||||
|
})
|
||||||
|
|
||||||
|
form.SetBorder(true).SetTitle(" New Column ").SetTitleAlign(tview.AlignLeft)
|
||||||
|
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.showExitConfirmation("new-column", "table-editor")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("new-column", form, true, true)
|
||||||
|
}
|
||||||
15
pkg/ui/database_dataops.go
Normal file
15
pkg/ui/database_dataops.go
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// updateDatabase updates database properties
|
||||||
|
func (se *SchemaEditor) updateDatabase(name, description, comment, dbType, dbVersion string) {
|
||||||
|
se.db.Name = name
|
||||||
|
se.db.Description = description
|
||||||
|
se.db.Comment = comment
|
||||||
|
se.db.DatabaseType = models.DatabaseType(dbType)
|
||||||
|
se.db.DatabaseVersion = dbVersion
|
||||||
|
se.db.UpdateDate()
|
||||||
|
}
|
||||||
78
pkg/ui/database_screens.go
Normal file
78
pkg/ui/database_screens.go
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
)
|
||||||
|
|
||||||
|
// showEditDatabaseForm displays a dialog to edit database properties
|
||||||
|
func (se *SchemaEditor) showEditDatabaseForm() {
|
||||||
|
form := tview.NewForm()
|
||||||
|
|
||||||
|
dbName := se.db.Name
|
||||||
|
dbDescription := se.db.Description
|
||||||
|
dbComment := se.db.Comment
|
||||||
|
dbType := string(se.db.DatabaseType)
|
||||||
|
dbVersion := se.db.DatabaseVersion
|
||||||
|
dbGUID := se.db.GUID
|
||||||
|
|
||||||
|
// Database type options
|
||||||
|
dbTypeOptions := []string{"pgsql", "mssql", "sqlite"}
|
||||||
|
selectedTypeIndex := 0
|
||||||
|
for i, opt := range dbTypeOptions {
|
||||||
|
if opt == dbType {
|
||||||
|
selectedTypeIndex = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
form.AddInputField("Database Name", dbName, 40, nil, func(value string) {
|
||||||
|
dbName = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("Description", dbDescription, 50, nil, func(value string) {
|
||||||
|
dbDescription = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("Comment", dbComment, 50, nil, func(value string) {
|
||||||
|
dbComment = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddDropDown("Database Type", dbTypeOptions, selectedTypeIndex, func(option string, index int) {
|
||||||
|
dbType = option
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("Database Version", dbVersion, 20, nil, func(value string) {
|
||||||
|
dbVersion = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("GUID", dbGUID, 40, nil, func(value string) {
|
||||||
|
dbGUID = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Save", func() {
|
||||||
|
if dbName == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
se.updateDatabase(dbName, dbDescription, dbComment, dbType, dbVersion)
|
||||||
|
se.db.GUID = dbGUID
|
||||||
|
se.pages.RemovePage("edit-database")
|
||||||
|
se.pages.RemovePage("main")
|
||||||
|
se.pages.AddPage("main", se.createMainMenu(), true, true)
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Back", func() {
|
||||||
|
se.pages.RemovePage("edit-database")
|
||||||
|
})
|
||||||
|
|
||||||
|
form.SetBorder(true).SetTitle(" Edit Database ").SetTitleAlign(tview.AlignLeft)
|
||||||
|
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.showExitConfirmation("edit-database", "main")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("edit-database", form, true, true)
|
||||||
|
}
|
||||||
139
pkg/ui/dialogs.go
Normal file
139
pkg/ui/dialogs.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
)
|
||||||
|
|
||||||
|
// showExitConfirmation shows a confirmation dialog when trying to exit without saving
|
||||||
|
func (se *SchemaEditor) showExitConfirmation(pageToRemove, pageToSwitchTo string) {
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText("Exit without saving changes?").
|
||||||
|
AddButtons([]string{"Cancel", "No, exit without saving"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "No, exit without saving" {
|
||||||
|
se.pages.RemovePage(pageToRemove)
|
||||||
|
se.pages.SwitchToPage(pageToSwitchTo)
|
||||||
|
}
|
||||||
|
se.pages.RemovePage("exit-confirm")
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.RemovePage("exit-confirm")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("exit-confirm", modal, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showExitEditorConfirm shows confirmation dialog when trying to exit the entire editor
|
||||||
|
func (se *SchemaEditor) showExitEditorConfirm() {
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText("Exit RelSpec Editor? Press ESC again to confirm.").
|
||||||
|
AddButtons([]string{"Cancel", "Exit"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Exit" {
|
||||||
|
se.app.Stop()
|
||||||
|
}
|
||||||
|
se.pages.RemovePage("exit-editor-confirm")
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.app.Stop()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("exit-editor-confirm", modal, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showDeleteSchemaConfirm shows confirmation dialog for schema deletion
|
||||||
|
func (se *SchemaEditor) showDeleteSchemaConfirm(schemaIndex int) {
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText(fmt.Sprintf("Delete schema '%s'? This will delete all tables in this schema.",
|
||||||
|
se.db.Schemas[schemaIndex].Name)).
|
||||||
|
AddButtons([]string{"Cancel", "Delete"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Delete" {
|
||||||
|
se.DeleteSchema(schemaIndex)
|
||||||
|
se.pages.RemovePage("schema-editor")
|
||||||
|
se.pages.RemovePage("schemas")
|
||||||
|
se.showSchemaList()
|
||||||
|
}
|
||||||
|
se.pages.RemovePage("confirm-delete-schema")
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.RemovePage("confirm-delete-schema")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("confirm-delete-schema", modal, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showDeleteTableConfirm shows confirmation dialog for table deletion
|
||||||
|
func (se *SchemaEditor) showDeleteTableConfirm(schemaIndex, tableIndex int) {
|
||||||
|
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText(fmt.Sprintf("Delete table '%s'? This action cannot be undone.",
|
||||||
|
table.Name)).
|
||||||
|
AddButtons([]string{"Cancel", "Delete"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Delete" {
|
||||||
|
se.DeleteTable(schemaIndex, tableIndex)
|
||||||
|
schema := se.db.Schemas[schemaIndex]
|
||||||
|
se.pages.RemovePage("table-editor")
|
||||||
|
se.pages.RemovePage("schema-editor")
|
||||||
|
se.showSchemaEditor(schemaIndex, schema)
|
||||||
|
}
|
||||||
|
se.pages.RemovePage("confirm-delete-table")
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.RemovePage("confirm-delete-table")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("confirm-delete-table", modal, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showDeleteColumnConfirm shows confirmation dialog for column deletion
|
||||||
|
func (se *SchemaEditor) showDeleteColumnConfirm(schemaIndex, tableIndex int, columnName string) {
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText(fmt.Sprintf("Delete column '%s'? This action cannot be undone.",
|
||||||
|
columnName)).
|
||||||
|
AddButtons([]string{"Cancel", "Delete"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Delete" {
|
||||||
|
se.DeleteColumn(schemaIndex, tableIndex, columnName)
|
||||||
|
se.pages.RemovePage("column-editor")
|
||||||
|
se.pages.RemovePage("confirm-delete-column")
|
||||||
|
se.pages.SwitchToPage("table-editor")
|
||||||
|
} else {
|
||||||
|
se.pages.RemovePage("confirm-delete-column")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.RemovePage("confirm-delete-column")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("confirm-delete-column", modal, true, true)
|
||||||
|
}
|
||||||
57
pkg/ui/doc.go
Normal file
57
pkg/ui/doc.go
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
// Package ui provides an interactive terminal user interface (TUI) for editing database schemas.
|
||||||
|
//
|
||||||
|
// # Overview
|
||||||
|
//
|
||||||
|
// The ui package implements a full-featured terminal-based schema editor using tview,
|
||||||
|
// allowing users to visually create, modify, and manage database schemas without writing
|
||||||
|
// code or SQL.
|
||||||
|
//
|
||||||
|
// # Features
|
||||||
|
//
|
||||||
|
// The schema editor supports:
|
||||||
|
// - Database management: Edit name, description, and properties
|
||||||
|
// - Schema management: Create, edit, delete schemas
|
||||||
|
// - Table management: Create, edit, delete tables
|
||||||
|
// - Column management: Add, modify, delete columns with full property support
|
||||||
|
// - Relationship management: Define and edit table relationships
|
||||||
|
// - Domain management: Organize tables into logical domains
|
||||||
|
// - Import & merge: Combine schemas from multiple sources
|
||||||
|
// - Save: Export to any supported format
|
||||||
|
//
|
||||||
|
// # Architecture
|
||||||
|
//
|
||||||
|
// The package is organized into several components:
|
||||||
|
// - editor.go: Main editor and application lifecycle
|
||||||
|
// - *_screens.go: UI screens for each entity type
|
||||||
|
// - *_dataops.go: Business logic and data operations
|
||||||
|
// - dialogs.go: Reusable dialog components
|
||||||
|
// - load_save_screens.go: File I/O and format selection
|
||||||
|
// - main_menu.go: Primary navigation menu
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// editor := ui.NewSchemaEditor(database)
|
||||||
|
// if err := editor.Run(); err != nil {
|
||||||
|
// log.Fatal(err)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Or with pre-configured load/save options:
|
||||||
|
//
|
||||||
|
// editor := ui.NewSchemaEditorWithConfigs(database, loadConfig, saveConfig)
|
||||||
|
// if err := editor.Run(); err != nil {
|
||||||
|
// log.Fatal(err)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// # Navigation
|
||||||
|
//
|
||||||
|
// - Arrow keys: Navigate between items
|
||||||
|
// - Enter: Select/edit item
|
||||||
|
// - Tab/Shift+Tab: Navigate between buttons
|
||||||
|
// - Escape: Go back/cancel
|
||||||
|
// - Letter shortcuts: Quick actions (e.g., 'n' for new, 'e' for edit, 'd' for delete)
|
||||||
|
//
|
||||||
|
// # Integration
|
||||||
|
//
|
||||||
|
// The editor integrates with all readers and writers, supporting load/save operations
|
||||||
|
// for any format supported by RelSpec (DBML, PostgreSQL, GORM, Prisma, etc.).
|
||||||
|
package ui
|
||||||
35
pkg/ui/domain_dataops.go
Normal file
35
pkg/ui/domain_dataops.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// createDomain creates a new domain
|
||||||
|
func (se *SchemaEditor) createDomain(name, description string) {
|
||||||
|
domain := &models.Domain{
|
||||||
|
Name: name,
|
||||||
|
Description: description,
|
||||||
|
Tables: make([]*models.DomainTable, 0),
|
||||||
|
Sequence: uint(len(se.db.Domains)),
|
||||||
|
}
|
||||||
|
|
||||||
|
se.db.Domains = append(se.db.Domains, domain)
|
||||||
|
se.showDomainList()
|
||||||
|
}
|
||||||
|
|
||||||
|
// updateDomain updates an existing domain
|
||||||
|
func (se *SchemaEditor) updateDomain(index int, name, description string) {
|
||||||
|
if index >= 0 && index < len(se.db.Domains) {
|
||||||
|
se.db.Domains[index].Name = name
|
||||||
|
se.db.Domains[index].Description = description
|
||||||
|
se.showDomainList()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// deleteDomain deletes a domain by index
|
||||||
|
func (se *SchemaEditor) deleteDomain(index int) {
|
||||||
|
if index >= 0 && index < len(se.db.Domains) {
|
||||||
|
se.db.Domains = append(se.db.Domains[:index], se.db.Domains[index+1:]...)
|
||||||
|
se.showDomainList()
|
||||||
|
}
|
||||||
|
}
|
||||||
258
pkg/ui/domain_screens.go
Normal file
258
pkg/ui/domain_screens.go
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// showDomainList displays the domain management screen
|
||||||
|
func (se *SchemaEditor) showDomainList() {
|
||||||
|
flex := tview.NewFlex().SetDirection(tview.FlexRow)
|
||||||
|
|
||||||
|
// Title
|
||||||
|
title := tview.NewTextView().
|
||||||
|
SetText("[::b]Manage Domains").
|
||||||
|
SetDynamicColors(true).
|
||||||
|
SetTextAlign(tview.AlignCenter)
|
||||||
|
|
||||||
|
// Create domains table
|
||||||
|
domainTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)
|
||||||
|
|
||||||
|
// Add header row
|
||||||
|
headers := []string{"Name", "Sequence", "Total Tables", "Description"}
|
||||||
|
headerWidths := []int{20, 15, 20}
|
||||||
|
for i, header := range headers {
|
||||||
|
padding := ""
|
||||||
|
if i < len(headerWidths) {
|
||||||
|
padding = strings.Repeat(" ", headerWidths[i]-len(header))
|
||||||
|
}
|
||||||
|
cell := tview.NewTableCell(header + padding).
|
||||||
|
SetTextColor(tcell.ColorYellow).
|
||||||
|
SetSelectable(false).
|
||||||
|
SetAlign(tview.AlignLeft)
|
||||||
|
domainTable.SetCell(0, i, cell)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add existing domains
|
||||||
|
for row, domain := range se.db.Domains {
|
||||||
|
domain := domain // capture for closure
|
||||||
|
|
||||||
|
// Name - pad to 20 chars
|
||||||
|
nameStr := fmt.Sprintf("%-20s", domain.Name)
|
||||||
|
nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
|
||||||
|
domainTable.SetCell(row+1, 0, nameCell)
|
||||||
|
|
||||||
|
// Sequence - pad to 15 chars
|
||||||
|
seqStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", domain.Sequence))
|
||||||
|
seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
|
||||||
|
domainTable.SetCell(row+1, 1, seqCell)
|
||||||
|
|
||||||
|
// Total Tables - pad to 20 chars
|
||||||
|
tablesStr := fmt.Sprintf("%-20s", fmt.Sprintf("%d", len(domain.Tables)))
|
||||||
|
tablesCell := tview.NewTableCell(tablesStr).SetSelectable(true)
|
||||||
|
domainTable.SetCell(row+1, 2, tablesCell)
|
||||||
|
|
||||||
|
// Description - no padding, takes remaining space
|
||||||
|
descCell := tview.NewTableCell(domain.Description).SetSelectable(true)
|
||||||
|
domainTable.SetCell(row+1, 3, descCell)
|
||||||
|
}
|
||||||
|
|
||||||
|
domainTable.SetTitle(" Domains ").SetBorder(true).SetTitleAlign(tview.AlignLeft)
|
||||||
|
|
||||||
|
// Action buttons flex
|
||||||
|
btnFlex := tview.NewFlex()
|
||||||
|
btnNewDomain := tview.NewButton("New Domain [n]").SetSelectedFunc(func() {
|
||||||
|
se.showNewDomainDialog()
|
||||||
|
})
|
||||||
|
btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
|
||||||
|
se.pages.SwitchToPage("main")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
})
|
||||||
|
|
||||||
|
// Set up button input captures for Tab/Shift+Tab navigation
|
||||||
|
btnNewDomain.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyBacktab {
|
||||||
|
se.app.SetFocus(domainTable)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyTab {
|
||||||
|
se.app.SetFocus(btnBack)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyBacktab {
|
||||||
|
se.app.SetFocus(btnNewDomain)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyTab {
|
||||||
|
se.app.SetFocus(domainTable)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
btnFlex.AddItem(btnNewDomain, 0, 1, true).
|
||||||
|
AddItem(btnBack, 0, 1, false)
|
||||||
|
|
||||||
|
domainTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.SwitchToPage("main")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyTab {
|
||||||
|
se.app.SetFocus(btnNewDomain)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyEnter {
|
||||||
|
row, _ := domainTable.GetSelection()
|
||||||
|
if row > 0 && row <= len(se.db.Domains) { // Skip header row
|
||||||
|
domainIndex := row - 1
|
||||||
|
se.showDomainEditor(domainIndex, se.db.Domains[domainIndex])
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if event.Rune() == 'n' {
|
||||||
|
se.showNewDomainDialog()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Rune() == 'b' {
|
||||||
|
se.pages.SwitchToPage("main")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
flex.AddItem(title, 1, 0, false).
|
||||||
|
AddItem(domainTable, 0, 1, true).
|
||||||
|
AddItem(btnFlex, 1, 0, false)
|
||||||
|
|
||||||
|
se.pages.AddPage("domains", flex, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showNewDomainDialog displays a dialog to create a new domain
|
||||||
|
func (se *SchemaEditor) showNewDomainDialog() {
|
||||||
|
form := tview.NewForm()
|
||||||
|
|
||||||
|
domainName := ""
|
||||||
|
domainDesc := ""
|
||||||
|
|
||||||
|
form.AddInputField("Name", "", 40, nil, func(value string) {
|
||||||
|
domainName = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("Description", "", 50, nil, func(value string) {
|
||||||
|
domainDesc = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Save", func() {
|
||||||
|
if domainName == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
se.createDomain(domainName, domainDesc)
|
||||||
|
se.pages.RemovePage("new-domain")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
se.showDomainList()
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Back", func() {
|
||||||
|
se.pages.RemovePage("new-domain")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
se.showDomainList()
|
||||||
|
})
|
||||||
|
|
||||||
|
form.SetBorder(true).SetTitle(" New Domain ").SetTitleAlign(tview.AlignLeft)
|
||||||
|
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.showExitConfirmation("new-domain", "domains")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("new-domain", form, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showDomainEditor displays a dialog to edit an existing domain
|
||||||
|
func (se *SchemaEditor) showDomainEditor(index int, domain *models.Domain) {
|
||||||
|
form := tview.NewForm()
|
||||||
|
|
||||||
|
domainName := domain.Name
|
||||||
|
domainDesc := domain.Description
|
||||||
|
|
||||||
|
form.AddInputField("Name", domainName, 40, nil, func(value string) {
|
||||||
|
domainName = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddInputField("Description", domainDesc, 50, nil, func(value string) {
|
||||||
|
domainDesc = value
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Save", func() {
|
||||||
|
if domainName == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
se.updateDomain(index, domainName, domainDesc)
|
||||||
|
se.pages.RemovePage("edit-domain")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
se.showDomainList()
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Delete", func() {
|
||||||
|
se.showDeleteDomainConfirm(index)
|
||||||
|
})
|
||||||
|
|
||||||
|
form.AddButton("Back", func() {
|
||||||
|
se.pages.RemovePage("edit-domain")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
se.showDomainList()
|
||||||
|
})
|
||||||
|
|
||||||
|
form.SetBorder(true).SetTitle(" Edit Domain ").SetTitleAlign(tview.AlignLeft)
|
||||||
|
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.showExitConfirmation("edit-domain", "domains")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddPage("edit-domain", form, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showDeleteDomainConfirm shows a confirmation dialog before deleting a domain
|
||||||
|
func (se *SchemaEditor) showDeleteDomainConfirm(index int) {
|
||||||
|
modal := tview.NewModal().
|
||||||
|
SetText(fmt.Sprintf("Delete domain '%s'? This action cannot be undone.", se.db.Domains[index].Name)).
|
||||||
|
AddButtons([]string{"Cancel", "Delete"}).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Delete" {
|
||||||
|
se.deleteDomain(index)
|
||||||
|
se.pages.RemovePage("delete-domain-confirm")
|
||||||
|
se.pages.RemovePage("edit-domain")
|
||||||
|
se.pages.RemovePage("domains")
|
||||||
|
se.showDomainList()
|
||||||
|
} else {
|
||||||
|
se.pages.RemovePage("delete-domain-confirm")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
se.pages.RemovePage("delete-domain-confirm")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
|
||||||
|
se.pages.AddAndSwitchToPage("delete-domain-confirm", modal, true)
|
||||||
|
}
|
||||||
73
pkg/ui/editor.go
Normal file
73
pkg/ui/editor.go
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SchemaEditor represents the interactive schema editor
|
||||||
|
type SchemaEditor struct {
|
||||||
|
db *models.Database
|
||||||
|
app *tview.Application
|
||||||
|
pages *tview.Pages
|
||||||
|
loadConfig *LoadConfig
|
||||||
|
saveConfig *SaveConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSchemaEditor creates a new schema editor
|
||||||
|
func NewSchemaEditor(db *models.Database) *SchemaEditor {
|
||||||
|
return &SchemaEditor{
|
||||||
|
db: db,
|
||||||
|
app: tview.NewApplication(),
|
||||||
|
pages: tview.NewPages(),
|
||||||
|
loadConfig: nil,
|
||||||
|
saveConfig: nil,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSchemaEditorWithConfigs creates a new schema editor with load/save configurations
|
||||||
|
func NewSchemaEditorWithConfigs(db *models.Database, loadConfig *LoadConfig, saveConfig *SaveConfig) *SchemaEditor {
|
||||||
|
return &SchemaEditor{
|
||||||
|
db: db,
|
||||||
|
app: tview.NewApplication(),
|
||||||
|
pages: tview.NewPages(),
|
||||||
|
loadConfig: loadConfig,
|
||||||
|
saveConfig: saveConfig,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run starts the interactive editor
|
||||||
|
func (se *SchemaEditor) Run() error {
|
||||||
|
// If no database is loaded, show load screen
|
||||||
|
if se.db == nil {
|
||||||
|
se.showLoadScreen()
|
||||||
|
} else {
|
||||||
|
// Create main menu view
|
||||||
|
mainMenu := se.createMainMenu()
|
||||||
|
se.pages.AddPage("main", mainMenu, true, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the application
|
||||||
|
if err := se.app.SetRoot(se.pages, true).Run(); err != nil {
|
||||||
|
return fmt.Errorf("application error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDatabase returns the current database
|
||||||
|
func (se *SchemaEditor) GetDatabase() *models.Database {
|
||||||
|
return se.db
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to get sorted column names
|
||||||
|
func getColumnNames(table *models.Table) []string {
|
||||||
|
names := make([]string, 0, len(table.Columns))
|
||||||
|
for name := range table.Columns {
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
791
pkg/ui/load_save_screens.go
Normal file
791
pkg/ui/load_save_screens.go
Normal file
@@ -0,0 +1,791 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/merge"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
rbun "git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||||
|
rdbml "git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
|
rdctx "git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
||||||
|
rdrawdb "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
|
rdrizzle "git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
||||||
|
rgorm "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||||
|
rgraphql "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
||||||
|
rjson "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
|
rpgsql "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
rprisma "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
rtypeorm "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
|
ryaml "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
||||||
|
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
||||||
|
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
||||||
|
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
||||||
|
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
|
||||||
|
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||||
|
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
|
||||||
|
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
||||||
|
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||||
|
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
||||||
|
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
||||||
|
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LoadConfig holds the configuration for loading a database
|
||||||
|
type LoadConfig struct {
|
||||||
|
SourceType string
|
||||||
|
FilePath string
|
||||||
|
ConnString string
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveConfig holds the configuration for saving a database
|
||||||
|
type SaveConfig struct {
|
||||||
|
TargetType string
|
||||||
|
FilePath string
|
||||||
|
ConnString string
|
||||||
|
}
|
||||||
|
|
||||||
|
// showLoadScreen displays the database load screen
func (se *SchemaEditor) showLoadScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Load Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Load Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// File path and connection string inputs
	filePath := ""
	connString := ""

	form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", "", 50, nil, func(value string) {
		filePath = value
	})

	form.AddInputField("Connection String", "", 50, nil, func(value string) {
		connString = value
	})

	form.AddTextView("Help", getLoadHelpText(), 0, 5, true, false)

	// Buttons
	form.AddButton("Load [l]", func() {
		se.loadDatabase(currentFormat, filePath, connString)
	})

	form.AddButton("Create New [n]", func() {
		se.createNewDatabase()
	})

	form.AddButton("Exit [q]", func() {
		se.app.Stop()
	})

	// Keyboard shortcuts. tview keeps only the last handler passed to
	// SetInputCapture, so the form gets exactly one handler: Escape exits,
	// while the 'l'/'n'/'q' runes pass through so they can still be typed
	// into the input fields (the buttons remain reachable via Tab and Enter).
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.app.Stop()
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("load-database", flex, true)
}
// showSaveScreen displays the save database screen
func (se *SchemaEditor) showSaveScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Save Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Save Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// File path input
	filePath := ""
	if se.saveConfig != nil {
		// Pre-populate with existing save config
		for i, format := range formatOptions {
			if format == se.saveConfig.TargetType {
				selectedFormat = i
				currentFormat = format
				break
			}
		}
		filePath = se.saveConfig.FilePath
	}

	form.AddDropDown("Format", formatOptions, selectedFormat, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", filePath, 50, nil, func(value string) {
		filePath = value
	})

	form.AddTextView("Help", getSaveHelpText(), 0, 5, true, false)

	// Buttons
	form.AddButton("Save [s]", func() {
		se.saveDatabase(currentFormat, filePath)
	})

	form.AddButton("Update Existing Database [u]", func() {
		// A source must be known, from either the save config or the load config
		if se.saveConfig != nil || se.loadConfig != nil {
			se.showUpdateExistingDatabaseConfirm()
		} else {
			se.showErrorDialog("Error", "No database source found. Use Save instead.")
		}
	})

	form.AddButton("Back [b]", func() {
		se.pages.RemovePage("save-database")
		se.pages.SwitchToPage("main")
	})

	// Keyboard shortcuts
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("save-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		switch event.Rune() {
		case 's':
			se.saveDatabase(currentFormat, filePath)
			return nil
		case 'u':
			// A source must be known, from either the save config or the load config
			if se.saveConfig != nil || se.loadConfig != nil {
				se.showUpdateExistingDatabaseConfirm()
			} else {
				se.showErrorDialog("Error", "No database source found. Use Save instead.")
			}
			return nil
		case 'b':
			se.pages.RemovePage("save-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("save-database", flex, true)
}
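// The "~" home-directory expansion pattern appears verbatim in loadDatabase
// (below), saveDatabase, and importAndMergeDatabase. A small helper could
// factor it out (a sketch, not part of this diff; it only uses the "os" and
// "path/filepath" imports already present in this file):
//
//	func expandHome(p string) string {
//		if len(p) > 0 && p[0] == '~' {
//			if home, err := os.UserHomeDir(); err == nil {
//				return filepath.Join(home, p[1:])
//			}
//		}
//		return p
//	}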
// loadDatabase loads a database from the specified configuration
func (se *SchemaEditor) loadDatabase(format, filePath, connString string) {
	// Validate input
	if format == "pgsql" {
		if connString == "" {
			se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
			return
		}
	} else {
		if filePath == "" {
			se.showErrorDialog("Error", "File path is required for "+format)
			return
		}
		// Expand home directory
		if len(filePath) > 0 && filePath[0] == '~' {
			home, err := os.UserHomeDir()
			if err == nil {
				filePath = filepath.Join(home, filePath[1:])
			}
		}
	}

	// Create reader
	var reader readers.Reader
	switch format {
	case "dbml":
		reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
		return
	}

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		se.showErrorDialog("Load Error", fmt.Sprintf("Failed to load database: %v", err))
		return
	}

	// Store load config
	se.loadConfig = &LoadConfig{
		SourceType: format,
		FilePath:   filePath,
		ConnString: connString,
	}

	// Update database
	se.db = db

	// Show success and switch to main menu
	se.showSuccessDialog("Load Complete", fmt.Sprintf("Successfully loaded database '%s'", db.Name), func() {
		se.pages.RemovePage("load-database")
		se.pages.RemovePage("main")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})
}
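// The format-to-reader switch above is duplicated in importAndMergeDatabase
// further down. A lookup table over the already-imported constructors would
// keep the two copies in sync (a sketch under that assumption, not part of
// this diff):
//
//	var readerFactories = map[string]func(*readers.ReaderOptions) readers.Reader{
//		"dbml": func(o *readers.ReaderOptions) readers.Reader { return rdbml.NewReader(o) },
//		"json": func(o *readers.ReaderOptions) readers.Reader { return rjson.NewReader(o) },
//		// ... the remaining formats follow the same shape
//	}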
// saveDatabase saves the database to the specified configuration
func (se *SchemaEditor) saveDatabase(format, filePath string) {
	// Validate input
	if format == "pgsql" {
		se.showErrorDialog("Error", "Direct PostgreSQL save is not supported from the UI. Use --to pgsql --to-path output.sql")
		return
	}

	if filePath == "" {
		se.showErrorDialog("Error", "File path is required")
		return
	}

	// Expand home directory
	if len(filePath) > 0 && filePath[0] == '~' {
		home, err := os.UserHomeDir()
		if err == nil {
			filePath = filepath.Join(home, filePath[1:])
		}
	}

	// Create writer
	var writer writers.Writer
	switch format {
	case "dbml":
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "dctx":
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drawdb":
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "graphql":
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "json":
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "yaml":
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "gorm":
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "bun":
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drizzle":
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "prisma":
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "typeorm":
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "pgsql":
		// Unreachable while the early pgsql check above is in place
		writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
		return
	}

	// Write database
	err := writer.WriteDatabase(se.db)
	if err != nil {
		se.showErrorDialog("Save Error", fmt.Sprintf("Failed to save database: %v", err))
		return
	}

	// Store save config
	se.saveConfig = &SaveConfig{
		TargetType: format,
		FilePath:   filePath,
	}

	// Show success
	se.showSuccessDialog("Save Complete", fmt.Sprintf("Successfully saved database to %s", filePath), func() {
		se.pages.RemovePage("save-database")
		se.pages.SwitchToPage("main")
	})
}
// createNewDatabase creates a new empty database
func (se *SchemaEditor) createNewDatabase() {
	// Create a new empty database
	se.db = &models.Database{
		Name:    "New Database",
		Schemas: []*models.Schema{},
	}

	// Clear load config
	se.loadConfig = nil

	// Show success and switch to main menu
	se.showSuccessDialog("New Database", "Created new empty database", func() {
		se.pages.RemovePage("load-database")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})
}
// showErrorDialog displays an error dialog
func (se *SchemaEditor) showErrorDialog(_title, message string) {
	modal := tview.NewModal().
		SetText(message).
		AddButtons([]string{"OK"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage("error-dialog")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("error-dialog")
			return nil
		}
		return event
	})

	se.pages.AddPage("error-dialog", modal, true, true)
}
// showSuccessDialog displays a success dialog
func (se *SchemaEditor) showSuccessDialog(_title, message string, onClose func()) {
	modal := tview.NewModal().
		SetText(message).
		AddButtons([]string{"OK"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage("success-dialog")
			if onClose != nil {
				onClose()
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("success-dialog")
			if onClose != nil {
				onClose()
			}
			return nil
		}
		return event
	})

	se.pages.AddPage("success-dialog", modal, true, true)
}
// getLoadHelpText returns the help text for the load screen
func getLoadHelpText() string {
	return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
Database formats: pgsql (requires connection string)

Examples:
- File path: ~/schemas/mydb.dbml or /path/to/schema.json
- Connection: postgres://user:pass@localhost/dbname`
}
// showUpdateExistingDatabaseConfirm displays a confirmation dialog before updating the existing database
func (se *SchemaEditor) showUpdateExistingDatabaseConfirm() {
	// Use saveConfig if available, otherwise use loadConfig
	var targetType, targetPath string
	if se.saveConfig != nil {
		targetType = se.saveConfig.TargetType
		targetPath = se.saveConfig.FilePath
	} else if se.loadConfig != nil {
		targetType = se.loadConfig.SourceType
		targetPath = se.loadConfig.FilePath
	} else {
		return
	}

	confirmText := fmt.Sprintf("Update existing database?\n\nFormat: %s\nPath: %s\n\nThis will overwrite the source.",
		targetType, targetPath)

	modal := tview.NewModal().
		SetText(confirmText).
		AddButtons([]string{"Cancel", "Update"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Update" {
				se.pages.RemovePage("update-confirm")
				se.pages.RemovePage("save-database")
				se.saveDatabase(targetType, targetPath)
				se.pages.SwitchToPage("main")
			} else {
				se.pages.RemovePage("update-confirm")
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("update-confirm")
			return nil
		}
		return event
	})

	se.pages.AddAndSwitchToPage("update-confirm", modal, true)
}
// getSaveHelpText returns the help text for the save screen
func getSaveHelpText() string {
	return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql (SQL export)

Examples:
- File: ~/schemas/mydb.dbml
- Directory (for code formats): ./models/`
}
// showImportScreen displays the import/merge database screen
func (se *SchemaEditor) showImportScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Import & Merge Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Import Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// Inputs and skip flags
	filePath := ""
	connString := ""
	skipDomains := false
	skipRelations := false
	skipEnums := false
	skipViews := false
	skipSequences := false
	skipTables := ""

	form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", "", 50, nil, func(value string) {
		filePath = value
	})

	form.AddInputField("Connection String", "", 50, nil, func(value string) {
		connString = value
	})

	form.AddInputField("Skip Tables (comma-separated)", "", 50, nil, func(value string) {
		skipTables = value
	})

	form.AddCheckbox("Skip Domains", false, func(checked bool) {
		skipDomains = checked
	})

	form.AddCheckbox("Skip Relations", false, func(checked bool) {
		skipRelations = checked
	})

	form.AddCheckbox("Skip Enums", false, func(checked bool) {
		skipEnums = checked
	})

	form.AddCheckbox("Skip Views", false, func(checked bool) {
		skipViews = checked
	})

	form.AddCheckbox("Skip Sequences", false, func(checked bool) {
		skipSequences = checked
	})

	form.AddTextView("Help", getImportHelpText(), 0, 7, true, false)

	// Buttons
	form.AddButton("Import & Merge [i]", func() {
		se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
	})

	form.AddButton("Back [b]", func() {
		se.pages.RemovePage("import-database")
		se.pages.SwitchToPage("main")
	})

	form.AddButton("Exit [q]", func() {
		se.app.Stop()
	})

	// Keyboard shortcuts
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("import-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		switch event.Rune() {
		case 'i':
			se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
			return nil
		case 'b':
			se.pages.RemovePage("import-database")
			se.pages.SwitchToPage("main")
			return nil
		case 'q':
			se.app.Stop()
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("import-database", flex, true)
}
// importAndMergeDatabase imports and merges a database from the specified configuration
func (se *SchemaEditor) importAndMergeDatabase(format, filePath, connString string, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
	// Validate input
	if format == "pgsql" {
		if connString == "" {
			se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
			return
		}
	} else {
		if filePath == "" {
			se.showErrorDialog("Error", "File path is required for "+format)
			return
		}
		// Expand home directory
		if len(filePath) > 0 && filePath[0] == '~' {
			home, err := os.UserHomeDir()
			if err == nil {
				filePath = filepath.Join(home, filePath[1:])
			}
		}
	}

	// Create reader
	var reader readers.Reader
	switch format {
	case "dbml":
		reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
		return
	}

	// Read the database to import
	importDb, err := reader.ReadDatabase()
	if err != nil {
		se.showErrorDialog("Import Error", fmt.Sprintf("Failed to read database: %v", err))
		return
	}

	// Show confirmation dialog
	se.showImportConfirmation(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
}
// showImportConfirmation shows a confirmation dialog before merging
func (se *SchemaEditor) showImportConfirmation(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
	confirmText := fmt.Sprintf("Import & Merge Database?\n\nSource: %s\nTarget: %s\n\nThis will add missing schemas, tables, columns, and other objects from the source to your database.\n\nExisting items will NOT be modified.",
		importDb.Name, se.db.Name)

	modal := tview.NewModal().
		SetText(confirmText).
		AddButtons([]string{"Cancel", "Merge"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage("import-confirm")
			if buttonLabel == "Merge" {
				se.performMerge(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("import-confirm")
			se.pages.SwitchToPage("import-database")
			return nil
		}
		return event
	})

	se.pages.AddAndSwitchToPage("import-confirm", modal, true)
}
// performMerge performs the actual merge operation
func (se *SchemaEditor) performMerge(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
	// Create merge options
	opts := &merge.MergeOptions{
		SkipDomains:   skipDomains,
		SkipRelations: skipRelations,
		SkipEnums:     skipEnums,
		SkipViews:     skipViews,
		SkipSequences: skipSequences,
	}

	// Parse skip tables
	if skipTables != "" {
		opts.SkipTableNames = parseSkipTablesUI(skipTables)
	}

	// Perform the merge
	result := merge.MergeDatabases(se.db, importDb, opts)

	// Update the database timestamp
	se.db.UpdateDate()

	// Show success dialog with summary
	summary := merge.GetMergeSummary(result)
	se.showSuccessDialog("Import Complete", summary, func() {
		se.pages.RemovePage("import-database")
		se.pages.RemovePage("main")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})
}
// getImportHelpText returns the help text for the import screen
func getImportHelpText() string {
	return `Import & Merge: Adds missing schemas, tables, columns, and other objects to your existing database.

File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
Database formats: pgsql (requires connection string)

Skip options: Check to exclude specific object types from the merge.`
}
func parseSkipTablesUI(skipTablesStr string) map[string]bool {
	skipTables := make(map[string]bool)
	if skipTablesStr == "" {
		return skipTables
	}

	// Split by comma and trim whitespace
	parts := strings.Split(skipTablesStr, ",")
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		if trimmed != "" {
			// Store in lowercase for case-insensitive matching
			skipTables[strings.ToLower(trimmed)] = true
		}
	}

	return skipTables
}
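For reference, a quick sketch of what parseSkipTablesUI produces for a typical input (hypothetical values; the behavior is read directly from the function above):

	skip := parseSkipTablesUI(" Users, ORDERS ,, audit_log ")
	// skip == map[string]bool{"users": true, "orders": true, "audit_log": true}
	// Segments are trimmed, empty entries are dropped, and names are
	// lowercased for case-insensitive matching during the merge.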
65	pkg/ui/main_menu.go	Normal file
@@ -0,0 +1,65 @@
package ui

import (
	"fmt"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

// createMainMenu creates the main menu screen
func (se *SchemaEditor) createMainMenu() tview.Primitive {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title with database name
	dbName := se.db.Name
	if dbName == "" {
		dbName = "Untitled"
	}
	updatedAtStr := ""
	if se.db.UpdatedAt != "" {
		updatedAtStr = fmt.Sprintf("Updated @ %s", se.db.UpdatedAt)
	}
	titleText := fmt.Sprintf("[::b]RelSpec Schema Editor\n[::d]Database: %s %s\n[::d]Press arrow keys to navigate, Enter to select", dbName, updatedAtStr)
	title := tview.NewTextView().
		SetText(titleText).
		SetDynamicColors(true)

	// Menu options
	menu := tview.NewList().
		AddItem("Edit Database", "Edit database name, description, and properties", 'e', func() {
			se.showEditDatabaseForm()
		}).
		AddItem("Manage Schemas", "View, create, edit, and delete schemas", 's', func() {
			se.showSchemaList()
		}).
		AddItem("Manage Tables", "View and manage tables in schemas", 't', func() {
			se.showTableList()
		}).
		AddItem("Manage Domains", "View, create, edit, and delete domains", 'd', func() {
			se.showDomainList()
		}).
		AddItem("Import & Merge", "Import and merge schema from another database", 'i', func() {
			se.showImportScreen()
		}).
		AddItem("Save Database", "Save the database to a file or export SQL", 'w', func() {
			se.showSaveScreen()
		}).
		AddItem("Exit Editor", "Exit the editor", 'q', func() {
			se.app.Stop()
		})

	menu.SetBorder(true).SetTitle(" Menu ").SetTitleAlign(tview.AlignLeft)
	menu.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitEditorConfirm()
			return nil
		}
		return event
	})

	flex.AddItem(title, 5, 0, false).
		AddItem(menu, 0, 1, true)

	return flex
}
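Every dialog in these screens follows the same tview pattern: build a Modal, register it as a page, and remove the page in the done callback. A minimal self-contained sketch of that pattern (standalone program; it assumes only the tview dependency already imported above):

	package main

	import "github.com/rivo/tview"

	func main() {
		app := tview.NewApplication()
		pages := tview.NewPages()
		modal := tview.NewModal().
			SetText("Delete item?").
			AddButtons([]string{"Cancel", "OK"}).
			SetDoneFunc(func(buttonIndex int, buttonLabel string) {
				pages.RemovePage("confirm") // dismiss regardless of the choice
				if buttonLabel == "OK" {
					// the confirmed action would run here
				}
				app.Stop() // for this sketch, quit once dismissed
			})
		pages.AddAndSwitchToPage("confirm", modal, true)
		if err := app.SetRoot(pages, true).Run(); err != nil {
			panic(err)
		}
	}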
115	pkg/ui/relation_dataops.go	Normal file
@@ -0,0 +1,115 @@
package ui

import (
	"sort"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// Relationship data operations - business logic for relationship management

// CreateRelationship creates a new relationship and adds it to a table
func (se *SchemaEditor) CreateRelationship(schemaIndex, tableIndex int, rel *models.Relationship) *models.Relationship {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return nil
	}

	table := schema.Tables[tableIndex]
	if table.Relationships == nil {
		table.Relationships = make(map[string]*models.Relationship)
	}

	table.Relationships[rel.Name] = rel
	table.UpdateDate()
	return rel
}

// UpdateRelationship updates an existing relationship
func (se *SchemaEditor) UpdateRelationship(schemaIndex, tableIndex int, oldName string, rel *models.Relationship) bool {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return false
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return false
	}

	table := schema.Tables[tableIndex]
	if table.Relationships == nil {
		return false
	}

	// Delete old entry if name changed
	if oldName != rel.Name {
		delete(table.Relationships, oldName)
	}

	table.Relationships[rel.Name] = rel
	table.UpdateDate()
	return true
}

// DeleteRelationship removes a relationship from a table
func (se *SchemaEditor) DeleteRelationship(schemaIndex, tableIndex int, relName string) bool {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return false
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return false
	}

	table := schema.Tables[tableIndex]
	if table.Relationships == nil {
		return false
	}

	delete(table.Relationships, relName)
	table.UpdateDate()
	return true
}

// GetRelationship returns a relationship by name
func (se *SchemaEditor) GetRelationship(schemaIndex, tableIndex int, relName string) *models.Relationship {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return nil
	}

	table := schema.Tables[tableIndex]
	if table.Relationships == nil {
		return nil
	}

	return table.Relationships[relName]
}

// GetRelationshipNames returns all relationship names for a table, sorted
// alphabetically so list screens render rows in a stable order (map
// iteration order is randomized in Go)
func (se *SchemaEditor) GetRelationshipNames(schemaIndex, tableIndex int) []string {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return nil
	}

	table := schema.Tables[tableIndex]
	if table.Relationships == nil {
		return nil
	}

	names := make([]string, 0, len(table.Relationships))
	for name := range table.Relationships {
		names = append(names, name)
	}
	sort.Strings(names)
	return names
}
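Together these five methods form a small CRUD layer over table.Relationships. A hedged usage sketch from inside a SchemaEditor method (field names follow the code above; models.InitRelationship is used the same way in relation_screens.go below):

	rel := models.InitRelationship("fk_orders_users", models.OneToMany)
	rel.FromColumns = []string{"user_id"}
	rel.ToTable = "users"
	rel.ToColumns = []string{"id"}

	if se.CreateRelationship(0, 1, rel) == nil {
		// nil means schema index 0 or table index 1 was out of range
	}
	names := se.GetRelationshipNames(0, 1) // sorted alphabetically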
486	pkg/ui/relation_screens.go	Normal file
@@ -0,0 +1,486 @@
package ui

import (
	"fmt"
	"strings"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// showRelationshipList displays all relationships for a table
func (se *SchemaEditor) showRelationshipList(schemaIndex, tableIndex int) {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return
	}

	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText(fmt.Sprintf("[::b]Relationships for Table: %s", table.Name)).
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Create relationships table
	relTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)

	// Add header row; the widths pad the first five columns, Description is unpadded
	headers := []string{"Name", "Type", "From Columns", "To Table", "To Columns", "Description"}
	headerWidths := []int{20, 15, 20, 20, 20}
	for i, header := range headers {
		padding := ""
		if i < len(headerWidths) && len(header) < headerWidths[i] {
			padding = strings.Repeat(" ", headerWidths[i]-len(header))
		}
		cell := tview.NewTableCell(header + padding).
			SetTextColor(tcell.ColorYellow).
			SetSelectable(false).
			SetAlign(tview.AlignLeft)
		relTable.SetCell(0, i, cell)
	}

	// Get relationship names
	relNames := se.GetRelationshipNames(schemaIndex, tableIndex)
	for row, relName := range relNames {
		rel := table.Relationships[relName]

		// Name
		nameStr := fmt.Sprintf("%-20s", rel.Name)
		nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
		relTable.SetCell(row+1, 0, nameCell)

		// Type
		typeStr := fmt.Sprintf("%-15s", string(rel.Type))
		typeCell := tview.NewTableCell(typeStr).SetSelectable(true)
		relTable.SetCell(row+1, 1, typeCell)

		// From Columns
		fromColsStr := strings.Join(rel.FromColumns, ", ")
		fromColsStr = fmt.Sprintf("%-20s", fromColsStr)
		fromColsCell := tview.NewTableCell(fromColsStr).SetSelectable(true)
		relTable.SetCell(row+1, 2, fromColsCell)

		// To Table (schema-qualified when it lives in a different schema)
		toTableStr := rel.ToTable
		if rel.ToSchema != "" && rel.ToSchema != table.Schema {
			toTableStr = rel.ToSchema + "." + rel.ToTable
		}
		toTableStr = fmt.Sprintf("%-20s", toTableStr)
		toTableCell := tview.NewTableCell(toTableStr).SetSelectable(true)
		relTable.SetCell(row+1, 3, toTableCell)

		// To Columns
		toColsStr := strings.Join(rel.ToColumns, ", ")
		toColsStr = fmt.Sprintf("%-20s", toColsStr)
		toColsCell := tview.NewTableCell(toColsStr).SetSelectable(true)
		relTable.SetCell(row+1, 4, toColsCell)

		// Description
		descCell := tview.NewTableCell(rel.Description).SetSelectable(true)
		relTable.SetCell(row+1, 5, descCell)
	}

	relTable.SetTitle(" Relationships ").SetBorder(true).SetTitleAlign(tview.AlignLeft)

	// Action buttons
	btnFlex := tview.NewFlex()
	btnNew := tview.NewButton("New Relationship [n]").SetSelectedFunc(func() {
		se.showNewRelationshipDialog(schemaIndex, tableIndex)
	})
	btnEdit := tview.NewButton("Edit [e]").SetSelectedFunc(func() {
		row, _ := relTable.GetSelection()
		if row > 0 && row <= len(relNames) {
			relName := relNames[row-1]
			se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
		}
	})
	btnDelete := tview.NewButton("Delete [d]").SetSelectedFunc(func() {
		row, _ := relTable.GetSelection()
		if row > 0 && row <= len(relNames) {
			relName := relNames[row-1]
			se.showDeleteRelationshipConfirm(schemaIndex, tableIndex, relName)
		}
	})
	btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
		se.pages.RemovePage("relationships")
		se.pages.SwitchToPage("table-editor")
	})

	// Set up button navigation (Tab cycles forward, Backtab backward)
	btnNew.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(relTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnEdit)
			return nil
		}
		return event
	})

	btnEdit.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNew)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnDelete)
			return nil
		}
		return event
	})

	btnDelete.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnEdit)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnDelete)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(relTable)
			return nil
		}
		return event
	})

	btnFlex.AddItem(btnNew, 0, 1, true).
		AddItem(btnEdit, 0, 1, false).
		AddItem(btnDelete, 0, 1, false).
		AddItem(btnBack, 0, 1, false)

	relTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("relationships")
			se.pages.SwitchToPage("table-editor")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNew)
			return nil
		}
		if event.Key() == tcell.KeyEnter {
			row, _ := relTable.GetSelection()
			if row > 0 && row <= len(relNames) {
				relName := relNames[row-1]
				se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
			}
			return nil
		}
		if event.Rune() == 'n' {
			se.showNewRelationshipDialog(schemaIndex, tableIndex)
			return nil
		}
		if event.Rune() == 'e' {
			row, _ := relTable.GetSelection()
			if row > 0 && row <= len(relNames) {
				relName := relNames[row-1]
				se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
			}
			return nil
		}
		if event.Rune() == 'd' {
			row, _ := relTable.GetSelection()
			if row > 0 && row <= len(relNames) {
				relName := relNames[row-1]
				se.showDeleteRelationshipConfirm(schemaIndex, tableIndex, relName)
			}
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.RemovePage("relationships")
			se.pages.SwitchToPage("table-editor")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(relTable, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("relationships", flex, true, true)
}
// showNewRelationshipDialog shows dialog to create a new relationship
func (se *SchemaEditor) showNewRelationshipDialog(schemaIndex, tableIndex int) {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return
	}

	form := tview.NewForm()

	// Collect all tables for dropdown
	var allTables []string
	var tableMap []struct{ schemaIdx, tableIdx int }
	for si, schema := range se.db.Schemas {
		for ti, t := range schema.Tables {
			tableName := t.Name
			if schema.Name != table.Schema {
				tableName = schema.Name + "." + t.Name
			}
			allTables = append(allTables, tableName)
			tableMap = append(tableMap, struct{ schemaIdx, tableIdx int }{si, ti})
		}
	}

	relName := ""
	relType := models.OneToMany
	fromColumns := ""
	toColumns := ""
	description := ""
	selectedTableIdx := 0

	form.AddInputField("Name", "", 40, nil, func(value string) {
		relName = value
	})

	form.AddDropDown("Type", []string{
		string(models.OneToOne),
		string(models.OneToMany),
		string(models.ManyToMany),
	}, 1, func(option string, optionIndex int) {
		relType = models.RelationType(option)
	})

	form.AddInputField("From Columns (comma-separated)", "", 40, nil, func(value string) {
		fromColumns = value
	})

	form.AddDropDown("To Table", allTables, 0, func(option string, optionIndex int) {
		selectedTableIdx = optionIndex
	})

	form.AddInputField("To Columns (comma-separated)", "", 40, nil, func(value string) {
		toColumns = value
	})

	form.AddInputField("Description", "", 60, nil, func(value string) {
		description = value
	})

	form.AddButton("Save", func() {
		if relName == "" {
			return
		}

		// Parse columns
		fromCols := strings.Split(fromColumns, ",")
		for i := range fromCols {
			fromCols[i] = strings.TrimSpace(fromCols[i])
		}

		toCols := strings.Split(toColumns, ",")
		for i := range toCols {
			toCols[i] = strings.TrimSpace(toCols[i])
		}

		// Get target table
		targetSchema := se.db.Schemas[tableMap[selectedTableIdx].schemaIdx]
		targetTable := targetSchema.Tables[tableMap[selectedTableIdx].tableIdx]

		rel := models.InitRelationship(relName, relType)
		rel.FromTable = table.Name
		rel.FromSchema = table.Schema
		rel.FromColumns = fromCols
		rel.ToTable = targetTable.Name
		rel.ToSchema = targetTable.Schema
		rel.ToColumns = toCols
		rel.Description = description

		se.CreateRelationship(schemaIndex, tableIndex, rel)

		se.pages.RemovePage("new-relationship")
		se.pages.RemovePage("relationships")
		se.showRelationshipList(schemaIndex, tableIndex)
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("new-relationship")
	})

	form.SetBorder(true).SetTitle(" New Relationship ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("new-relationship")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-relationship", form, true, true)
}
// showEditRelationshipDialog shows dialog to edit a relationship
func (se *SchemaEditor) showEditRelationshipDialog(schemaIndex, tableIndex int, relName string) {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return
	}

	rel := se.GetRelationship(schemaIndex, tableIndex, relName)
	if rel == nil {
		return
	}

	form := tview.NewForm()

	// Collect all tables for dropdown
	var allTables []string
	var tableMap []struct{ schemaIdx, tableIdx int }
	selectedTableIdx := 0
	for si, schema := range se.db.Schemas {
		for ti, t := range schema.Tables {
			tableName := t.Name
			if schema.Name != table.Schema {
				tableName = schema.Name + "." + t.Name
			}
			allTables = append(allTables, tableName)
			tableMap = append(tableMap, struct{ schemaIdx, tableIdx int }{si, ti})

			// Check if this is the current target table
			if t.Name == rel.ToTable && schema.Name == rel.ToSchema {
				selectedTableIdx = len(allTables) - 1
			}
		}
	}

	newName := rel.Name
	relType := rel.Type
	fromColumns := strings.Join(rel.FromColumns, ", ")
	toColumns := strings.Join(rel.ToColumns, ", ")
	description := rel.Description

	form.AddInputField("Name", rel.Name, 40, nil, func(value string) {
		newName = value
	})

	// Find initial type index
	typeIdx := 1 // OneToMany default
	typeOptions := []string{
		string(models.OneToOne),
		string(models.OneToMany),
		string(models.ManyToMany),
	}
	for i, opt := range typeOptions {
		if opt == string(rel.Type) {
			typeIdx = i
			break
		}
	}

	form.AddDropDown("Type", typeOptions, typeIdx, func(option string, optionIndex int) {
		relType = models.RelationType(option)
	})

	form.AddInputField("From Columns (comma-separated)", fromColumns, 40, nil, func(value string) {
		fromColumns = value
	})

	form.AddDropDown("To Table", allTables, selectedTableIdx, func(option string, optionIndex int) {
		selectedTableIdx = optionIndex
	})

	form.AddInputField("To Columns (comma-separated)", toColumns, 40, nil, func(value string) {
		toColumns = value
	})

	form.AddInputField("Description", rel.Description, 60, nil, func(value string) {
		description = value
	})

	form.AddButton("Save", func() {
		if newName == "" {
			return
		}

		// Parse columns
		fromCols := strings.Split(fromColumns, ",")
		for i := range fromCols {
			fromCols[i] = strings.TrimSpace(fromCols[i])
		}

		toCols := strings.Split(toColumns, ",")
		for i := range toCols {
			toCols[i] = strings.TrimSpace(toCols[i])
		}

		// Get target table
		targetSchema := se.db.Schemas[tableMap[selectedTableIdx].schemaIdx]
		targetTable := targetSchema.Tables[tableMap[selectedTableIdx].tableIdx]

		updatedRel := models.InitRelationship(newName, relType)
		updatedRel.FromTable = table.Name
		updatedRel.FromSchema = table.Schema
		updatedRel.FromColumns = fromCols
		updatedRel.ToTable = targetTable.Name
		updatedRel.ToSchema = targetTable.Schema
		updatedRel.ToColumns = toCols
		updatedRel.Description = description
		updatedRel.GUID = rel.GUID

		se.UpdateRelationship(schemaIndex, tableIndex, relName, updatedRel)

		se.pages.RemovePage("edit-relationship")
		se.pages.RemovePage("relationships")
		se.showRelationshipList(schemaIndex, tableIndex)
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("edit-relationship")
	})

	form.SetBorder(true).SetTitle(" Edit Relationship ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("edit-relationship")
			return nil
		}
		return event
	})

	se.pages.AddPage("edit-relationship", form, true, true)
}
// showDeleteRelationshipConfirm shows confirmation dialog for deleting a relationship
func (se *SchemaEditor) showDeleteRelationshipConfirm(schemaIndex, tableIndex int, relName string) {
	modal := tview.NewModal().
		SetText(fmt.Sprintf("Delete relationship '%s'? This action cannot be undone.", relName)).
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				se.DeleteRelationship(schemaIndex, tableIndex, relName)
				se.pages.RemovePage("delete-relationship-confirm")
				se.pages.RemovePage("relationships")
				se.showRelationshipList(schemaIndex, tableIndex)
			} else {
				se.pages.RemovePage("delete-relationship-confirm")
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("delete-relationship-confirm")
			return nil
		}
		return event
	})

	se.pages.AddAndSwitchToPage("delete-relationship-confirm", modal, true)
}
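One caveat in both relationship dialogs above: strings.Split("", ",") returns a one-element slice containing the empty string, so leaving a columns field blank stores [""] rather than an empty list. A stricter variant would filter the way parseSkipTablesUI does (hypothetical helper, not part of this diff):

	func splitColumns(s string) []string {
		var cols []string
		for _, part := range strings.Split(s, ",") {
			if trimmed := strings.TrimSpace(part); trimmed != "" {
				cols = append(cols, trimmed)
			}
		}
		return cols // nil when the input is empty or all blanks
	}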
Some files were not shown because too many files have changed in this diff.