Compare commits
39 Commits
| SHA1 |
|---|
| b57e1ba304 |
| 19fba62f1b |
| b4ff4334cc |
| 5d9b00c8f2 |
| debf351c48 |
| d87d657275 |
| 1795eb64d1 |
| 355f0f918f |
| 5d3c86119e |
| 8c602e3db0 |
| 64aeac972a |
| 97a57f5dc8 |
| adfe126758 |
| 1d193c84d7 |
| 1d627c74b1 |
| 7c6a355458 |
| c0ef26b660 |
| cb38f95b79 |
| 196d87bc29 |
| beb1100d86 |
| 410b1ee743 |
| b5d39aeee4 |
| 5fb9a8f231 |
| 27da24f575 |
| 0fb3469dbd |
| 9f29bc112e |
| b55737ab4c |
| 2a271b9859 |
| beb5b4fac8 |
| e61204cb3c |
| d52b9cdc14 |
| f98b278d72 |
| 666eab7cec |
| 35bc9dfb5c |
| aad5db5175 |
| d9225a7310 |
| 79effe6921 |
| 289715ba44 |
| 8ca2b50f9c |
@@ -4,10 +4,7 @@
    "description": "Database Relations Specification Tool for Go",
    "language": "go"
  },
  "agent": {
    "preferred": "Explore",
    "description": "Use Explore agent for fast codebase navigation and Go project exploration"
  },
  "codeStyle": {
    "useGofmt": true,
    "lineLength": 100,
.github/workflows/ci.yml (vendored, 36 lines changed)
@@ -1,5 +1,5 @@
 name: CI
-
+run-name: "Test on master branch"
 on:
   push:
     branches: [ master ]
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        go-version: ['1.23', '1.24', '1.25']
+        go-version: ['1.24', '1.25']

     steps:
       - name: Checkout code
@@ -34,8 +34,8 @@ jobs:
       - name: Download dependencies
         run: go mod download

-      - name: Run tests
-        run: go test -v -race -coverprofile=coverage.out -covermode=atomic ./...
+      - name: Run unit tests
+        run: make test

       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v4
@@ -57,11 +57,13 @@ jobs:
         with:
           go-version: '1.25'

-      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v6
-        with:
-          version: latest
-          args: --config=.golangci.json
+      - name: Install golangci-lint
+        run: |
+          curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin latest
+          echo "$(go env GOPATH)/bin" >> $GITHUB_PATH
+
+      - name: Run linter
+        run: make lint

   build:
     name: Build
@@ -76,8 +78,20 @@ jobs:
         with:
           go-version: '1.25'

-      - name: Build
-        run: go build -v ./cmd/relspec
+      - name: Download dependencies
+        run: go mod download
+
+      - name: Build binary
+        run: make build
+
+      - name: Verify binary exists
+        run: |
+          if [ ! -f build/relspec ]; then
+            echo "Error: Binary not found at build/relspec"
+            exit 1
+          fi
+          echo "Build successful: build/relspec"
+          ls -lh build/relspec

       - name: Check mod tidiness
         run: |
.github/workflows/integration-tests.yml (vendored, new file, 72 lines)
@@ -0,0 +1,72 @@
name: Integration Tests
run-name: "Integration Tests"
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: relspec
          POSTGRES_PASSWORD: relspec_test_password
          POSTGRES_DB: relspec_test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.25'

      - name: Cache Go modules
        uses: actions/cache@v4
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
          restore-keys: |
            ${{ runner.os }}-go-

      - name: Download dependencies
        run: go mod download

      - name: Initialize test database
        env:
          PGPASSWORD: relspec_test_password
        run: |
          # Services are accessible via hostname matching the service name
          psql -h postgres -U relspec -d relspec_test -f tests/postgres/init.sql

      - name: Verify database setup
        env:
          PGPASSWORD: relspec_test_password
        run: |
          echo "Verifying database initialization..."
          psql -h postgres -U relspec -d relspec_test -c "
            SELECT
              (SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
              (SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
              (SELECT COUNT(*) FROM pg_views WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as views,
              (SELECT COUNT(*) FROM pg_sequences WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as sequences;
          "

      - name: Run integration tests
        env:
          RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@postgres:5432/relspec_test
        run: make test-integration
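The workflow hands the connection string to the suite through RELSPEC_TEST_PG_CONN, the same variable that `make test-integration` guards on. The test code itself is not part of this comparison; a minimal sketch of how such a suite might gate on the variable (package name and message are illustrative):

```go
package integration

import (
	"fmt"
	"os"
	"testing"
)

// TestMain skips the whole package when the connection string is absent,
// mirroring the guard in the Makefile's test-integration target.
func TestMain(m *testing.M) {
	if os.Getenv("RELSPEC_TEST_PG_CONN") == "" {
		fmt.Println("RELSPEC_TEST_PG_CONN not set; skipping integration tests")
		os.Exit(0)
	}
	os.Exit(m.Run())
}
```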
.github/workflows/release.yml (vendored, new file, 116 lines)
@@ -0,0 +1,116 @@
name: Release
run-name: "Making Release"
on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  build-and-release:
    name: Build and Release
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.25'

      - name: Get version from tag
        id: get_version
        run: |
          echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
          echo "Version: ${GITHUB_REF#refs/tags/}"

      - name: Build binaries for multiple platforms
        run: |
          mkdir -p dist

          # Linux AMD64
          GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Linux ARM64
          GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # macOS AMD64
          GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # macOS ARM64 (Apple Silicon)
          GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Windows AMD64
          GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Create checksums
          cd dist
          sha256sum * > checksums.txt
          cd ..

      - name: Generate release notes
        id: release_notes
        run: |
          # Get the previous tag
          previous_tag=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")

          if [ -z "$previous_tag" ]; then
            # No previous tag, get all commits
            commits=$(git log --pretty=format:"- %s (%h)" --no-merges)
          else
            # Get commits since the previous tag
            commits=$(git log "${previous_tag}..HEAD" --pretty=format:"- %s (%h)" --no-merges)
          fi

          # Create release notes
          cat > release_notes.md << EOF
          # Release ${{ steps.get_version.outputs.VERSION }}

          ## Changes

          ${commits}

          ## Installation

          Download the appropriate binary for your platform:

          - **Linux (AMD64)**: \`relspec-linux-amd64\`
          - **Linux (ARM64)**: \`relspec-linux-arm64\`
          - **macOS (Intel)**: \`relspec-darwin-amd64\`
          - **macOS (Apple Silicon)**: \`relspec-darwin-arm64\`
          - **Windows (AMD64)**: \`relspec-windows-amd64.exe\`

          Make the binary executable (Linux/macOS):
          \`\`\`bash
          chmod +x relspec-*
          \`\`\`

          Verify the download with the provided checksums.
          EOF

      - name: Create Release
        uses: softprops/action-gh-release@v1
        with:
          body_path: release_notes.md
          files: |
            dist/relspec-linux-amd64
            dist/relspec-linux-arm64
            dist/relspec-darwin-amd64
            dist/relspec-darwin-arm64
            dist/relspec-windows-amd64.exe
            dist/checksums.txt
          draft: false
          prerelease: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Summary
        run: |
          echo "Release ${{ steps.get_version.outputs.VERSION }} created successfully!"
          echo "Binaries built for:"
          echo "  - Linux (amd64, arm64)"
          echo "  - macOS (amd64, arm64)"
          echo "  - Windows (amd64)"
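The `-ldflags "-X main.version=..."` injection only takes effect if `./cmd/relspec` declares a matching package-level string variable; that declaration is not shown in this comparison. A minimal sketch of the usual pattern:

```go
package main

import "fmt"

// version is overridden at build time via:
//   go build -ldflags "-X main.version=v1.2.3" ./cmd/relspec
// It must be a package-level string variable (a constant cannot be patched).
var version = "dev"

func main() {
	fmt.Println("relspec", version)
}
```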
AI_USE.md (new file, 35 lines)
@@ -0,0 +1,35 @@
# AI Usage Declaration

This Go project utilizes AI tools for the following purposes:

- Generating and improving documentation
- Writing and enhancing tests
- Refactoring and optimizing existing code

AI is **not** used for core design or architecture decisions.
All design decisions are deferred to human discussion.
AI is employed only for enhancements to human-written code.

We are aware that AI can hallucinate significantly; all AI-generated content is reviewed and verified by humans.

        .-""""""-.
      .'          '.
     /   O      O   \
    :            `   :
    |                |
    :    .------.    :
     \  '        '  /
      '.          .'
        '-......-'
       MEGAMIND AI
      [============]

        ___________
       /___________\
      /_____________\
      |  ASSIMILATE |
      |  RESISTANCE |
      |  IS FUTILE  |
      \_____________/
       \___________/
Makefile (148 lines changed)
@@ -1,4 +1,4 @@
-.PHONY: all build test lint coverage clean install help docker-up docker-down docker-test docker-test-integration
+.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version

 # Binary name
 BINARY_NAME=relspec
@@ -14,17 +14,51 @@ GOGET=$(GOCMD) get
 GOMOD=$(GOCMD) mod
 GOCLEAN=$(GOCMD) clean

+# Auto-detect container runtime (Docker or Podman)
+CONTAINER_RUNTIME := $(shell \
+	if command -v podman > /dev/null 2>&1; then \
+		echo "podman"; \
+	elif command -v docker > /dev/null 2>&1; then \
+		echo "docker"; \
+	else \
+		echo "none"; \
+	fi)
+
+# Detect compose command
+COMPOSE_CMD := $(shell \
+	if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		echo "podman-compose"; \
+	elif command -v docker-compose > /dev/null 2>&1; then \
+		echo "docker-compose"; \
+	else \
+		echo "docker compose"; \
+	fi)
+
 all: lint test build ## Run linting, tests, and build

-build: ## Build the binary
+build: deps ## Build the binary
	@echo "Building $(BINARY_NAME)..."
	@mkdir -p $(BUILD_DIR)
	$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
	@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"

-test: ## Run tests
-	@echo "Running tests..."
-	$(GOTEST) -v -race -coverprofile=coverage.out ./...
+test: test-unit ## Run all unit tests (alias for test-unit)
+
+test-unit: ## Run unit tests (excludes integration tests)
+	@echo "Running unit tests..."
+	$(GOTEST) -v -race -coverprofile=coverage.out -covermode=atomic $$(go list ./... | grep -v '/tests/integration' | grep -v '/tests/assets' | grep -v '/pkg/readers/pgsql')
+
+test-integration: ## Run integration tests (requires RELSPEC_TEST_PG_CONN environment variable)
+	@echo "Running integration tests..."
+	@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
+		echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
+		echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5439/relspec_test'"; \
+		exit 1; \
+	fi
+	@echo "Running PostgreSQL reader tests..."
+	$(GOTEST) -v -count=1 ./pkg/readers/pgsql/
+	@echo "Running general integration tests..."
+	$(GOTEST) -v -count=1 ./tests/integration/

 coverage: test ## Run tests with coverage report
	@echo "Generating coverage report..."
@@ -40,6 +74,15 @@ lint: ## Run linter
		exit 1; \
	fi

+lintfix: ## Run linter with automatic fixes
+	@echo "Running linter..."
+	@if command -v golangci-lint > /dev/null; then \
+		golangci-lint run --config=.golangci.json --fix; \
+	else \
+		echo "golangci-lint not installed. Install with: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"; \
+		exit 1; \
+	fi
+
 clean: ## Clean build artifacts
	@echo "Cleaning..."
	$(GOCLEAN)
@@ -58,36 +101,105 @@ deps: ## Download dependencies
	$(GOMOD) tidy
	@echo "Dependencies updated"

+start: docker-up ## Alias for docker-up (start PostgreSQL test database)
+
+stop: docker-down ## Alias for docker-down (stop PostgreSQL test database)
+
 docker-up: ## Start PostgreSQL test database
-	@echo "Starting PostgreSQL test database..."
-	@if command -v docker-compose > /dev/null 2>&1; then \
-		docker-compose up -d postgres; \
+	@echo "Starting PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
+	@if [ "$(CONTAINER_RUNTIME)" = "none" ]; then \
+		echo "Error: Neither Docker nor Podman is installed"; \
+		exit 1; \
+	fi
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		podman run -d --name relspec-test-postgres \
+			-e POSTGRES_USER=relspec \
+			-e POSTGRES_PASSWORD=relspec_test_password \
+			-e POSTGRES_DB=relspec_test \
+			-p 5439:5432 \
+			-v ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:Z \
+			postgres:16-alpine 2>/dev/null || echo "Container already running"; \
	else \
-		docker compose up -d postgres; \
+		$(COMPOSE_CMD) up -d postgres; \
	fi
	@echo "Waiting for PostgreSQL to be ready..."
	@sleep 3
-	@echo "PostgreSQL is running on port 5433"
-	@echo "Connection: postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
+	@echo "PostgreSQL is running on port 5439"
+	@echo "Connection: postgres://relspec:relspec_test_password@localhost:5439/relspec_test"

 docker-down: ## Stop PostgreSQL test database
-	@echo "Stopping PostgreSQL test database..."
-	@if command -v docker-compose > /dev/null 2>&1; then \
-		docker-compose down; \
+	@echo "Stopping PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		podman stop relspec-test-postgres 2>/dev/null || true; \
+		podman rm relspec-test-postgres 2>/dev/null || true; \
	else \
-		docker compose down; \
+		$(COMPOSE_CMD) down; \
	fi
	@echo "PostgreSQL stopped"

-docker-test: ## Run PostgreSQL integration tests with Docker
-	@./tests/postgres/run_tests.sh
+docker-test: ## Run PostgreSQL integration tests with Docker/Podman
+	@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
+		./tests/postgres/run_tests_podman.sh; \
+	else \
+		./tests/postgres/run_tests.sh; \
+	fi

 docker-test-integration: docker-up ## Start DB and run integration tests
	@echo "Running integration tests..."
	@sleep 2
-	@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test" \
+	@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test" \
		$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
	@make docker-down

+release: ## Create and push a new release tag (auto-increments patch version)
+	@echo "Creating new release..."
+	@latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
+	if [ -z "$$latest_tag" ]; then \
+		version="v1.0.0"; \
+		echo "No existing tags found. Creating first release: $$version"; \
+		commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
+	else \
+		echo "Latest tag: $$latest_tag"; \
+		version_number=$${latest_tag#v}; \
+		IFS='.' read -r major minor patch <<< "$$version_number"; \
+		patch=$$((patch + 1)); \
+		version="v$$major.$$minor.$$patch"; \
+		echo "Creating new release: $$version"; \
+		commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
+	fi; \
+	if [ -z "$$commit_logs" ]; then \
+		tag_message="Release $$version"; \
+	else \
+		tag_message="Release $$version\n\n$$commit_logs"; \
+	fi; \
+	git tag -a "$$version" -m "$$tag_message"; \
+	git push origin "$$version"; \
+	echo "Tag $$version created and pushed to remote repository."
+
+release-version: ## Create and push a release with specific version (use: make release-version VERSION=v1.2.3)
+	@if [ -z "$(VERSION)" ]; then \
+		echo "Error: VERSION is required. Usage: make release-version VERSION=v1.2.3"; \
+		exit 1; \
+	fi
+	@version="$(VERSION)"; \
+	if ! echo "$$version" | grep -q "^v"; then \
+		version="v$$version"; \
+	fi; \
+	echo "Creating release: $$version"; \
+	latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
+	if [ -z "$$latest_tag" ]; then \
+		commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
+	else \
+		commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
+	fi; \
+	if [ -z "$$commit_logs" ]; then \
+		tag_message="Release $$version"; \
+	else \
+		tag_message="Release $$version\n\n$$commit_logs"; \
+	fi; \
+	git tag -a "$$version" -m "$$tag_message"; \
+	git push origin "$$version"; \
+	echo "Tag $$version created and pushed to remote repository."

 help: ## Display this help screen
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
README.md (212 lines changed)
@@ -1,34 +1,112 @@
 # RelSpec

+[](https://git.warky.dev/wdevs/relspecgo/releases/latest)
+[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/ci.yml)
+[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/integration-tests.yml)
+[](https://go.dev/dl/)
+[](LICENSE)
+
 > Database Relations Specification Tool for Go

 RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.

 ## Overview

-RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
+RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
 - Inspect live databases and extract their structure
-- Convert between different ORM models (GORM, Bun , etc.)
+- Validate schemas against configurable rules and naming conventions
+- Convert between different ORM models (GORM, Bun, etc.)
 - Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
 - Generate standardized specification files (JSON, YAML, etc.)
 - Compare database schemas and track changes

 ## Features

-### Input Formats
-- **XML** - Generic XML schema definitions
-- **JSON** - JSON-based schema specifications
-- **Clarion DCTX** - Clarion database dictionary format
-- **Database Inspection** - Direct database introspection
-- **GORM Models** - Read existing GORM Go structs
-- **Bun Models** - Read existing Bun Go structs
+### Readers (Input Formats)
+
+RelSpec can read database schemas from multiple sources:
+
+#### ORM Models
+- [GORM](pkg/readers/gorm/README.md) - Go GORM model definitions
+- [Bun](pkg/readers/bun/README.md) - Go Bun model definitions
+- [Drizzle](pkg/readers/drizzle/README.md) - TypeScript Drizzle ORM schemas
+- [Prisma](pkg/readers/prisma/README.md) - Prisma schema language
+- [TypeORM](pkg/readers/typeorm/README.md) - TypeScript TypeORM entities
+
+#### Database Inspection
+- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
+
+#### Schema Formats
+- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
+- [DCTX](pkg/readers/dctx/README.md) - Clarion database dictionary format
+- [DrawDB](pkg/readers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/readers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
+- [JSON](pkg/readers/json/README.md) - RelSpec canonical JSON format
+- [YAML](pkg/readers/yaml/README.md) - RelSpec canonical YAML format
+
+### Writers (Output Formats)
+
+RelSpec can write database schemas to multiple formats:
+
+#### ORM Models
+- [GORM](pkg/writers/gorm/README.md) - Generate GORM-compatible Go structs
+- [Bun](pkg/writers/bun/README.md) - Generate Bun-compatible Go structs
+- [Drizzle](pkg/writers/drizzle/README.md) - Generate Drizzle ORM TypeScript schemas
+- [Prisma](pkg/writers/prisma/README.md) - Generate Prisma schema files
+- [TypeORM](pkg/writers/typeorm/README.md) - Generate TypeORM TypeScript entities
+
+#### Database DDL
+- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
+
+#### Schema Formats
+- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
+- [DCTX](pkg/writers/dctx/README.md) - Clarion database dictionary format
+- [DrawDB](pkg/writers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/writers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
+- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
+- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
+
+### Inspector (Schema Validation)
+
+RelSpec includes a powerful schema validation and linting tool:
+
+- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
+- Enforce naming conventions (snake_case, camelCase, custom patterns)
+- Check primary key and foreign key standards
+- Detect missing indexes on foreign keys
+- Prevent use of SQL reserved keywords
+- Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
+- Support for custom validation rules
+- Multiple output formats (Markdown with colors, JSON)
+- CI/CD integration ready
+
+## Use of AI
+[Rules and use of AI](./AI_USE.md)
+
+## User Interface
+
+RelSpec provides an interactive terminal-based user interface for managing and editing database schemas. The UI allows you to:
+
+- **Browse Databases** - Navigate through your database structure with an intuitive menu system
+- **Edit Schemas** - Create, modify, and organize database schemas
+- **Manage Tables** - Add, update, or delete tables with full control over structure
+- **Configure Columns** - Define column properties, data types, constraints, and relationships
+- **Interactive Editing** - Real-time validation and feedback as you make changes
+
+The interface supports multiple input formats, making it easy to load, edit, and save your database definitions in various formats.
+
+<p align="center" width="100%">
+    <img src="./assets/image/screenshots/main_screen.jpg">
+</p>
+<p align="center" width="100%">
+    <img src="./assets/image/screenshots/table_view.jpg">
+</p>
+<p align="center" width="100%">
+    <img src="./assets/image/screenshots/edit_column.jpg">
+</p>

-### Output Formats
-- **GORM Models** - Generate GORM-compatible Go structs
-- **Bun Models** - Generate Bun-compatible Go structs
-- **JSON** - Standard JSON schema output
-- **YAML** - Human-readable YAML format

 ## Installation
@@ -40,30 +118,114 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest

 ## Usage

+### Interactive Schema Editor
+
 ```bash
-# Inspect database and generate GORM models
-relspec --input db --conn "postgres://..." --output gorm --out-file models.go
+# Launch interactive editor with a DBML schema
+relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
+
+# Edit PostgreSQL database in place
+relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+  --to pgsql --to-conn "postgres://user:pass@localhost/mydb"
+
+# Edit JSON schema and save as GORM models
+relspec edit --from json --from-path db.json --to gorm --to-path models/
+```
+
+The `edit` command launches an interactive terminal user interface where you can:
+- Browse and navigate your database structure
+- Create, modify, and delete schemas, tables, and columns
+- Configure column properties, constraints, and relationships
+- Save changes to various formats
+- Import and merge schemas from other databases
+
+### Schema Merging
+
+```bash
+# Merge two JSON schemas (additive merge - adds missing items only)
+relspec merge --target json --target-path base.json \
+  --source json --source-path additions.json \
+  --output json --output-path merged.json
+
+# Merge PostgreSQL database into JSON, skipping specific tables
+relspec merge --target json --target-path current.json \
+  --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
+  --output json --output-path updated.json \
+  --skip-tables "audit_log,temp_tables"
+
+# Cross-format merge (DBML + YAML → JSON)
+relspec merge --target dbml --target-path base.dbml \
+  --source yaml --source-path additions.yaml \
+  --output json --output-path result.json \
+  --skip-relations --skip-views
+```
+
+The `merge` command combines two database schemas additively:
+- Adds missing schemas, tables, columns, and other objects
+- Never modifies or deletes existing items (safe operation)
+- Supports selective merging with skip options (domains, relations, enums, views, sequences, specific tables)
+- Works across any combination of supported formats
+- Perfect for integrating multiple schema definitions or applying patches
+
+### Schema Conversion
+
+```bash
+# Convert PostgreSQL database to GORM models
+relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+  --to gorm --to-path models/ --package models

 # Convert GORM models to Bun
-relspec --input gorm --in-file existing.go --output bun --out-file bun_models.go
+relspec convert --from gorm --from-path models.go \
+  --to bun --to-path bun_models.go --package models

 # Export database schema to JSON
-relspec --input db --conn "mysql://..." --output json --out-file schema.json
+relspec convert --from pgsql --from-conn "postgres://..." \
+  --to json --to-path schema.json

-# Convert Clarion DCTX to YAML
-relspec --input dctx --in-file legacy.dctx --output yaml --out-file schema.yaml
+# Convert DBML to PostgreSQL SQL
+relspec convert --from dbml --from-path schema.dbml \
+  --to pgsql --to-path schema.sql
 ```

+### Schema Validation
+
+```bash
+# Validate a PostgreSQL database with default rules
+relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
+
+# Validate DBML file with custom rules
+relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
+
+# Generate JSON validation report
+relspec inspect --from json --from-path db.json \
+  --output-format json --output report.json
+
+# Validate specific schema only
+relspec inspect --from pgsql --from-conn "..." --schema public
+```
+
+### Schema Comparison
+
+```bash
+# Compare two database schemas
+relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
+  --to pgsql --to-conn "postgres://localhost/db2"
+```

 ## Project Structure

 ```
 relspecgo/
-├── cmd/                 # CLI application
+├── cmd/
+│   └── relspec/         # CLI application (convert, inspect, diff, scripts)
 ├── pkg/
-│   ├── readers/         # Input format readers
-│   ├── writers/         # Output format writers
+│   ├── readers/         # Input format readers (DBML, GORM, PostgreSQL, etc.)
+│   ├── writers/         # Output format writers (GORM, Bun, SQL, etc.)
+│   ├── inspector/       # Schema validation and linting
+│   ├── diff/            # Schema comparison
 │   ├── models/          # Internal data models
-│   └── transform/       # Transformation logic
+│   ├── transform/       # Transformation logic
+│   └── pgsql/           # PostgreSQL utilities (keywords, data types)
 ├── examples/            # Usage examples
 └── tests/               # Test files
 ```
@@ -94,7 +256,7 @@ go test ./...

 Apache License 2.0 - See [LICENSE](LICENSE) for details.

-Copyright 2025 wdevs
+Copyright 2025 Warky Devs

 ## Contributing
TODO.md (39 lines changed)
@@ -2,23 +2,36 @@

 ## Input Readers / Writers
-- [x] **Database Inspector**
-  - [x] PostgreSQL driver
+
+- [✔️] **Database Inspector**
+  - [✔️] PostgreSQL driver
   - [ ] MySQL driver
   - [ ] SQLite driver
   - [ ] MSSQL driver
-  - [x] Foreign key detection
-  - [x] Index extraction
-- [ ] .sql file generation with sequence and priority
-- [*] .dbml: Database Markup Language (DBML) for textual schema representation.
-- [ ] Prisma schema support (PSL format) .prisma
-- [ ] Entity Framework (.NET) model .edmx
-- [ ] TypeORM support
-- [ ] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP)
-- [ ] Django models.py (Python classes), Sequelize migrations (JS)
-- [ ] .avsc: Avro schema (JSON format for data serialization)
+  - [✔️] Foreign key detection
+  - [✔️] Index extraction
+- [*] .sql file generation with sequence and priority
+- [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
+- [✔️] Prisma schema support (PSL format) .prisma
+- [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you find bugs, you are welcome to open pull requests or issues)
+- [☠️] Entity Framework (.NET) model .edmx (Fuck no. EDMX files were bloated, verbose XML nightmares—hard to merge, error-prone, and a pain in teams. Microsoft wisely ditched them in EF Core for code-first. Classic overkill from the old MS era.)
+- [✔️] TypeORM support
+- [ ] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP) (💲 Someone can do this, not me)
+- [ ] Django models.py (Python classes), Sequelize migrations (JS) (💲 Someone can do this, not me)
+- [ ] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
+- [✔️] GraphQL schema generation
+
+## UI
+- [✔️] Basic UI (I went with tview)
+- [✔️] Save / Load Database
+- [✔️] Schemas / Domains / Tables
+- [ ] Add Relations
+- [ ] Add Indexes
+- [ ] Add Views
+- [ ] Add Sequences
+- [ ] Add Scripts
+- [ ] Domain / Table Assignment

 ## Documentation
 - [ ] API documentation (godoc)
@@ -36,7 +49,7 @@
 - [ ] Web UI for visual editing
 - [ ] REST API server mode
 - [ ] Support for NoSQL databases
-- [ ] GraphQL schema generation
+

 ## Performance
 - [ ] Concurrent processing for multiple tables
||||
BIN
assets/image/screenshots/edit_column.jpg
Normal file
BIN
assets/image/screenshots/edit_column.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 42 KiB |
BIN
assets/image/screenshots/main_screen.jpg
Normal file
BIN
assets/image/screenshots/main_screen.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 50 KiB |
BIN
assets/image/screenshots/table_view.jpg
Normal file
BIN
assets/image/screenshots/table_view.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 67 KiB |
cmd/relspec/convert.go

@@ -6,26 +6,35 @@ import (
 	"strings"
 	"time"

+	"github.com/spf13/cobra"
+
 	"git.warky.dev/wdevs/relspecgo/pkg/models"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
 	"git.warky.dev/wdevs/relspecgo/pkg/writers"
 	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
 	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
 	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
 	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
+	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
 	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
+	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
 	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
 	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
+	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
+	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
 	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
-	"github.com/spf13/cobra"
 )

 var (
@@ -51,20 +60,28 @@ Input formats:
   - dbml: DBML schema files
   - dctx: DCTX schema files
   - drawdb: DrawDB JSON files
+  - graphql: GraphQL schema files (.graphql, SDL)
   - json: JSON database schema
   - yaml: YAML database schema
   - gorm: GORM model files (Go, file or directory)
   - bun: Bun model files (Go, file or directory)
+  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
+  - prisma: Prisma schema files (.prisma)
+  - typeorm: TypeORM entity files (TypeScript)
   - pgsql: PostgreSQL database (live connection)

 Output formats:
   - dbml: DBML schema files
   - dctx: DCTX schema files
   - drawdb: DrawDB JSON files
+  - graphql: GraphQL schema files (.graphql, SDL)
   - json: JSON database schema
   - yaml: YAML database schema
   - gorm: GORM model files (Go)
   - bun: Bun model files (Go)
+  - drizzle: Drizzle ORM schema files (TypeScript)
+  - prisma: Prisma schema files (.prisma)
+  - typeorm: TypeORM entity files (TypeScript)
   - pgsql: PostgreSQL SQL schema

 PostgreSQL Connection String Examples:
@@ -123,18 +140,27 @@ Examples:
 }

 func init() {
-	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
+	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
 	convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
 	convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")

-	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
+	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
 	convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
 	convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
 	convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")

-	convertCmd.MarkFlagRequired("from")
-	convertCmd.MarkFlagRequired("to")
-	convertCmd.MarkFlagRequired("to-path")
+	err := convertCmd.MarkFlagRequired("from")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
+	}
+	err = convertCmd.MarkFlagRequired("to")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
+	}
+	err = convertCmd.MarkFlagRequired("to-path")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
+	}
 }

 func runConvert(cmd *cobra.Command, args []string) error {
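The three `MarkFlagRequired` checks above repeat the same shape. A hypothetical helper (not part of this change) could collapse the repetition; note that cobra's `MarkFlagRequired` only returns an error when the named flag was never registered on the command:

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// mustMarkRequired is a hypothetical helper for the repeated pattern above;
// MarkFlagRequired fails only if the named flag was never defined.
func mustMarkRequired(cmd *cobra.Command, names ...string) {
	for _, name := range names {
		if err := cmd.MarkFlagRequired(name); err != nil {
			fmt.Fprintf(os.Stderr, "Error marking %s flag as required: %v\n", name, err)
		}
	}
}

func main() {
	cmd := &cobra.Command{Use: "demo", RunE: func(*cobra.Command, []string) error { return nil }}
	cmd.Flags().String("from", "", "source format")
	cmd.Flags().String("to", "", "target format")
	mustMarkRequired(cmd, "from", "to", "to-path") // "to-path" is undefined here, so an error is logged
	_ = cmd.Execute()
}
```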
@@ -239,6 +265,30 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
 		}
 		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})

+	case "drizzle":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for Drizzle format")
+		}
+		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "prisma":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for Prisma format")
+		}
+		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "typeorm":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for TypeORM format")
+		}
+		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "graphql", "gql":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for GraphQL format")
+		}
+		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
 	default:
 		return nil, fmt.Errorf("unsupported source format: %s", dbType)
 	}
@@ -287,9 +337,21 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
 		}
 		writer = wbun.NewWriter(writerOpts)

+	case "drizzle":
+		writer = wdrizzle.NewWriter(writerOpts)
+
 	case "pgsql", "postgres", "postgresql", "sql":
 		writer = wpgsql.NewWriter(writerOpts)

+	case "prisma":
+		writer = wprisma.NewWriter(writerOpts)
+
+	case "typeorm":
+		writer = wtypeorm.NewWriter(writerOpts)
+
+	case "graphql", "gql":
+		writer = wgraphql.NewWriter(writerOpts)
+
 	default:
 		return fmt.Errorf("unsupported target format: %s", dbType)
 	}
@@ -318,7 +380,7 @@
 	}

 	// For formats like DCTX that don't support full database writes, require schema filter
-	if strings.ToLower(dbType) == "dctx" {
+	if strings.EqualFold(dbType, "dctx") {
 		if len(db.Schemas) == 0 {
 			return fmt.Errorf("no schemas found in database")
 		}
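The final hunk swaps `strings.ToLower(dbType) == "dctx"` for `strings.EqualFold(dbType, "dctx")`. Both checks accept the same inputs here, but EqualFold compares case-insensitively without allocating a lowercased copy. A standalone illustration:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Both report a case-insensitive match with "dctx";
	// EqualFold does so without building an intermediate string.
	fmt.Println(strings.ToLower("DCTX") == "dctx") // true
	fmt.Println(strings.EqualFold("DCTX", "dctx")) // true
}
```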
cmd/relspec/diff.go

@@ -6,6 +6,8 @@ import (
 	"strings"
 	"time"

+	"github.com/spf13/cobra"
+
 	"git.warky.dev/wdevs/relspecgo/pkg/diff"
 	"git.warky.dev/wdevs/relspecgo/pkg/models"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers"
@@ -15,7 +17,6 @@ import (
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
-	"github.com/spf13/cobra"
 )

 var (
@@ -96,8 +97,14 @@ func init() {
 	diffCmd.Flags().StringVar(&outputFormat, "format", "summary", "Output format (summary, json, html)")
 	diffCmd.Flags().StringVar(&outputPath, "output", "", "Output file path (default: stdout for summary, required for json/html)")

-	diffCmd.MarkFlagRequired("from")
-	diffCmd.MarkFlagRequired("to")
+	err := diffCmd.MarkFlagRequired("from")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
+	}
+	err = diffCmd.MarkFlagRequired("to")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
+	}
 }

 func runDiff(cmd *cobra.Command, args []string) error {
cmd/relspec/edit.go (new file, 334 lines)
@@ -0,0 +1,334 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
	"git.warky.dev/wdevs/relspecgo/pkg/ui"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)

var (
	editSourceType   string
	editSourcePath   string
	editSourceConn   string
	editTargetType   string
	editTargetPath   string
	editSchemaFilter string
)

var editCmd = &cobra.Command{
	Use:   "edit",
	Short: "Edit database schema interactively with TUI",
	Long: `Edit database schemas from various formats using an interactive terminal UI.

Allows you to:
  - List and navigate schemas and tables
  - Create, edit, and delete schemas
  - Create, edit, and delete tables
  - Add, edit, and delete columns
  - Set table and column properties
  - Add constraints, indexes, and relationships

Supports reading from and writing to all supported formats:
Input formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go, file or directory)
  - bun: Bun model files (Go, file or directory)
  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL database (live connection)

Output formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go)
  - bun: Bun model files (Go)
  - drizzle: Drizzle ORM schema files (TypeScript)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL SQL schema

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require
  host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable

Examples:
  # Edit a DBML schema file
  relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml

  # Edit a PostgreSQL database
  relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
    --to pgsql --to-conn "postgres://user:pass@localhost/mydb"

  # Edit JSON schema and output to GORM
  relspec edit --from json --from-path db.json --to gorm --to-path models/

  # Edit GORM models in place
  relspec edit --from gorm --from-path ./models --to gorm --to-path ./models`,
	RunE: runEdit,
}

func init() {
	editCmd.Flags().StringVar(&editSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	editCmd.Flags().StringVar(&editSourcePath, "from-path", "", "Source file path (for file-based formats)")
	editCmd.Flags().StringVar(&editSourceConn, "from-conn", "", "Source connection string (for database formats)")
	editCmd.Flags().StringVar(&editTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	editCmd.Flags().StringVar(&editTargetPath, "to-path", "", "Target file path (for file-based formats)")
	editCmd.Flags().StringVar(&editSchemaFilter, "schema", "", "Filter to a specific schema by name")

	// Flags are now optional - if not provided, UI will prompt for load/save options
}

func runEdit(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Editor ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	var db *models.Database
	var loadConfig *ui.LoadConfig
	var saveConfig *ui.SaveConfig
	var err error

	// Check if source parameters are provided
	if editSourceType != "" {
		// Read source database
		fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
		fmt.Fprintf(os.Stderr, "      Format: %s\n", editSourceType)
		if editSourcePath != "" {
			fmt.Fprintf(os.Stderr, "      Path: %s\n", editSourcePath)
		}
		if editSourceConn != "" {
			fmt.Fprintf(os.Stderr, "      Conn: %s\n", maskPassword(editSourceConn))
		}

		db, err = readDatabaseForEdit(editSourceType, editSourcePath, editSourceConn, "Source")
		if err != nil {
			return fmt.Errorf("failed to read source: %w", err)
		}

		// Apply schema filter if specified
		if editSchemaFilter != "" {
			db = filterDatabaseBySchema(db, editSchemaFilter)
		}

		fmt.Fprintf(os.Stderr, "      ✓ Successfully read database '%s'\n", db.Name)
		fmt.Fprintf(os.Stderr, "      Found: %d schema(s)\n", len(db.Schemas))

		totalTables := 0
		for _, schema := range db.Schemas {
			totalTables += len(schema.Tables)
		}
		fmt.Fprintf(os.Stderr, "      Found: %d table(s)\n\n", totalTables)

		// Store load config
		loadConfig = &ui.LoadConfig{
			SourceType: editSourceType,
			FilePath:   editSourcePath,
			ConnString: editSourceConn,
		}
	} else {
		// No source parameters provided, UI will show load screen
		fmt.Fprintf(os.Stderr, "[1/2] No source specified, editor will prompt for database\n\n")
	}

	// Store save config if target parameters are provided
	if editTargetType != "" {
		saveConfig = &ui.SaveConfig{
			TargetType: editTargetType,
			FilePath:   editTargetPath,
		}
	}

	// Launch interactive TUI
	if editSourceType != "" {
		fmt.Fprintf(os.Stderr, "[2/3] Launching interactive editor...\n")
	} else {
		fmt.Fprintf(os.Stderr, "[2/2] Launching interactive editor...\n")
	}
	fmt.Fprintf(os.Stderr, "      Use arrow keys and shortcuts to navigate\n")
	fmt.Fprintf(os.Stderr, "      Press ? for help\n\n")

	editor := ui.NewSchemaEditorWithConfigs(db, loadConfig, saveConfig)
	if err := editor.Run(); err != nil {
		return fmt.Errorf("editor failed: %w", err)
	}

	// Only write to output if target parameters were provided and database was loaded from command line
	if editTargetType != "" && editSourceType != "" && db != nil {
		fmt.Fprintf(os.Stderr, "[3/3] Writing changes to output...\n")
		fmt.Fprintf(os.Stderr, "      Format: %s\n", editTargetType)
		if editTargetPath != "" {
			fmt.Fprintf(os.Stderr, "      Path: %s\n", editTargetPath)
		}

		// Get the potentially modified database from the editor
		err = writeDatabaseForEdit(editTargetType, editTargetPath, "", editor.GetDatabase(), "Target")
		if err != nil {
			return fmt.Errorf("failed to write output: %w", err)
		}

		fmt.Fprintf(os.Stderr, "      ✓ Successfully written database\n")
	}

	fmt.Fprintf(os.Stderr, "\n=== Edit complete ===\n")

	return nil
}

func readDatabaseForEdit(dbType, filePath, connString, label string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DBML format", label)
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for JSON format", label)
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for YAML format", label)
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GORM format", label)
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Bun format", label)
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		if connString == "" {
			return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		return nil, fmt.Errorf("%s: unsupported format: %s", label, dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, fmt.Errorf("%s: %w", label, err)
	}

	return db, nil
}

func writeDatabaseForEdit(dbType, filePath, connString string, db *models.Database, label string) error {
	var writer writers.Writer

	switch strings.ToLower(dbType) {
	case "dbml":
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "dctx":
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drawdb":
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "graphql":
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "json":
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "yaml":
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "gorm":
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "bun":
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drizzle":
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "prisma":
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "typeorm":
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "pgsql":
		writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	default:
		return fmt.Errorf("%s: unsupported format: %s", label, dbType)
	}

	err := writer.WriteDatabase(db)
	if err != nil {
		return fmt.Errorf("%s: %w", label, err)
	}

	return nil
}
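edit.go calls `getCurrentTimestamp`, `maskPassword`, and `filterDatabaseBySchema`, which are defined elsewhere in cmd/relspec and do not appear in this comparison. A plausible sketch of the first two, offered purely as an assumption about their behavior:

```go
package main

import (
	"regexp"
	"time"
)

// getCurrentTimestamp returns a human-readable start time for the banner.
// (Hypothetical implementation; the real one is not shown in this diff.)
func getCurrentTimestamp() string {
	return time.Now().Format("2006-01-02 15:04:05")
}

// maskPassword hides the credential portion of a URL-style connection
// string before it is echoed to the terminal. (Hypothetical implementation.)
func maskPassword(conn string) string {
	re := regexp.MustCompile(`(://[^:/@]+):[^@]+@`)
	return re.ReplaceAllString(conn, "$1:***@")
}
```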
cmd/relspec/inspect.go (new file, 321 lines)
@@ -0,0 +1,321 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

var (
	inspectSourceType   string
	inspectSourcePath   string
	inspectSourceConn   string
	inspectRulesPath    string
	inspectOutputFormat string
	inspectOutputPath   string
	inspectSchemaFilter string
)

var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long: `Inspect database schemas from various formats and validate against configurable rules.

Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and generates validation reports.

Input formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go, file or directory)
  - bun: Bun model files (Go, file or directory)
  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL database (live connection)

Output formats:
  - markdown: Human-readable markdown report (default, with ANSI colors for terminal)
  - json: JSON report for tooling integration

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require
  host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable

Examples:
  # Inspect a PostgreSQL database with default rules
  relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

  # Inspect a DBML file with custom rules
  relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

  # Inspect and output JSON report to file
  relspec inspect --from json --from-path db.json \
    --output-format json --output report.json

  # Inspect specific schema only
  relspec inspect --from pgsql --from-conn "..." --schema public`,
	RunE: runInspect,
}

func init() {
	inspectCmd.Flags().StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	inspectCmd.Flags().StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
	inspectCmd.Flags().StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
||||
inspectCmd.Flags().StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
|
||||
inspectCmd.Flags().StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
|
||||
inspectCmd.Flags().StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
|
||||
inspectCmd.Flags().StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")
|
||||
|
||||
err := inspectCmd.MarkFlagRequired("from")
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
func runInspect(cmd *cobra.Command, args []string) error {
|
||||
fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
|
||||
|
||||
// Read source database
|
||||
fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
|
||||
fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
|
||||
if inspectSourcePath != "" {
|
||||
fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
|
||||
}
|
||||
if inspectSourceConn != "" {
|
||||
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
|
||||
}
|
||||
|
||||
db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read source: %w", err)
|
||||
}
|
||||
|
||||
// Apply schema filter if specified
|
||||
if inspectSchemaFilter != "" {
|
||||
db = filterDatabaseBySchema(db, inspectSchemaFilter)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
|
||||
fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
|
||||
|
||||
totalTables := 0
|
||||
for _, schema := range db.Schemas {
|
||||
totalTables += len(schema.Tables)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)
|
||||
|
||||
// Load rules configuration
|
||||
fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
|
||||
fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)
|
||||
|
||||
config, err := inspector.LoadConfig(inspectRulesPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load rules config: %w", err)
|
||||
}
|
||||
|
||||
enabledCount := 0
|
||||
for _, rule := range config.Rules {
|
||||
if rule.IsEnabled() {
|
||||
enabledCount++
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)
|
||||
|
||||
// Run inspection
|
||||
fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")
|
||||
|
||||
insp := inspector.NewInspector(db, config)
|
||||
report, err := insp.Inspect()
|
||||
if err != nil {
|
||||
return fmt.Errorf("inspection failed: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
|
||||
fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
|
||||
fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)
|
||||
|
||||
// Format and output report
|
||||
var formattedReport string
|
||||
switch strings.ToLower(inspectOutputFormat) {
|
||||
case "json":
|
||||
formatter := inspector.NewJSONFormatter()
|
||||
formattedReport, err = formatter.Format(report)
|
||||
case "markdown", "md":
|
||||
// Determine output writer for terminal detection
|
||||
var output *os.File
|
||||
if inspectOutputPath != "" {
|
||||
output, err = os.Create(inspectOutputPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create output file: %w", err)
|
||||
}
|
||||
defer output.Close()
|
||||
} else {
|
||||
output = os.Stdout
|
||||
}
|
||||
|
||||
formatter := inspector.NewMarkdownFormatter(output)
|
||||
formattedReport, err = formatter.Format(report)
|
||||
default:
|
||||
return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to format report: %w", err)
|
||||
}
|
||||
|
||||
// Write output
|
||||
if inspectOutputPath != "" {
|
||||
err = os.WriteFile(inspectOutputPath, []byte(formattedReport), 0644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write output file: %w", err)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
|
||||
} else {
|
||||
fmt.Println(formattedReport)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())
|
||||
|
||||
// Exit with appropriate code
|
||||
if report.HasErrors() {
|
||||
return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
|
||||
var reader readers.Reader
|
||||
|
||||
switch strings.ToLower(dbType) {
|
||||
case "dbml":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for DBML format")
|
||||
}
|
||||
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "dctx":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for DCTX format")
|
||||
}
|
||||
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "drawdb":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for DrawDB format")
|
||||
}
|
||||
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "graphql":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for GraphQL format")
|
||||
}
|
||||
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "json":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for JSON format")
|
||||
}
|
||||
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "yaml", "yml":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for YAML format")
|
||||
}
|
||||
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "gorm":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for GORM format")
|
||||
}
|
||||
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "bun":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for Bun format")
|
||||
}
|
||||
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "drizzle":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for Drizzle format")
|
||||
}
|
||||
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "prisma":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for Prisma format")
|
||||
}
|
||||
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "typeorm":
|
||||
if filePath == "" {
|
||||
return nil, fmt.Errorf("file path is required for TypeORM format")
|
||||
}
|
||||
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||
|
||||
case "pgsql", "postgres", "postgresql":
|
||||
if connString == "" {
|
||||
return nil, fmt.Errorf("connection string is required for PostgreSQL format")
|
||||
}
|
||||
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||
}
|
||||
|
||||
db, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
|
||||
filtered := &models.Database{
|
||||
Name: db.Name,
|
||||
Description: db.Description,
|
||||
DatabaseType: db.DatabaseType,
|
||||
DatabaseVersion: db.DatabaseVersion,
|
||||
SourceFormat: db.SourceFormat,
|
||||
Schemas: []*models.Schema{},
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
if schema.Name == schemaName {
|
||||
filtered.Schemas = append(filtered.Schemas, schema)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
433
cmd/relspec/merge.go
Normal file
@@ -0,0 +1,433 @@
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/merge"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)

var (
	mergeTargetType    string
	mergeTargetPath    string
	mergeTargetConn    string
	mergeSourceType    string
	mergeSourcePath    string
	mergeSourceConn    string
	mergeOutputType    string
	mergeOutputPath    string
	mergeOutputConn    string
	mergeSkipDomains   bool
	mergeSkipRelations bool
	mergeSkipEnums     bool
	mergeSkipViews     bool
	mergeSkipSequences bool
	mergeSkipTables    string // Comma-separated table names to skip
	mergeVerbose       bool
)

var mergeCmd = &cobra.Command{
	Use:   "merge",
	Short: "Merge database schemas (additive only - adds missing items)",
	Long: `Merge one database schema into another. Performs additive merging only:
adds missing schemas, tables, columns, and other objects without modifying
or deleting existing items.

The target database is loaded first, then the source database is merged into it.
The result can be saved to a new format or updated in place.

Examples:
  # Merge two JSON schemas
  relspec merge --target json --target-path base.json \
    --source json --source-path additional.json \
    --output json --output-path merged.json

  # Merge from PostgreSQL into JSON
  relspec merge --target json --target-path mydb.json \
    --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
    --output json --output-path combined.json

  # Merge DBML and YAML, skip relations
  relspec merge --target dbml --target-path schema.dbml \
    --source yaml --source-path tables.yaml \
    --output dbml --output-path merged.dbml \
    --skip-relations

  # Merge and save back to target format
  relspec merge --target json --target-path base.json \
    --source json --source-path patch.json \
    --output json --output-path base.json`,
	RunE: runMerge,
}

func init() {
	// Target database flags
	mergeCmd.Flags().StringVar(&mergeTargetType, "target", "", "Target format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeTargetPath, "target-path", "", "Target file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeTargetConn, "target-conn", "", "Target connection string (required for pgsql)")

	// Source database flags
	mergeCmd.Flags().StringVar(&mergeSourceType, "source", "", "Source format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeSourceConn, "source-conn", "", "Source connection string (required for pgsql)")

	// Output flags
	mergeCmd.Flags().StringVar(&mergeOutputType, "output", "", "Output format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
	mergeCmd.Flags().StringVar(&mergeOutputPath, "output-path", "", "Output file path (required for file-based formats)")
	mergeCmd.Flags().StringVar(&mergeOutputConn, "output-conn", "", "Output connection string (for pgsql)")

	// Merge options
	mergeCmd.Flags().BoolVar(&mergeSkipDomains, "skip-domains", false, "Skip domains during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipRelations, "skip-relations", false, "Skip relations during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipEnums, "skip-enums", false, "Skip enums during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipViews, "skip-views", false, "Skip views during merge")
	mergeCmd.Flags().BoolVar(&mergeSkipSequences, "skip-sequences", false, "Skip sequences during merge")
	mergeCmd.Flags().StringVar(&mergeSkipTables, "skip-tables", "", "Comma-separated list of table names to skip during merge")
	mergeCmd.Flags().BoolVar(&mergeVerbose, "verbose", false, "Show verbose output")
}

func runMerge(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Merge ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Validate required flags
	if mergeTargetType == "" {
		return fmt.Errorf("--target format is required")
	}
	if mergeSourceType == "" {
		return fmt.Errorf("--source format is required")
	}
	if mergeOutputType == "" {
		return fmt.Errorf("--output format is required")
	}

	// Validate and expand file paths
	if mergeTargetType != "pgsql" {
		if mergeTargetPath == "" {
			return fmt.Errorf("--target-path is required for %s format", mergeTargetType)
		}
		mergeTargetPath = expandPath(mergeTargetPath)
	} else if mergeTargetConn == "" {
		return fmt.Errorf("--target-conn is required for pgsql format")
	}

	if mergeSourceType != "pgsql" {
		if mergeSourcePath == "" {
			return fmt.Errorf("--source-path is required for %s format", mergeSourceType)
		}
		mergeSourcePath = expandPath(mergeSourcePath)
	} else if mergeSourceConn == "" {
		return fmt.Errorf("--source-conn is required for pgsql format")
	}

	if mergeOutputType != "pgsql" {
		if mergeOutputPath == "" {
			return fmt.Errorf("--output-path is required for %s format", mergeOutputType)
		}
		mergeOutputPath = expandPath(mergeOutputPath)
	}

	// Step 1: Read target database
	fmt.Fprintf(os.Stderr, "[1/4] Reading target database...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeTargetType)
	if mergeTargetPath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeTargetPath)
	}
	if mergeTargetConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeTargetConn))
	}

	targetDB, err := readDatabaseForMerge(mergeTargetType, mergeTargetPath, mergeTargetConn, "Target")
	if err != nil {
		return fmt.Errorf("failed to read target database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read target database '%s'\n", targetDB.Name)
	printDatabaseStats(targetDB)

	// Step 2: Read source database
	fmt.Fprintf(os.Stderr, "\n[2/4] Reading source database...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeSourceType)
	if mergeSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
	}
	if mergeSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
	}

	sourceDB, err := readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
	if err != nil {
		return fmt.Errorf("failed to read source database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read source database '%s'\n", sourceDB.Name)
	printDatabaseStats(sourceDB)

	// Step 3: Merge databases
	fmt.Fprintf(os.Stderr, "\n[3/4] Merging databases...\n")

	opts := &merge.MergeOptions{
		SkipDomains:   mergeSkipDomains,
		SkipRelations: mergeSkipRelations,
		SkipEnums:     mergeSkipEnums,
		SkipViews:     mergeSkipViews,
		SkipSequences: mergeSkipSequences,
	}

	// Parse skip-tables flag
	if mergeSkipTables != "" {
		opts.SkipTableNames = parseSkipTables(mergeSkipTables)
		if len(opts.SkipTableNames) > 0 {
			fmt.Fprintf(os.Stderr, " Skipping tables: %s\n", mergeSkipTables)
		}
	}

	// MergeDatabases is additive: objects present in sourceDB but missing from
	// targetDB are added to targetDB in place; existing objects are untouched.
	result := merge.MergeDatabases(targetDB, sourceDB, opts)

	// Update timestamp
	targetDB.UpdateDate()

	// Print merge summary
	fmt.Fprintf(os.Stderr, " ✓ Merge complete\n\n")
	fmt.Fprintf(os.Stderr, "%s\n", merge.GetMergeSummary(result))

	// Step 4: Write output
	fmt.Fprintf(os.Stderr, "\n[4/4] Writing output...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", mergeOutputType)
	if mergeOutputPath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", mergeOutputPath)
	}

	err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, mergeOutputConn, targetDB, "Output")
	if err != nil {
		return fmt.Errorf("failed to write output: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully written merged database\n")
	fmt.Fprintf(os.Stderr, "\n=== Merge complete ===\n")

	return nil
}

func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.Database, error) {
	var reader readers.Reader

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DBML format", label)
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for JSON format", label)
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for YAML format", label)
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for GORM format", label)
		}
		reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Bun format", label)
		}
		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		if connString == "" {
			return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		return nil, fmt.Errorf("%s: unsupported format '%s'", label, dbType)
	}

	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, err
	}

	return db, nil
}

func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string) error {
	var writer writers.Writer

	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DBML format", label)
		}
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "dctx":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drawdb":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "graphql":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for GraphQL format", label)
		}
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "json":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for JSON format", label)
		}
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "yaml":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for YAML format", label)
		}
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "gorm":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for GORM format", label)
		}
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "bun":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Bun format", label)
		}
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drizzle":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Drizzle format", label)
		}
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "prisma":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for Prisma format", label)
		}
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "typeorm":
		if filePath == "" {
			return fmt.Errorf("%s: file path is required for TypeORM format", label)
		}
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "pgsql":
		writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	default:
		return fmt.Errorf("%s: unsupported format '%s'", label, dbType)
	}

	return writer.WriteDatabase(db)
}

func expandPath(path string) string {
	if path == "~" || strings.HasPrefix(path, "~/") {
		home, err := os.UserHomeDir()
		if err == nil {
			return filepath.Join(home, path[1:])
		}
	}
	return path
}

func printDatabaseStats(db *models.Database) {
	totalTables := 0
	totalColumns := 0
	totalConstraints := 0
	totalIndexes := 0

	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
		for _, table := range schema.Tables {
			totalColumns += len(table.Columns)
			totalConstraints += len(table.Constraints)
			totalIndexes += len(table.Indexes)
		}
	}

	fmt.Fprintf(os.Stderr, " Schemas: %d, Tables: %d, Columns: %d, Constraints: %d, Indexes: %d\n",
		len(db.Schemas), totalTables, totalColumns, totalConstraints, totalIndexes)
}

func parseSkipTables(skipTablesStr string) map[string]bool {
	skipTables := make(map[string]bool)
	if skipTablesStr == "" {
		return skipTables
	}

	// Split by comma and trim whitespace
	parts := strings.Split(skipTablesStr, ",")
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		if trimmed != "" {
			// Store in lowercase for case-insensitive matching
			skipTables[strings.ToLower(trimmed)] = true
		}
	}

	return skipTables
}
@@ -18,4 +18,10 @@ JSON, YAML, SQL, etc.).`,
func init() {
	rootCmd.AddCommand(convertCmd)
	rootCmd.AddCommand(diffCmd)
	rootCmd.AddCommand(inspectCmd)
	rootCmd.AddCommand(scriptsCmd)
	rootCmd.AddCommand(templCmd)
	rootCmd.AddCommand(editCmd)
	rootCmd.AddCommand(mergeCmd)
	rootCmd.AddCommand(splitCmd)
}
263
cmd/relspec/scripts.go
Normal file
@@ -0,0 +1,263 @@
package main

import (
	"fmt"
	"os"
	"sort"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

var (
	scriptsDir        string
	scriptsConn       string
	scriptsSchemaName string
	scriptsDBName     string
)

var scriptsCmd = &cobra.Command{
	Use:   "scripts",
	Short: "Manage and execute SQL migration scripts",
	Long: `Manage and execute SQL migration scripts from a directory.

Scripts must follow the naming pattern (both separators supported):
  {priority}_{sequence}_{name}.sql or .pgsql
  {priority}-{sequence}-{name}.sql or .pgsql

Example filenames (underscore format):
  1_001_create_users.sql   # Priority 1, Sequence 1
  1_002_create_posts.sql   # Priority 1, Sequence 2
  2_001_add_indexes.pgsql  # Priority 2, Sequence 1

Example filenames (hyphen format):
  1-001-create-users.sql   # Priority 1, Sequence 1
  1-002-create-posts.sql   # Priority 1, Sequence 2
  10-10-create-newid.pgsql # Priority 10, Sequence 10

Both formats can be mixed in the same directory.
Scripts are executed in order: Priority (ascending), then Sequence (ascending).`,
}

var scriptsListCmd = &cobra.Command{
	Use:   "list",
	Short: "List SQL scripts from a directory",
	Long: `List SQL scripts from a directory and show their execution order.

The scripts are read from the specified directory and displayed in the order
they would be executed (Priority ascending, then Sequence ascending).

Example:
  relspec scripts list --dir ./migrations`,
	RunE: runScriptsList,
}

var scriptsExecuteCmd = &cobra.Command{
	Use:   "execute",
	Short: "Execute SQL scripts against a database",
	Long: `Execute SQL scripts from a directory against a PostgreSQL database.

Scripts are executed in order: Priority (ascending), then Sequence (ascending).
Execution stops immediately on the first error.

The directory is scanned recursively for files matching the patterns:
  {priority}_{sequence}_{name}.sql or .pgsql (underscore format)
  {priority}-{sequence}-{name}.sql or .pgsql (hyphen format)

PostgreSQL Connection String Examples:
  postgres://username:password@localhost:5432/database_name
  postgres://username:password@localhost/database_name
  postgresql://user:pass@host:5432/dbname?sslmode=disable
  postgresql://user:pass@host/dbname?sslmode=require

Examples:
  # Execute migration scripts
  relspec scripts execute --dir ./migrations \
    --conn "postgres://user:pass@localhost:5432/mydb"

  # Execute with custom schema name
  relspec scripts execute --dir ./migrations \
    --conn "postgres://localhost/mydb" \
    --schema public

  # Execute with SSL disabled
  relspec scripts execute --dir ./sql \
    --conn "postgres://user:pass@localhost/db?sslmode=disable"`,
	RunE: runScriptsExecute,
}

func init() {
	// List command flags
	scriptsListCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsListCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsListCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
	err := scriptsListCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}

	// Execute command flags
	scriptsExecuteCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
	scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")

	err = scriptsExecuteCmd.MarkFlagRequired("dir")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
	}
	err = scriptsExecuteCmd.MarkFlagRequired("conn")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking conn flag as required: %v\n", err)
	}

	// Add subcommands to scripts command
	scriptsCmd.AddCommand(scriptsListCmd)
	scriptsCmd.AddCommand(scriptsExecuteCmd)
}

func runScriptsList(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== SQL Scripts List ===\n")
	fmt.Fprintf(os.Stderr, "Directory: %s\n\n", scriptsDir)

	// Read scripts from directory
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: scriptsDir,
		Metadata: map[string]any{
			"schema_name":   scriptsSchemaName,
			"database_name": scriptsDBName,
		},
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		return fmt.Errorf("failed to read scripts: %w", err)
	}

	if len(db.Schemas) == 0 {
		fmt.Fprintf(os.Stderr, "No schemas found\n")
		return nil
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) == 0 {
		fmt.Fprintf(os.Stderr, "No SQL scripts found matching pattern {priority}_{sequence}_{name}.sql (or the hyphen-separated variant)\n")
		return nil
	}

	// Sort scripts by Priority then Sequence
	type scriptInfo struct {
		name     string
		priority int
		sequence uint
		sqlLines int
	}
	sortedScripts := make([]scriptInfo, len(schema.Scripts))

	for i, script := range schema.Scripts {
		// Count the number of lines in the SQL body
		sqlLines := 0
		for _, b := range []byte(script.SQL) {
			if b == '\n' {
				sqlLines++
			}
		}
		if len(script.SQL) > 0 && script.SQL[len(script.SQL)-1] != '\n' {
			sqlLines++ // count the final line when there is no trailing newline
		}

		sortedScripts[i] = scriptInfo{
			name:     script.Name,
			priority: script.Priority,
			sequence: script.Sequence,
			sqlLines: sqlLines,
		}
	}

	sort.Slice(sortedScripts, func(i, j int) bool {
		if sortedScripts[i].priority != sortedScripts[j].priority {
			return sortedScripts[i].priority < sortedScripts[j].priority
		}
		return sortedScripts[i].sequence < sortedScripts[j].sequence
	})

	fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "No.", "Priority", "Sequence", "Name", "Lines")
	fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "----", "--------", "--------", "------------------------------", "-----")

	for i, script := range sortedScripts {
		fmt.Fprintf(os.Stderr, "%-4d %-10d %-8d %-30s %d\n",
			i+1,
			script.priority,
			script.sequence,
			script.name,
			script.sqlLines,
		)
	}

	fmt.Fprintf(os.Stderr, "\n")
	return nil
}

func runScriptsExecute(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== SQL Scripts Execution ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n", getCurrentTimestamp())
	fmt.Fprintf(os.Stderr, "Directory: %s\n", scriptsDir)
	fmt.Fprintf(os.Stderr, "Database: %s\n\n", maskPassword(scriptsConn))

	// Step 1: Read scripts from directory
	fmt.Fprintf(os.Stderr, "[1/2] Reading SQL scripts...\n")

	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: scriptsDir,
		Metadata: map[string]any{
			"schema_name":   scriptsSchemaName,
			"database_name": scriptsDBName,
		},
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		return fmt.Errorf("failed to read scripts: %w", err)
	}

	if len(db.Schemas) == 0 {
		return fmt.Errorf("no schemas found")
	}

	schema := db.Schemas[0]
	if len(schema.Scripts) == 0 {
		fmt.Fprintf(os.Stderr, " No scripts found. Nothing to execute.\n\n")
		return nil
	}

	fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))

	// Step 2: Execute scripts
	fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence)...\n\n")

	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": scriptsConn,
		},
	})

	if err := writer.WriteSchema(schema); err != nil {
		fmt.Fprintf(os.Stderr, "\n")
		return fmt.Errorf("execution failed: %w", err)
	}

	fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
	fmt.Fprintf(os.Stderr, "Successfully executed %d script(s)\n\n", len(schema.Scripts))

	return nil
}
318
cmd/relspec/split.go
Normal file
@@ -0,0 +1,318 @@
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

var (
	splitSourceType    string
	splitSourcePath    string
	splitSourceConn    string
	splitTargetType    string
	splitTargetPath    string
	splitSchemas       string
	splitTables        string
	splitPackageName   string
	splitDatabaseName  string
	splitExcludeSchema string
	splitExcludeTables string
)

var splitCmd = &cobra.Command{
	Use:   "split",
	Short: "Split database schemas to extract selected tables into a separate database",
	Long: `Extract selected schemas and tables from a database and write them to a separate output.

The split command allows you to:
  - Select specific schemas to include in the output
  - Select specific tables within schemas
  - Exclude specific schemas or tables if preferred
  - Export the selected subset to any supported format

Input formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go, file or directory)
  - bun: Bun model files (Go, file or directory)
  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL database (live connection)

Output formats:
  - dbml: DBML schema files
  - dctx: DCTX schema files
  - drawdb: DrawDB JSON files
  - graphql: GraphQL schema files (.graphql, SDL)
  - json: JSON database schema
  - yaml: YAML database schema
  - gorm: GORM model files (Go)
  - bun: Bun model files (Go)
  - drizzle: Drizzle ORM schema files (TypeScript)
  - prisma: Prisma schema files (.prisma)
  - typeorm: TypeORM entity files (TypeScript)
  - pgsql: PostgreSQL SQL schema

Examples:
  # Split specific schemas from DBML
  relspec split --from dbml --from-path schema.dbml \
    --schemas public,auth \
    --to json --to-path subset.json

  # Extract specific tables from PostgreSQL
  relspec split --from pgsql \
    --from-conn "postgres://user:pass@localhost:5432/mydb" \
    --schemas public \
    --tables users,orders,products \
    --to dbml --to-path subset.dbml

  # Exclude specific tables
  relspec split --from json --from-path schema.json \
    --exclude-tables "audit_log,system_config,temp_data" \
    --to json --to-path public_schema.json

  # Split and convert to GORM
  relspec split --from json --from-path schema.json \
    --tables "users,posts,comments" \
    --to gorm --to-path models/ --package models \
    --database-name MyAppDB

  # Exclude specific schema and tables
  relspec split --from pgsql \
    --from-conn "postgres://user:pass@localhost/db" \
    --exclude-schema pg_catalog,information_schema \
    --exclude-tables "temp_users,debug_logs" \
    --to json --to-path public_schema.json`,
	RunE: runSplit,
}

func init() {
	splitCmd.Flags().StringVar(&splitSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	splitCmd.Flags().StringVar(&splitSourcePath, "from-path", "", "Source file path (for file-based formats)")
	splitCmd.Flags().StringVar(&splitSourceConn, "from-conn", "", "Source connection string (for database formats)")

	splitCmd.Flags().StringVar(&splitTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
	splitCmd.Flags().StringVar(&splitTargetPath, "to-path", "", "Target output path (file or directory)")
	splitCmd.Flags().StringVar(&splitPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
	splitCmd.Flags().StringVar(&splitDatabaseName, "database-name", "", "Override database name in output")

	splitCmd.Flags().StringVar(&splitSchemas, "schemas", "", "Comma-separated list of schema names to include")
	splitCmd.Flags().StringVar(&splitTables, "tables", "", "Comma-separated list of table names to include (case-insensitive)")
	splitCmd.Flags().StringVar(&splitExcludeSchema, "exclude-schema", "", "Comma-separated list of schema names to exclude")
	splitCmd.Flags().StringVar(&splitExcludeTables, "exclude-tables", "", "Comma-separated list of table names to exclude (case-insensitive)")

	err := splitCmd.MarkFlagRequired("from")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
	}
	err = splitCmd.MarkFlagRequired("to")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
	}
	err = splitCmd.MarkFlagRequired("to-path")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
	}
}

func runSplit(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Split ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read source database
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", splitSourceType)
	if splitSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", splitSourcePath)
	}
	if splitSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(splitSourceConn))
	}

	db, err := readDatabaseForConvert(splitSourceType, splitSourcePath, splitSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))

	totalTables := 0
	for _, schema := range db.Schemas {
		totalTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)

	// Filter the database
	fmt.Fprintf(os.Stderr, "[2/3] Filtering schemas and tables...\n")
	filteredDB, err := filterDatabase(db)
	if err != nil {
		return fmt.Errorf("failed to filter database: %w", err)
	}

	if splitDatabaseName != "" {
		filteredDB.Name = splitDatabaseName
	}

	filteredTables := 0
	for _, schema := range filteredDB.Schemas {
		filteredTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " ✓ Filtered to: %d schema(s), %d table(s)\n\n", len(filteredDB.Schemas), filteredTables)

	// Write to target format
	fmt.Fprintf(os.Stderr, "[3/3] Writing to target format...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", splitTargetType)
	fmt.Fprintf(os.Stderr, " Output: %s\n", splitTargetPath)
	if splitPackageName != "" {
		fmt.Fprintf(os.Stderr, " Package: %s\n", splitPackageName)
	}

	err = writeDatabase(
		filteredDB,
		splitTargetType,
		splitTargetPath,
		splitPackageName,
		"", // no schema filter for split
	)
	if err != nil {
		return fmt.Errorf("failed to write output: %w", err)
	}

	fmt.Fprintf(os.Stderr, " ✓ Successfully written to '%s'\n\n", splitTargetPath)
	fmt.Fprintf(os.Stderr, "=== Split Completed Successfully ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())

	return nil
}

// filterDatabase filters the database based on provided criteria
func filterDatabase(db *models.Database) (*models.Database, error) {
	filteredDB := &models.Database{
		Name:            db.Name,
		Description:     db.Description,
		Comment:         db.Comment,
		DatabaseType:    db.DatabaseType,
		DatabaseVersion: db.DatabaseVersion,
		SourceFormat:    db.SourceFormat,
		UpdatedAt:       db.UpdatedAt,
		GUID:            db.GUID,
		Schemas:         []*models.Schema{},
		Domains:         db.Domains, // Keep domains for now
	}

	// Parse filter flags
	includeSchemas := parseCommaSeparated(splitSchemas)
	includeTables := parseCommaSeparated(splitTables)
	excludeSchemas := parseCommaSeparated(splitExcludeSchema)
	excludeTables := parseCommaSeparated(splitExcludeTables)

	// Convert table names to lowercase for case-insensitive matching
	includeTablesLower := make(map[string]bool)
	for _, t := range includeTables {
		includeTablesLower[strings.ToLower(t)] = true
	}

	excludeTablesLower := make(map[string]bool)
	for _, t := range excludeTables {
		excludeTablesLower[strings.ToLower(t)] = true
	}

	// Iterate through schemas
	for _, schema := range db.Schemas {
		// Check if schema should be excluded
		if contains(excludeSchemas, schema.Name) {
			continue
		}

		// Check if schema should be included
		if len(includeSchemas) > 0 && !contains(includeSchemas, schema.Name) {
			continue
		}

		// Create a copy of the schema with filtered tables
		filteredSchema := &models.Schema{
			Name:        schema.Name,
			Description: schema.Description,
			Owner:       schema.Owner,
			Permissions: schema.Permissions,
			Comment:     schema.Comment,
			Metadata:    schema.Metadata,
			Scripts:     schema.Scripts,
			Sequence:    schema.Sequence,
			Relations:   schema.Relations,
			Enums:       schema.Enums,
			UpdatedAt:   schema.UpdatedAt,
			GUID:        schema.GUID,
			Tables:      []*models.Table{},
			Views:       schema.Views,
			Sequences:   schema.Sequences,
		}

		// Filter tables within the schema
		for _, table := range schema.Tables {
			tableLower := strings.ToLower(table.Name)

			// Check if table should be excluded
			if excludeTablesLower[tableLower] {
				continue
			}

			// If specific tables are requested, only include those
			if len(includeTablesLower) > 0 && !includeTablesLower[tableLower] {
				continue
			}
			filteredSchema.Tables = append(filteredSchema.Tables, table)
		}

		// Only add schema if it has tables (unless no table filter was specified)
		if len(filteredSchema.Tables) > 0 || (len(includeTablesLower) == 0 && len(excludeTablesLower) == 0) {
			filteredDB.Schemas = append(filteredDB.Schemas, filteredSchema)
		}
	}

	if len(filteredDB.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas matched the filter criteria")
	}

	return filteredDB, nil
}

// parseCommaSeparated parses a comma-separated string into a slice, trimming whitespace
func parseCommaSeparated(s string) []string {
	if s == "" {
		return []string{}
	}

	parts := strings.Split(s, ",")
	result := make([]string, 0, len(parts))
	for _, p := range parts {
		trimmed := strings.TrimSpace(p)
		if trimmed != "" {
			result = append(result, trimmed)
		}
	}
	return result
}

// contains checks if a string is in a slice
func contains(slice []string, item string) bool {
	for _, s := range slice {
		if s == item {
			return true
		}
	}
	return false
}
167
cmd/relspec/templ.go
Normal file
@@ -0,0 +1,167 @@
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wtemplate "git.warky.dev/wdevs/relspecgo/pkg/writers/template"
)

var (
	templSourceType      string
	templSourcePath      string
	templSourceConn      string
	templTemplatePath    string
	templOutputPath      string
	templSchemaFilter    string
	templMode            string
	templFilenamePattern string
)

var templCmd = &cobra.Command{
	Use:   "templ",
	Short: "Apply custom templates to database schemas",
	Long: `Apply custom Go text templates to database schemas with flexible execution modes.

The templ command allows you to transform database schemas using custom Go text
templates. It supports multiple execution modes for different use cases:

Execution Modes:
  database  Execute template once for entire database (single output file)
  schema    Execute template once per schema (one file per schema)
  script    Execute template once per script (one file per script)
  table     Execute template once per table (one file per table)

Supported Input Formats:
  dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql

Template Functions:
  String utilities: toUpper, toLower, toCamelCase, toPascalCase, toSnakeCase, toKebabCase,
    pluralize, singularize, title, trim, split, join, replace

  Type conversion: sqlToGo, sqlToTypeScript, sqlToJava, sqlToPython, sqlToRust,
    sqlToCSharp, sqlToPhp

  Filtering: filterTables, filterColumns, filterPrimaryKeys, filterForeignKeys,
    filterNullable, filterNotNull, filterColumnsByType

  Formatting: toJSON, toJSONPretty, toYAML, indent, escape, comment

  Loop helpers: enumerate, batch, reverse, first, last, skip, take, concat,
    unique, sortBy, groupBy

  Safe access: get, getOr, getPath, has, keys, values, merge, pick, omit,
    sliceContains, indexOf, pluck

Examples:
  # Generate documentation from PostgreSQL database
  relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
    --template docs.tmpl --output schema-docs.md

  # Generate one TypeScript model file per table
  relspec templ --from dbml --from-path schema.dbml \
    --template ts-model.tmpl --mode table \
    --output ./models/ \
    --filename-pattern "{{.Name | toCamelCase}}.ts"

  # Generate schema documentation files
  relspec templ --from json --from-path db.json \
    --template schema.tmpl --mode schema \
    --output ./docs/ \
    --filename-pattern "{{.Name}}_schema.md"`,
	RunE: runTempl,
}

func init() {
	templCmd.Flags().StringVar(&templSourceType, "from", "", "Source format (dbml, pgsql, json, etc.)")
	templCmd.Flags().StringVar(&templSourcePath, "from-path", "", "Source file path (for file-based sources)")
	templCmd.Flags().StringVar(&templSourceConn, "from-conn", "", "Source connection string (for database sources)")
	templCmd.Flags().StringVar(&templTemplatePath, "template", "", "Template file path (required)")
	templCmd.Flags().StringVar(&templOutputPath, "output", "", "Output path (file or directory, empty for stdout)")
	templCmd.Flags().StringVar(&templSchemaFilter, "schema", "", "Filter to specific schema")
	templCmd.Flags().StringVar(&templMode, "mode", "database", "Execution mode: database, schema, script, or table")
	templCmd.Flags().StringVar(&templFilenamePattern, "filename-pattern", "{{.Name}}.txt", "Filename pattern for multi-output modes")

	_ = templCmd.MarkFlagRequired("from")
	_ = templCmd.MarkFlagRequired("template")
}

func runTempl(cmd *cobra.Command, args []string) error {
	// Print header
	fmt.Fprintf(os.Stderr, "=== RelSpec Template Execution ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Read database using the same function as convert
	fmt.Fprintf(os.Stderr, "Reading from %s...\n", templSourceType)
	db, err := readDatabaseForConvert(templSourceType, templSourcePath, templSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	// Print database stats
	schemaCount := len(db.Schemas)
	tableCount := 0
	for _, schema := range db.Schemas {
		tableCount += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, "✓ Successfully read database: %s\n", db.Name)
	fmt.Fprintf(os.Stderr, " Schemas: %d\n", schemaCount)
	fmt.Fprintf(os.Stderr, " Tables: %d\n\n", tableCount)

	// Apply schema filter if specified
	if templSchemaFilter != "" {
		fmt.Fprintf(os.Stderr, "Filtering to schema: %s\n", templSchemaFilter)
		found := false
		for _, schema := range db.Schemas {
			if schema.Name == templSchemaFilter {
				db.Schemas = []*models.Schema{schema}
				found = true
				break
			}
		}
		if !found {
			return fmt.Errorf("schema not found: %s", templSchemaFilter)
		}
	}

	// Create template writer
	fmt.Fprintf(os.Stderr, "Loading template: %s\n", templTemplatePath)
	fmt.Fprintf(os.Stderr, "Execution mode: %s\n", templMode)

	metadata := map[string]interface{}{
		"template_path":    templTemplatePath,
		"mode":             templMode,
		"filename_pattern": templFilenamePattern,
	}

	writerOpts := &writers.WriterOptions{
		OutputPath: templOutputPath,
		Metadata:   metadata,
	}

	writer, err := wtemplate.NewWriter(writerOpts)
	if err != nil {
		return fmt.Errorf("failed to create template writer: %w", err)
	}

	// Execute template
	fmt.Fprintf(os.Stderr, "\nExecuting template...\n")
	if err := writer.WriteDatabase(db); err != nil {
		return fmt.Errorf("failed to execute template: %w", err)
	}

	// Print success message
	fmt.Fprintf(os.Stderr, "\n✓ Template executed successfully\n")
	if templOutputPath != "" {
		fmt.Fprintf(os.Stderr, "Output written to: %s\n", templOutputPath)
	} else {
		fmt.Fprintf(os.Stderr, "Output written to stdout\n")
	}
	fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())

	return nil
}
@@ -9,7 +9,7 @@ services:
      POSTGRES_PASSWORD: relspec_test_password
      POSTGRES_DB: relspec_test
    ports:
      - "5433:5432"  # Using 5433 to avoid conflicts with local PostgreSQL
      - "5439:5432"  # Using 5439 to avoid conflicts with local PostgreSQL
    volumes:
      - ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql
      - postgres_data:/var/lib/postgresql/data
149
docs/DOMAINS_DRAWDB.md
Normal file
@@ -0,0 +1,149 @@
# Domains and DrawDB Areas Integration

## Overview

Domains provide a way to organize tables from potentially multiple schemas into logical business groupings. When working with DrawDB format, domains are automatically imported/exported as **Subject Areas** - a native DrawDB feature for visually grouping tables.

## How It Works

### Writing Domains to DrawDB (Export)

When you export a database with domains to DrawDB format:

1. **Schema Areas** are created automatically for each schema (existing behavior)
2. **Domain Areas** are created for each domain, calculated based on the positions of the tables they contain
3. The domain area bounds are automatically calculated to encompass all its tables with a small padding

```go
// Example: Creating a domain and exporting to DrawDB
db := models.InitDatabase("mydb")

// Create an "authentication" domain
authDomain := models.InitDomain("authentication")
authDomain.Tables = append(authDomain.Tables,
    models.InitDomainTable("users", "public"),
    models.InitDomainTable("roles", "public"),
    models.InitDomainTable("permissions", "public"),
)
db.Domains = append(db.Domains, authDomain)

// Create a "financial" domain spanning multiple schemas
finDomain := models.InitDomain("financial")
finDomain.Tables = append(finDomain.Tables,
    models.InitDomainTable("accounts", "public"),
    models.InitDomainTable("transactions", "public"),
    models.InitDomainTable("ledger", "finance"), // Different schema!
)
db.Domains = append(db.Domains, finDomain)

// Write to DrawDB - domains become subject areas
writer := drawdb.NewWriter(&writers.WriterOptions{
    OutputPath: "schema.json",
})
writer.WriteDatabase(db)
```

The resulting DrawDB JSON will have Subject Areas for both:
- "authentication" area containing the auth tables
- "financial" area containing the financial tables from both schemas

### Reading Domains from DrawDB (Import)

When you import a DrawDB file with Subject Areas:

1. **Subject Areas** are automatically converted to **Domains**
2. Tables are assigned to a domain if they fall within the area's visual bounds
3. Table references include both the table name and schema name

```go
// Example: Reading DrawDB with areas
reader := drawdb.NewReader(&readers.ReaderOptions{
    FilePath: "schema.json",
})

db, err := reader.ReadDatabase()
if err != nil {
    log.Fatal(err)
}

// Access domains
for _, domain := range db.Domains {
    fmt.Printf("Domain: %s\n", domain.Name)
    for _, domainTable := range domain.Tables {
        fmt.Printf("  - %s.%s\n", domainTable.SchemaName, domainTable.TableName)

        // Access the actual table reference if loaded
        if domainTable.RefTable != nil {
            fmt.Printf("    Description: %s\n", domainTable.RefTable.Description)
        }
    }
}
```

## Domain Structure

```go
type Domain struct {
    Name        string         // Domain name (e.g., "authentication", "user_data")
    Description string         // Optional human-readable description
    Tables      []*DomainTable // Tables belonging to this domain
    Comment     string         // Optional comment
    Metadata    map[string]any // Extensible metadata
    Sequence    uint           // Ordering hint
}

type DomainTable struct {
    TableName  string // Table name
    SchemaName string // Schema containing the table
    Sequence   uint   // Ordering hint
    RefTable   *Table // Pointer to actual table (in-memory only, not serialized)
}
```

## Multi-Schema Domains

One of the key features of domains is that they can span multiple schemas:

```
Domain: "user_data"
├── public.users
├── public.profiles
├── public.user_preferences
├── auth.user_sessions
└── auth.mfa_devices
```

This allows you to organize related tables even when they're stored in different schemas.

## Visual Organization in DrawDB

When viewing the exported DrawDB file in DrawDB Editor:

1. **Schema areas** appear in one color (original behavior)
2. **Domain areas** appear in a different color
3. Domain area bounds are calculated to fit all contained tables (see the sketch after this list)
4. Areas can overlap - a table can visually belong to multiple areas
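
The bounds calculation mentioned in step 3 can be pictured as a padded bounding box over the tables' canvas positions. A minimal sketch, using an illustrative `rect` type rather than RelSpec's real geometry structures:

```go
package main

import (
    "fmt"
    "math"
)

// rect is an illustrative type for a table's position and size on the canvas.
type rect struct{ X, Y, W, H float64 }

// domainBounds returns a rectangle enclosing every table, expanded by padding.
func domainBounds(tables []rect, padding float64) rect {
    if len(tables) == 0 {
        return rect{}
    }
    minX, minY := tables[0].X, tables[0].Y
    maxX, maxY := tables[0].X+tables[0].W, tables[0].Y+tables[0].H
    for _, t := range tables[1:] {
        minX = math.Min(minX, t.X)
        minY = math.Min(minY, t.Y)
        maxX = math.Max(maxX, t.X+t.W)
        maxY = math.Max(maxY, t.Y+t.H)
    }
    return rect{X: minX - padding, Y: minY - padding, W: maxX - minX + 2*padding, H: maxY - minY + 2*padding}
}

func main() {
    b := domainBounds([]rect{{X: 0, Y: 0, W: 200, H: 120}, {X: 300, Y: 80, W: 200, H: 160}}, 20)
    fmt.Println(b) // {-20 -20 540 280}
}
```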

## Integration with Other Formats

Currently, domain/area integration is implemented for DrawDB format.

To implement similar functionality for other formats:

1. Identify if the format has a native grouping/area feature
2. Add conversion logic in the reader to map format areas → Domain model
3. Add conversion logic in the writer to map Domain model → format areas

Example formats that could support domains:
- **DBML**: Could use DBML's `TableGroup` feature
- **DrawDB**: ✅ Already implemented (Subject Areas)
- **GraphQL**: Could use schema directives
- **Custom formats**: Implement as needed (a reader-side sketch follows this list)
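
As a reader-side illustration of steps 2 and 3, the sketch below maps a hypothetical format's grouping concept onto the Domain model. `formatGroup` is an assumed stand-in for whatever the source format actually provides; `models.InitDomain` and `models.InitDomainTable` are the constructors shown earlier in this document:

```go
// formatGroup is a hypothetical representation of a format's native grouping.
type formatGroup struct {
    Name   string
    Tables []struct{ Schema, Table string }
}

// groupsToDomains converts format-specific groups into RelSpec domains.
func groupsToDomains(groups []formatGroup) []*models.Domain {
    domains := make([]*models.Domain, 0, len(groups))
    for _, g := range groups {
        d := models.InitDomain(g.Name)
        for _, t := range g.Tables {
            d.Tables = append(d.Tables, models.InitDomainTable(t.Table, t.Schema))
        }
        domains = append(domains, d)
    }
    return domains
}
```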

## Tips and Best Practices

1. **Keep domains focused**: Each domain should represent a distinct business area
2. **Document purposes**: Use Description and Comment fields to explain each domain
3. **Use meaningful names**: Domain names should clearly reflect their purpose
4. **Maintain schema consistency**: Keep related tables together in the same schema when possible
5. **Use metadata**: Store tool-specific information in the Metadata field
360
docs/SCRIPTS_COMMAND.md
Normal file
@@ -0,0 +1,360 @@
# RelSpec Scripts Command

The `relspec scripts` command provides tools for managing and executing SQL migration scripts from a directory structure.

## Overview

The scripts command supports two main operations:
- **list**: List SQL scripts from a directory in execution order
- **execute**: Execute SQL scripts against a PostgreSQL database

Scripts are read from a directory (recursively) and executed in a deterministic order based on **Priority** (ascending) and **Sequence** (ascending).

## File Naming Convention

SQL scripts must follow this naming pattern (both separators are supported):

```
{priority}_{sequence}_{name}.{sql|pgsql}    (underscore format)
{priority}-{sequence}-{name}.{sql|pgsql}    (hyphen format)
```

### Components

- **priority**: Integer (0-9999) - Execution priority level (lower executes first)
- **sequence**: Integer (0-9999) - Order within priority level (lower executes first)
- **separator**: Underscore `_` or hyphen `-` (both formats can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens)
- **extension**: `.sql` or `.pgsql`
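
To make the convention concrete, here is a minimal Go sketch that parses priority and sequence out of a filename. The regular expression is an assumption derived from the components listed above, not necessarily the exact pattern RelSpec compiles internally:

```go
package main

import (
    "fmt"
    "regexp"
    "strconv"
)

// scriptNameRe is an assumed pattern for {priority}{sep}{sequence}{sep}{name}.{sql|pgsql}.
var scriptNameRe = regexp.MustCompile(`^(\d{1,4})[_-](\d{1,4})[_-]([A-Za-z0-9_-]+)\.(sql|pgsql)$`)

// parseScriptName returns ok=false for names that do not match, mirroring
// the "invalid examples are ignored" behavior described below.
func parseScriptName(filename string) (priority, sequence int, name string, ok bool) {
    m := scriptNameRe.FindStringSubmatch(filename)
    if m == nil {
        return 0, 0, "", false
    }
    priority, _ = strconv.Atoi(m[1])
    sequence, _ = strconv.Atoi(m[2])
    return priority, sequence, m[3], true
}

func main() {
    p, s, n, ok := parseScriptName("1_001_create_users.sql")
    fmt.Println(p, s, n, ok) // 1 1 create_users true
}
```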

### Valid Examples

**Underscore format:**
```
1_001_create_users.sql       # Priority 1, Sequence 1
1_002_create_posts.sql       # Priority 1, Sequence 2
1_003_create_comments.pgsql  # Priority 1, Sequence 3
2_001_add_indexes.sql        # Priority 2, Sequence 1
2_002_add_constraints.sql    # Priority 2, Sequence 2
3_001_seed_users.sql         # Priority 3, Sequence 1
```

**Hyphen format:**
```
1-001-create-users.sql       # Priority 1, Sequence 1
1-002-create-posts.sql       # Priority 1, Sequence 2
1-003-create-comments.pgsql  # Priority 1, Sequence 3
10-10-create-newid.pgsql     # Priority 10, Sequence 10
```

**Mixed format (both in same directory):**
```
1_001_create_users.sql       # Priority 1, Sequence 1 (underscore)
1-002-create-posts.sql       # Priority 1, Sequence 2 (hyphen)
2_001_add_indexes.sql        # Priority 2, Sequence 1 (underscore)
```

**Execution Order**: scripts always run sorted by Priority, then Sequence (for the six underscore examples above: 1→2→3→4→5→6), regardless of which separator style is used.

### Invalid Examples (Will be ignored)

```
migration.sql        # Missing priority/sequence
create_users.sql     # Missing priority/sequence
1_create_users.sql   # Missing sequence
1_001_test.txt       # Wrong extension
README.md            # Not a SQL file
```

## Directory Structure

Scripts can be organized in subdirectories. The scanner recursively finds all matching SQL files:

```
migrations/
├── 1_001_create_schema.sql
├── 1_002_create_users.sql
├── tables/
│   ├── 1_003_create_posts.sql
│   └── 1_004_create_comments.pgsql
├── indexes/
│   └── 2_001_add_indexes.sql
└── data/
    └── 3_001_seed_data.sql
```

All files will be found and executed in Priority→Sequence order regardless of directory structure.

## Commands

### relspec scripts list

List all SQL scripts in a directory and show their execution order.

**Usage:**
```bash
relspec scripts list --dir <directory> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")

**Example:**
```bash
relspec scripts list --dir ./migrations
```

**Output:**
```
=== SQL Scripts List ===
Directory: ./migrations

Found 5 script(s) in execution order:

No.  Priority  Sequence  Name                            Lines
----  --------  --------  ------------------------------  -----
1    1         1         create_users                    7
2    1         2         create_posts                    8
3    2         1         add_indexes                     4
4    2         2         add_constraints                 6
5    3         1         seed_data                       4
```

### relspec scripts execute

Execute SQL scripts from a directory against a PostgreSQL database.

**Usage:**
```bash
relspec scripts execute --dir <directory> --conn <connection-string> [flags]
```

**Flags:**
- `--dir <path>` (required): Directory containing SQL scripts
- `--conn <string>` (required): PostgreSQL connection string
- `--schema <name>`: Schema name (default: "public")
- `--database <name>`: Database name (default: "database")

**Connection String Formats:**

```bash
# Standard PostgreSQL URLs
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require

# Key-value format
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
```

**Examples:**

```bash
# Execute migration scripts
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://user:pass@localhost:5432/mydb"

# Execute with custom schema
relspec scripts execute \
  --dir ./migrations \
  --conn "postgres://localhost/mydb" \
  --schema public

# Execute with SSL disabled
relspec scripts execute \
  --dir ./sql \
  --conn "postgres://user:pass@localhost/db?sslmode=disable"

# Execute using key-value connection string
relspec scripts execute \
  --dir ./migrations \
  --conn "host=localhost port=5432 user=admin password=secret dbname=prod"
```

**Output:**
```
=== SQL Scripts Execution ===
Started at: 2025-12-30 22:30:15
Directory: ./migrations
Database: postgres://user:***@localhost:5432/mydb

[1/2] Reading SQL scripts...
✓ Found 4 script(s)

[2/2] Executing scripts in order (Priority → Sequence)...

Executing script: create_users (Priority=1, Sequence=1)
✓ Successfully executed: create_users
Executing script: create_posts (Priority=1, Sequence=2)
✓ Successfully executed: create_posts
Executing script: add_indexes (Priority=2, Sequence=1)
✓ Successfully executed: add_indexes
Executing script: seed_data (Priority=2, Sequence=2)
✓ Successfully executed: seed_data

=== Execution Complete ===
Completed at: 2025-12-30 22:30:16
Successfully executed 4 script(s)
```

## Execution Behavior

### Execution Order

Scripts are **always** executed in this order:
1. Sort by **Priority** (ascending)
2. Within same priority, sort by **Sequence** (ascending) - a Go sketch of this comparator follows the example below

Example:
```
Priority 1, Sequence 1  → Executes 1st
Priority 1, Sequence 2  → Executes 2nd
Priority 1, Sequence 10 → Executes 3rd
Priority 2, Sequence 1  → Executes 4th
Priority 2, Sequence 5  → Executes 5th
Priority 10, Sequence 1 → Executes 6th
```
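
A minimal Go sketch of that comparator, using an illustrative `script` struct rather than RelSpec's actual `models.Script` definition:

```go
package main

import (
    "fmt"
    "sort"
)

// script is an illustrative stand-in for the real script model.
type script struct {
    Priority, Sequence int
    Name               string
}

// sortScripts orders by Priority ascending, then Sequence ascending.
func sortScripts(scripts []script) {
    sort.SliceStable(scripts, func(i, j int) bool {
        if scripts[i].Priority != scripts[j].Priority {
            return scripts[i].Priority < scripts[j].Priority
        }
        return scripts[i].Sequence < scripts[j].Sequence
    })
}

func main() {
    s := []script{
        {Priority: 10, Sequence: 1, Name: "sixth"},
        {Priority: 1, Sequence: 10, Name: "third"},
        {Priority: 1, Sequence: 1, Name: "first"},
        {Priority: 2, Sequence: 1, Name: "fourth"},
        {Priority: 1, Sequence: 2, Name: "second"},
        {Priority: 2, Sequence: 5, Name: "fifth"},
    }
    sortScripts(s)
    for _, sc := range s {
        fmt.Printf("Priority %d, Sequence %d → %s\n", sc.Priority, sc.Sequence, sc.Name)
    }
}
```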

### Error Handling

- **Stop on First Error**: Execution stops immediately when any script fails
- **No Automatic Rollback**: Scripts executed before the failure remain committed
- **Error Details**: Full error message with script name, priority, and sequence

Example error output:
```
Executing script: add_indexes (Priority=2, Sequence=1)
Error: execution failed: failed to execute script add_indexes (Priority=2, Sequence=1):
  ERROR: syntax error at or near "IDNEX" (SQLSTATE 42601)
```

### Transaction Behavior

- Each script executes in its own implicit transaction (PostgreSQL default)
- No automatic transaction wrapping across multiple scripts
- For atomic migrations, manually wrap SQL in `BEGIN/COMMIT` blocks

### Empty Scripts

Scripts with empty SQL content are silently skipped.

## Use Cases

### Development Migrations

Organize database changes by priority levels:

```
migrations/
├── 1_xxx_schema.sql       # Priority 1: Core schema
├── 1_xxx_tables.sql
├── 2_xxx_indexes.sql      # Priority 2: Performance
├── 2_xxx_constraints.sql
└── 3_xxx_seed.sql         # Priority 3: Data
```

### Multi-Environment Deployments

Use priority levels for environment-specific scripts:

```
deploy/
├── 1_xxx_core_schema.sql    # Priority 1: All environments
├── 2_xxx_dev_data.sql       # Priority 2: Dev only
├── 2_xxx_staging_data.sql   # Priority 2: Staging only
└── 3_xxx_prod_data.sql      # Priority 3: Production only
```

### Incremental Rollouts

Use sequence for ordered feature rollouts:

```
features/
├── 1_001_feature_a_schema.sql
├── 1_002_feature_a_data.sql
├── 1_003_feature_b_schema.sql
└── 1_004_feature_b_data.sql
```

## Integration with RelSpec

The scripts command uses:
- **Reader**: `pkg/readers/sqldir/` - Reads SQL files into `models.Schema.Scripts`
- **Writer**: `pkg/writers/sqlexec/` - Executes scripts from `models.Schema.Scripts`

You can use these packages programmatically:

```go
import (
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
    "git.warky.dev/wdevs/relspecgo/pkg/writers"
    "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

// Read scripts
reader := sqldir.NewReader(&readers.ReaderOptions{
    FilePath: "./migrations",
})
db, _ := reader.ReadDatabase()

// Execute scripts
writer := sqlexec.NewWriter(&writers.WriterOptions{
    Metadata: map[string]any{
        "connection_string": "postgres://localhost/mydb",
    },
})
writer.WriteDatabase(db)
```

## Best Practices

### Naming

- Use zero-padded sequences: `001`, `002`, `010` (not `1`, `2`, `10`)
- Use descriptive names: `create_users_table`, not `table1`
- Group related changes: same priority for related DDL

### Organization

- Keep scripts small and focused (one logical change per file)
- Use priority levels to organize phases (schema → indexes → data)
- Document complex migrations with SQL comments

### Safety

- Always test migrations in development first
- Use `scripts list` to verify execution order before running
- Back up production databases before executing
- Consider using transactions for critical changes
- Review generated SQL before execution

### Version Control

- Commit scripts to version control
- Never modify executed scripts (create new ones instead)
- Use meaningful commit messages
- Tag releases with migration checkpoints

## Limitations

- PostgreSQL only (currently)
- No built-in rollback support
- No migration state tracking (no "already executed" detection)
- No dry-run mode
- Stops on first error (no partial execution tracking)

## Future Enhancements

Potential future features:
- Migration state tracking (executed scripts table)
- Rollback script support (using `models.Script.Rollback` field)
- Dry-run mode (validate without executing)
- Transaction wrapping (all-or-nothing execution)
- Multi-database support (MySQL, SQLite, etc.)
- Parallel execution for independent scripts
393
docs/SCRIPTS_EXAMPLES.md
Normal file
@@ -0,0 +1,393 @@
# RelSpec Scripts Command - Quick Examples

## Basic Workflow

### 1. Create migration directory structure

```bash
mkdir -p migrations
```

### 2. Create migration scripts

Both underscore and hyphen formats are supported. Examples below use underscore format,
but you can also use: `1-001-create-users-table.sql`

```bash
# Priority 1: Core schema
cat > migrations/1_001_create_users_table.sql << 'EOF'
CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    username VARCHAR(100) NOT NULL UNIQUE,
    email VARCHAR(255) NOT NULL UNIQUE,
    password_hash VARCHAR(255) NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_users_username ON users(username);
CREATE INDEX idx_users_email ON users(email);
EOF

cat > migrations/1_002_create_posts_table.sql << 'EOF'
CREATE TABLE posts (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    title VARCHAR(200) NOT NULL,
    content TEXT,
    published BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
EOF

# Priority 2: Additional indexes
cat > migrations/2_001_add_post_indexes.sql << 'EOF'
CREATE INDEX idx_posts_user_id ON posts(user_id);
CREATE INDEX idx_posts_published ON posts(published);
CREATE INDEX idx_posts_created_at ON posts(created_at);
EOF

# Priority 3: Seed data
cat > migrations/3_001_seed_admin_user.sql << 'EOF'
INSERT INTO users (username, email, password_hash)
VALUES ('admin', 'admin@example.com', 'hashed_password_here')
ON CONFLICT (username) DO NOTHING;
EOF
```

### 3. List scripts to verify order

```bash
relspec scripts list --dir migrations
```

Output:
```
=== SQL Scripts List ===
Directory: migrations

Found 4 script(s) in execution order:

No.  Priority  Sequence  Name                            Lines
----  --------  --------  ------------------------------  -----
1    1         1         create_users_table              13
2    1         2         create_posts_table              11
3    2         1         add_post_indexes                4
4    3         1         seed_admin_user                 4
```

### 4. Execute against database

```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://myuser:mypass@localhost:5432/myapp"
```

## Real-World Examples

### Example 1: E-commerce Database Setup

```bash
# Directory structure
migrations/
├── 1_001_create_users.sql
├── 1_002_create_products.sql
├── 1_003_create_orders.sql
├── 1_004_create_order_items.sql
├── 2_001_add_indexes.sql
├── 2_002_add_constraints.sql
├── 3_001_seed_categories.sql
└── 3_002_seed_sample_products.sql

# Execute
relspec scripts execute \
  --dir migrations \
  --conn "postgres://ecommerce_user:pass@db.example.com:5432/ecommerce_prod?sslmode=require"
```

### Example 2: Multi-Schema Database

```bash
# Organize by schema using subdirectories
migrations/
├── public/
│   ├── 1_001_create_users.sql
│   └── 1_002_create_sessions.sql
├── analytics/
│   ├── 1_001_create_events.sql
│   └── 2_001_create_views.sql
└── reporting/
    └── 1_001_create_reports.sql

# Execute (all schemas processed together)
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/multi_schema_db" \
  --schema public
```

### Example 3: Development Environment Setup

```bash
# Create local development database
createdb myapp_dev

# Run migrations
relspec scripts execute \
  --dir ./db/migrations \
  --conn "postgres://localhost/myapp_dev?sslmode=disable"

# Verify
psql myapp_dev -c "\dt"
```

### Example 4: CI/CD Pipeline

```yaml
# .github/workflows/deploy.yml
- name: Run database migrations
  run: |
    relspec scripts list --dir migrations
    relspec scripts execute \
      --dir migrations \
      --conn "${{ secrets.DATABASE_URL }}"
```

### Example 5: Docker Compose Integration

```yaml
# docker-compose.yml
services:
  postgres:
    image: postgres:16
    environment:
      POSTGRES_DB: myapp
      POSTGRES_USER: myuser
      POSTGRES_PASSWORD: mypass
    ports:
      - "5432:5432"

  migrate:
    image: relspec:latest
    depends_on:
      - postgres
    volumes:
      - ./migrations:/migrations
    command: >
      scripts execute
      --dir /migrations
      --conn "postgres://myuser:mypass@postgres:5432/myapp"
```

```bash
# Run migrations with docker-compose
docker-compose up -d postgres
sleep 5  # Wait for postgres to be ready
docker-compose run --rm migrate
```

### Example 6: Incremental Feature Rollout

```bash
# Feature branch structure
migrations/
├── 1_100_user_profiles_schema.sql       # Feature: User profiles
├── 1_101_user_profiles_constraints.sql
├── 1_102_user_profiles_indexes.sql
├── 2_100_notifications_schema.sql       # Feature: Notifications
├── 2_101_notifications_constraints.sql
└── 2_102_notifications_indexes.sql

# Deploy just user profiles (Priority 1)
# Then later deploy notifications (Priority 2)
```

### Example 7: Rollback Strategy (Manual)

```bash
# Forward migration
cat > migrations/1_001_add_column.sql << 'EOF'
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
EOF

# Create manual rollback script (not auto-executed)
cat > rollbacks/1_001_remove_column.sql << 'EOF'
ALTER TABLE users DROP COLUMN phone;
EOF

# If needed, manually execute rollback
psql myapp -f rollbacks/1_001_remove_column.sql
```

### Example 8: Complex Schema Changes

```sql
-- migrations/1_001_alter_users_table.sql
BEGIN;

-- Add new column
ALTER TABLE users ADD COLUMN full_name VARCHAR(200);

-- Populate from existing data
UPDATE users SET full_name = username WHERE full_name IS NULL;

-- Make it required
ALTER TABLE users ALTER COLUMN full_name SET NOT NULL;

-- Add index
CREATE INDEX idx_users_full_name ON users(full_name);

COMMIT;
```

Execute:
```bash
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/myapp"
```

## File Naming Format Examples

### Underscore Format (Traditional)
```
migrations/
├── 1_001_create_users.sql
├── 1_002_create_posts.sql
├── 2_001_add_indexes.sql
└── 3_001_seed_data.sql
```

### Hyphen Format (Alternative)
```
migrations/
├── 1-001-create-users.sql
├── 1-002-create-posts.sql
├── 10-10-create-newid.pgsql
└── 2-001-add-indexes.sql
```

### Mixed Format (Both in Same Directory)
```
migrations/
├── 1_001_create_users.sql           # Underscore format
├── 1-002-create-posts.sql           # Hyphen format
├── 2_001_add_indexes.sql            # Underscore format
└── 10-10-special-migration.pgsql    # Hyphen format
```

**Note:** All three approaches work identically - use whichever naming style you prefer!

## Common Patterns

### Pattern 1: Schema → Indexes → Constraints → Data

```
1_xxx_*.sql  # Tables and basic structure
2_xxx_*.sql  # Indexes for performance
3_xxx_*.sql  # Foreign keys and constraints
4_xxx_*.sql  # Seed/reference data
```

### Pattern 2: Feature-Based Organization

```
1_001_feature_auth_users.sql
1_002_feature_auth_sessions.sql
1_003_feature_auth_permissions.sql
2_001_feature_blog_posts.sql
2_002_feature_blog_comments.sql
3_001_feature_payments_transactions.sql
```

### Pattern 3: Date-Based Versioning

```
1_20250130_create_users.sql
2_20250131_add_user_indexes.sql
3_20250201_create_posts.sql
```

### Pattern 4: Environment-Specific Scripts

```bash
# Base migrations (all environments)
migrations/base/
├── 1_001_create_users.sql
├── 1_002_create_products.sql

# Development-specific
migrations/dev/
└── 9_001_seed_test_data.sql

# Production-specific
migrations/prod/
└── 9_001_seed_production_config.sql

# Execute different paths based on environment
ENV=dev
relspec scripts execute \
  --dir migrations/base \
  --conn "postgres://localhost/myapp_${ENV}"

relspec scripts execute \
  --dir migrations/${ENV} \
  --conn "postgres://localhost/myapp_${ENV}"
```

## Troubleshooting

### Check script order before execution
```bash
relspec scripts list --dir migrations
```

### Test against local database first
```bash
# Create test database
createdb myapp_test

# Test migrations
relspec scripts execute \
  --dir migrations \
  --conn "postgres://localhost/myapp_test"

# Inspect results
psql myapp_test

# Cleanup
dropdb myapp_test
```

### Validate SQL syntax
```bash
# Run each script inside a transaction that is rolled back at the end
for f in migrations/*.sql; do
  echo "Checking $f..."
  psql myapp --single-transaction -v ON_ERROR_STOP=1 -f "$f" -c "ROLLBACK;"
done
```

### Debug connection issues
```bash
# Test connection string
psql "postgres://user:pass@localhost:5432/myapp"

# If that works, use the same string for relspec
relspec scripts execute \
  --dir migrations \
  --conn "postgres://user:pass@localhost:5432/myapp"
```

## Tips

1. **Always review execution order** with `list` before running `execute`
2. **Test in development** before running against production
3. **Use zero-padded sequences** (001, 002, not 1, 2) for consistent sorting
4. **Keep scripts idempotent** when possible (use IF NOT EXISTS, ON CONFLICT, etc.)
5. **Back up production** before running migrations
6. **Use transactions** for complex multi-statement migrations
7. **Document breaking changes** with SQL comments in the migration files
8. **Version control everything** - commit migrations with code changes
572
docs/TEMPLATE_MODE.md
Normal file
@@ -0,0 +1,572 @@
# RelSpec Template Mode

The `templ` command allows you to transform database schemas using custom Go text templates. It provides powerful template functions and flexible execution modes for generating any type of output from your database schema.

## Table of Contents

- [Quick Start](#quick-start)
- [Execution Modes](#execution-modes)
- [Template Functions](#template-functions)
  - [String Utilities](#string-utilities)
  - [Type Conversion](#type-conversion)
  - [Filtering](#filtering)
  - [Formatting](#formatting)
  - [Loop Helpers](#loop-helpers)
  - [Sorting Helpers](#sorting-helpers)
  - [Safe Access](#safe-access)
  - [Utility Functions](#utility-functions)
- [Data Model](#data-model)
- [Examples](#examples)

## Quick Start

```bash
# Generate documentation from a database
relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
  --template docs.tmpl --output schema-docs.md

# Generate TypeScript models (one file per table)
relspec templ --from dbml --from-path schema.dbml \
  --template model.tmpl --mode table \
  --output ./models/ \
  --filename-pattern "{{.Name | toCamelCase}}.ts"

# Output to stdout
relspec templ --from json --from-path schema.json \
  --template report.tmpl
```

## Execution Modes

The `--mode` flag controls how the template is executed:

| Mode | Description | Output | When to Use |
|------|-------------|--------|-------------|
| `database` | Execute once for entire database | Single file | Documentation, reports, overview files |
| `schema` | Execute once per schema | One file per schema | Schema-specific documentation |
| `domain` | Execute once per domain | One file per domain | Domain-based documentation, domain exports |
| `script` | Execute once per script | One file per script | Script processing |
| `table` | Execute once per table | One file per table | Model generation, table docs |

### Filename Patterns

For multi-file modes (`schema`, `domain`, `script`, `table`), use `--filename-pattern` to control output filenames:

```bash
# Default pattern
--filename-pattern "{{.Name}}.txt"

# With transformations
--filename-pattern "{{.Name | toCamelCase}}.ts"

# Nested directories
--filename-pattern "{{.Schema}}/{{.Name}}.md"

# Complex patterns
--filename-pattern "{{.ParentSchema.Name}}/models/{{.Name | toPascalCase}}Model.java"
```

## Template Functions

### String Utilities

Transform and manipulate strings in your templates.

| Function | Description | Example | Output |
|----------|-------------|---------|--------|
| `toUpper` | Convert to uppercase | `{{ "hello" \| toUpper }}` | `HELLO` |
| `toLower` | Convert to lowercase | `{{ "HELLO" \| toLower }}` | `hello` |
| `toCamelCase` | Convert to camelCase | `{{ "user_name" \| toCamelCase }}` | `userName` |
| `toPascalCase` | Convert to PascalCase | `{{ "user_name" \| toPascalCase }}` | `UserName` |
| `toSnakeCase` | Convert to snake_case | `{{ "UserName" \| toSnakeCase }}` | `user_name` |
| `toKebabCase` | Convert to kebab-case | `{{ "UserName" \| toKebabCase }}` | `user-name` |
| `pluralize` | Convert to plural | `{{ "user" \| pluralize }}` | `users` |
| `singularize` | Convert to singular | `{{ "users" \| singularize }}` | `user` |
| `title` | Capitalize each word | `{{ "hello world" \| title }}` | `Hello World` |
| `trim` | Trim whitespace | `{{ " hello " \| trim }}` | `hello` |
| `trimPrefix` | Remove prefix | `{{ trimPrefix "tbl_users" "tbl_" }}` | `users` |
| `trimSuffix` | Remove suffix | `{{ trimSuffix "users_old" "_old" }}` | `users` |
| `replace` | Replace occurrences | `{{ replace "hello" "l" "L" -1 }}` | `heLLo` |
| `stringContains` | Check if contains substring | `{{ stringContains "hello" "ell" }}` | `true` |
| `hasPrefix` | Check if starts with | `{{ hasPrefix "hello" "hel" }}` | `true` |
| `hasSuffix` | Check if ends with | `{{ hasSuffix "hello" "llo" }}` | `true` |
| `split` | Split by separator | `{{ split "a,b,c" "," }}` | `[a b c]` |
| `join` | Join with separator | `{{ join (list "a" "b") "," }}` | `a,b` |

### Type Conversion

Convert SQL types to various programming language types.

| Function | Parameters | Description | Example |
|----------|------------|-------------|---------|
| `sqlToGo` | `sqlType`, `notNull` | SQL to Go | `{{ sqlToGo "varchar" true }}` → `string` |
| `sqlToTypeScript` | `sqlType`, `notNull` | SQL to TypeScript | `{{ sqlToTypeScript "integer" false }}` → `number \| null` |
| `sqlToJava` | `sqlType`, `notNull` | SQL to Java | `{{ sqlToJava "varchar" true }}` → `String` |
| `sqlToPython` | `sqlType` | SQL to Python | `{{ sqlToPython "integer" }}` → `int` |
| `sqlToRust` | `sqlType`, `notNull` | SQL to Rust | `{{ sqlToRust "varchar" false }}` → `Option<String>` |
| `sqlToCSharp` | `sqlType`, `notNull` | SQL to C# | `{{ sqlToCSharp "integer" false }}` → `int?` |
| `sqlToPhp` | `sqlType`, `notNull` | SQL to PHP | `{{ sqlToPhp "varchar" false }}` → `?string` |

The second parameter is the column's NotNull flag, as used in templates via `sqlToGo .Type .NotNull`; passing `false` produces the language's nullable form.

**Supported SQL Types:**
- Integer: `integer`, `int`, `smallint`, `bigint`, `serial`, `bigserial`
- String: `text`, `varchar`, `char`, `character`, `citext`
- Boolean: `boolean`, `bool`
- Float: `real`, `float`, `double precision`, `numeric`, `decimal`
- Date/Time: `timestamp`, `date`, `time`, `timestamptz`
- Binary: `bytea`
- Special: `uuid`, `json`, `jsonb`, `array`
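
As an illustration of how such a mapping works, here is a simplified Go sketch of an `sqlToGo`-style function. The type table below is an assumption for demonstration; RelSpec's actual mapping may differ in its details:

```go
package main

import (
    "fmt"
    "strings"
)

// sqlToGoSketch maps a SQL type to a Go type; nullable columns become pointers.
// This is an illustrative sketch, not RelSpec's real implementation.
func sqlToGoSketch(sqlType string, notNull bool) string {
    var goType string
    switch strings.ToLower(sqlType) {
    case "integer", "int", "smallint", "serial":
        goType = "int"
    case "bigint", "bigserial":
        goType = "int64"
    case "text", "varchar", "char", "character", "citext":
        goType = "string"
    case "boolean", "bool":
        goType = "bool"
    case "real", "float", "double precision", "numeric", "decimal":
        goType = "float64"
    case "timestamp", "timestamptz", "date", "time":
        goType = "time.Time"
    case "bytea":
        goType = "[]byte"
    default:
        goType = "any"
    }
    if !notNull {
        return "*" + goType
    }
    return goType
}

func main() {
    fmt.Println(sqlToGoSketch("varchar", true))  // string
    fmt.Println(sqlToGoSketch("integer", false)) // *int
}
```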

### Filtering

Filter and select specific database objects.

| Function | Description | Example |
|----------|-------------|---------|
| `filterTables` | Filter tables by pattern | `{{ filterTables .Schema.Tables "user_*" }}` |
| `filterTablesByPattern` | Alias for filterTables | `{{ filterTablesByPattern .Schema.Tables "temp_*" }}` |
| `filterColumns` | Filter columns by pattern | `{{ filterColumns .Table.Columns "*_id" }}` |
| `filterColumnsByType` | Filter by SQL type | `{{ filterColumnsByType .Table.Columns "varchar" }}` |
| `filterPrimaryKeys` | Get primary key columns | `{{ filterPrimaryKeys .Table.Columns }}` |
| `filterForeignKeys` | Get foreign key constraints | `{{ filterForeignKeys .Table.Constraints }}` |
| `filterUniqueConstraints` | Get unique constraints | `{{ filterUniqueConstraints .Table.Constraints }}` |
| `filterCheckConstraints` | Get check constraints | `{{ filterCheckConstraints .Table.Constraints }}` |
| `filterNullable` | Get nullable columns | `{{ filterNullable .Table.Columns }}` |
| `filterNotNull` | Get non-nullable columns | `{{ filterNotNull .Table.Columns }}` |

**Pattern Matching:**
- `*` - Match any characters
- `?` - Match single character
- Example: `user_*` matches `user_profile`, `user_settings`
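
These wildcards behave like shell-style globs. A small Go sketch of equivalent matching with the standard library (an illustration; RelSpec's internal matcher may be implemented differently):

```go
package main

import (
    "fmt"
    "path"
)

func main() {
    // path.Match supports the same * and ? wildcards described above.
    for _, name := range []string{"user_profile", "user_settings", "orders"} {
        ok, err := path.Match("user_*", name)
        if err != nil {
            panic(err) // only possible for a malformed pattern
        }
        fmt.Printf("%-14s matches user_*: %v\n", name, ok)
    }
}
```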
|
||||
|
||||
### Formatting
|
||||
|
||||
Format output and add structure to generated code.
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `toJSON` | Convert to JSON | `{{ .Database \| toJSON }}` |
|
||||
| `toJSONPretty` | Pretty-print JSON | `{{ toJSONPretty .Table " " }}` |
|
||||
| `toYAML` | Convert to YAML | `{{ .Schema \| toYAML }}` |
|
||||
| `indent` | Indent by spaces | `{{ indent .Column.Description 4 }}` |
|
||||
| `indentWith` | Indent with prefix | `{{ indentWith .Comment " " }}` |
|
||||
| `escape` | Escape special chars | `{{ escape .Column.Default }}` |
|
||||
| `escapeQuotes` | Escape quotes only | `{{ escapeQuotes .String }}` |
|
||||
| `comment` | Add comment prefix | `{{ comment .Description "//" }}` |
|
||||
| `quoteString` | Add quotes | `{{ quoteString "value" }}` → `"value"` |
|
||||
| `unquoteString` | Remove quotes | `{{ unquoteString "\"value\"" }}` → `value` |
|
||||
|
||||
**Comment Styles:**
|
||||
- `//` - C/Go/JavaScript style
|
||||
- `#` - Python/Shell style
|
||||
- `--` - SQL style
|
||||
- `/* */` - Block comment style
|
||||
|
||||
### Loop Helpers
|
||||
|
||||
Iterate and manipulate collections.
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `enumerate` | Add index to items | `{{ range enumerate .Tables }}{{ .Index }}: {{ .Value.Name }}{{ end }}` |
|
||||
| `batch` | Split into chunks | `{{ range batch .Columns 3 }}...{{ end }}` |
|
||||
| `chunk` | Alias for batch | `{{ range chunk .Columns 5 }}...{{ end }}` |
|
||||
| `reverse` | Reverse order | `{{ range reverse .Tables }}...{{ end }}` |
|
||||
| `first` | Get first N items | `{{ range first .Tables 5 }}...{{ end }}` |
|
||||
| `last` | Get last N items | `{{ range last .Tables 3 }}...{{ end }}` |
|
||||
| `skip` | Skip first N items | `{{ range skip .Tables 2 }}...{{ end }}` |
|
||||
| `take` | Take first N (alias) | `{{ range take .Tables 10 }}...{{ end }}` |
|
||||
| `concat` | Concatenate slices | `{{ $all := concat .Schema1.Tables .Schema2.Tables }}` |
|
||||
| `unique` | Remove duplicates | `{{ $unique := unique .Items }}` |
|
||||
| `sortBy` | Sort by field | `{{ $sorted := sortBy .Tables "Name" }}` |
|
||||
| `groupBy` | Group by field | `{{ $grouped := groupBy .Tables "Schema" }}` |
|
||||
|
||||
### Sorting Helpers
|
||||
|
||||
Sort database objects by name or sequence number. All sort functions modify the slice in-place.
|
||||
|
||||
**Schema Sorting:**
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `sortSchemasByName` | Sort schemas by name | `{{ sortSchemasByName .Database.Schemas false }}` |
|
||||
| `sortSchemasBySequence` | Sort schemas by sequence | `{{ sortSchemasBySequence .Database.Schemas false }}` |
|
||||
|
||||
**Table Sorting:**
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `sortTablesByName` | Sort tables by name | `{{ sortTablesByName .Schema.Tables false }}` |
|
||||
| `sortTablesBySequence` | Sort tables by sequence | `{{ sortTablesBySequence .Schema.Tables true }}` |
|
||||
|
||||
**Column Sorting:**
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `sortColumnsMapByName` | Convert column map to sorted slice by name | `{{ $cols := sortColumnsMapByName .Table.Columns false }}` |
|
||||
| `sortColumnsMapBySequence` | Convert column map to sorted slice by sequence | `{{ $cols := sortColumnsMapBySequence .Table.Columns false }}` |
|
||||
| `sortColumnsByName` | Sort column slice by name | `{{ sortColumnsByName $columns false }}` |
|
||||
| `sortColumnsBySequence` | Sort column slice by sequence | `{{ sortColumnsBySequence $columns true }}` |
|
||||
|
||||
**Other Object Sorting:**
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `sortViewsByName` | Sort views by name | `{{ sortViewsByName .Schema.Views false }}` |
|
||||
| `sortViewsBySequence` | Sort views by sequence | `{{ sortViewsBySequence .Schema.Views false }}` |
|
||||
| `sortSequencesByName` | Sort sequences by name | `{{ sortSequencesByName .Schema.Sequences false }}` |
|
||||
| `sortSequencesBySequence` | Sort sequences by sequence | `{{ sortSequencesBySequence .Schema.Sequences false }}` |
|
||||
| `sortIndexesMapByName` | Convert index map to sorted slice by name | `{{ $idx := sortIndexesMapByName .Table.Indexes false }}` |
|
||||
| `sortIndexesMapBySequence` | Convert index map to sorted slice by sequence | `{{ $idx := sortIndexesMapBySequence .Table.Indexes false }}` |
|
||||
| `sortIndexesByName` | Sort index slice by name | `{{ sortIndexesByName $indexes false }}` |
|
||||
| `sortIndexesBySequence` | Sort index slice by sequence | `{{ sortIndexesBySequence $indexes false }}` |
|
||||
| `sortConstraintsMapByName` | Convert constraint map to sorted slice by name | `{{ $cons := sortConstraintsMapByName .Table.Constraints false }}` |
|
||||
| `sortConstraintsByName` | Sort constraint slice by name | `{{ sortConstraintsByName $constraints false }}` |
|
||||
| `sortRelationshipsMapByName` | Convert relationship map to sorted slice by name | `{{ $rels := sortRelationshipsMapByName .Table.Relationships false }}` |
|
||||
| `sortRelationshipsByName` | Sort relationship slice by name | `{{ sortRelationshipsByName $relationships false }}` |
|
||||
| `sortScriptsByName` | Sort scripts by name | `{{ sortScriptsByName .Schema.Scripts false }}` |
|
||||
| `sortEnumsByName` | Sort enums by name | `{{ sortEnumsByName .Schema.Enums false }}` |
|
||||
|
||||
**Sort Parameters:**
|
||||
- Second parameter: `false` = ascending, `true` = descending
|
||||
- Example: `{{ sortTablesByName .Schema.Tables true }}` sorts descending (Z-A)
|
||||
|
||||
### Safe Access
|
||||
|
||||
Safely access nested data without panicking.
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `get` | Get map value | `{{ get .Metadata "key" }}` |
|
||||
| `getOr` | Get with default | `{{ getOr .Metadata "key" "default" }}` |
|
||||
| `getPath` | Nested access | `{{ getPath .Config "database.host" }}` |
|
||||
| `getPathOr` | Nested with default | `{{ getPathOr .Config "db.port" 5432 }}` |
|
||||
| `safeIndex` | Safe array access | `{{ safeIndex .Tables 0 }}` |
|
||||
| `safeIndexOr` | Safe with default | `{{ safeIndexOr .Tables 0 nil }}` |
|
||||
| `has` | Check key exists | `{{ if has .Metadata "key" }}...{{ end }}` |
|
||||
| `hasPath` | Check nested path | `{{ if hasPath .Config "db.host" }}...{{ end }}` |
|
||||
| `keys` | Get map keys | `{{ range keys .Metadata }}...{{ end }}` |
|
||||
| `values` | Get map values | `{{ range values .Table.Columns }}...{{ end }}` |
|
||||
| `merge` | Merge maps | `{{ $merged := merge .Map1 .Map2 }}` |
|
||||
| `pick` | Select keys | `{{ $subset := pick .Metadata "name" "desc" }}` |
|
||||
| `omit` | Exclude keys | `{{ $filtered := omit .Metadata "internal" }}` |
|
||||
| `sliceContains` | Check contains | `{{ if sliceContains .Names "admin" }}...{{ end }}` |
|
||||
| `indexOf` | Find index | `{{ $idx := indexOf .Names "admin" }}` |
|
||||
| `pluck` | Extract field | `{{ $names := pluck .Tables "Name" }}` |
|
||||
|
||||
### Utility Functions
|
||||
|
||||
General-purpose template helpers.
|
||||
|
||||
| Function | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `add` | Add numbers | `{{ add 5 3 }}` → `8` |
|
||||
| `sub` | Subtract | `{{ sub 10 3 }}` → `7` |
|
||||
| `mul` | Multiply | `{{ mul 4 5 }}` → `20` |
|
||||
| `div` | Divide | `{{ div 10 2 }}` → `5` |
|
||||
| `mod` | Modulo | `{{ mod 10 3 }}` → `1` |
|
||||
| `default` | Default value | `{{ default "unknown" .Name }}` |
|
||||
| `dict` | Create map | `{{ $m := dict "key1" "val1" "key2" "val2" }}` |
|
||||
| `list` | Create list | `{{ $l := list "a" "b" "c" }}` |
|
||||
| `seq` | Number sequence | `{{ range seq 1 5 }}{{ . }}{{ end }}` → `12345` |
|
||||
|
||||
## Data Model
|
||||
|
||||
The data available in templates depends on the execution mode:
|
||||
|
||||
### Database Mode
|
||||
```go
|
||||
.Database // *models.Database - Full database
|
||||
.ParentDatabase // *models.Database - Same as .Database
|
||||
.FlatColumns // []*models.FlatColumn - All columns flattened
|
||||
.FlatTables // []*models.FlatTable - All tables flattened
|
||||
.FlatConstraints // []*models.FlatConstraint - All constraints
|
||||
.FlatRelationships // []*models.FlatRelationship - All relationships
|
||||
.Summary // *models.DatabaseSummary - Statistics
|
||||
.Metadata // map[string]interface{} - User metadata
|
||||
```
|
||||
|
||||
### Schema Mode
|
||||
```go
|
||||
.Schema // *models.Schema - Current schema
|
||||
.ParentDatabase // *models.Database - Parent database context
|
||||
.FlatColumns // []*models.FlatColumn - Schema's columns flattened
|
||||
.FlatTables // []*models.FlatTable - Schema's tables flattened
|
||||
.FlatConstraints // []*models.FlatConstraint - Schema's constraints
|
||||
.FlatRelationships // []*models.FlatRelationship - Schema's relationships
|
||||
.Summary // *models.DatabaseSummary - Statistics
|
||||
.Metadata // map[string]interface{} - User metadata
|
||||
```
|
||||
|
||||
### Domain Mode
|
||||
```go
|
||||
.Domain // *models.Domain - Current domain
|
||||
.ParentDatabase // *models.Database - Parent database context
|
||||
.Metadata // map[string]interface{} - User metadata
|
||||
```
|
||||
|
||||
### Table Mode
|
||||
```go
|
||||
.Table // *models.Table - Current table
|
||||
.ParentSchema // *models.Schema - Parent schema
|
||||
.ParentDatabase // *models.Database - Parent database context
|
||||
.Metadata // map[string]interface{} - User metadata
|
||||
```
|
||||
|
||||
### Script Mode
|
||||
```go
|
||||
.Script // *models.Script - Current script
|
||||
.ParentSchema // *models.Schema - Parent schema
|
||||
.ParentDatabase // *models.Database - Parent database context
|
||||
.Metadata // map[string]interface{} - User metadata
|
||||
```
|
||||
|
||||
### Model Structures
|
||||
|
||||
**Database:**
|
||||
- `.Name` - Database name
|
||||
- `.Schemas` - List of schemas
|
||||
- `.Domains` - List of domains (business domain groupings)
|
||||
- `.Description`, `.Comment` - Documentation
|
||||
|
||||
**Schema:**
|
||||
- `.Name` - Schema name
|
||||
- `.Tables` - List of tables
|
||||
- `.Views`, `.Sequences`, `.Scripts` - Other objects
|
||||
- `.Enums` - Enum types
|
||||
|
||||
**Domain:**
|
||||
- `.Name` - Domain name
|
||||
- `.Tables` - List of DomainTable references
|
||||
- `.Description`, `.Comment` - Documentation
|
||||
- `.Metadata` - Custom metadata map
|
||||
|
||||
**DomainTable:**
|
||||
- `.TableName` - Name of the table
|
||||
- `.SchemaName` - Schema containing the table
|
||||
- `.RefTable` - Pointer to actual Table object (if loaded)
|
||||
|
||||
**Table:**
|
||||
- `.Name` - Table name
|
||||
- `.Schema` - Schema name
|
||||
- `.Columns` - Map of columns (use `values` function to iterate)
|
||||
- `.Constraints` - Map of constraints
|
||||
- `.Indexes` - Map of indexes
|
||||
- `.Relationships` - Map of relationships
|
||||
- `.Description`, `.Comment` - Documentation
|
||||
|
||||
**Column:**
|
||||
- `.Name` - Column name
|
||||
- `.Type` - SQL type
|
||||
- `.NotNull` - Is NOT NULL
|
||||
- `.IsPrimaryKey` - Is primary key
|
||||
- `.Default` - Default value
|
||||
- `.Description`, `.Comment` - Documentation
|
||||
|
||||
## Examples
|
||||
|
||||
### Example 1: TypeScript Interfaces (Table Mode)
|
||||
|
||||
**Template:** `typescript-interface.tmpl`
|
||||
```typescript
|
||||
// Generated from {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}.{{ .Table.Name }}
|
||||
|
||||
export interface {{ .Table.Name | toPascalCase }} {
|
||||
{{- range .Table.Columns | values }}
|
||||
{{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull }};
|
||||
{{- end }}
|
||||
}
|
||||
|
||||
{{- $fks := filterForeignKeys .Table.Constraints }}
|
||||
{{- if $fks }}
|
||||
|
||||
// Foreign Keys:
|
||||
{{- range $fks }}
|
||||
// - {{ .Name }}: references {{ .ReferencedTable }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
```
|
||||
|
||||
**Command:**
|
||||
```bash
|
||||
relspec templ --from pgsql --from-conn "..." \
|
||||
--template typescript-interface.tmpl \
|
||||
--mode table \
|
||||
--output ./src/types/ \
|
||||
--filename-pattern "{{.Name | toCamelCase}}.ts"
|
||||
```
|
||||
|
||||
### Example 2: Markdown Documentation (Database Mode)
|
||||
|
||||
**Template:** `database-docs.tmpl`
|
||||
```markdown
|
||||
# Database: {{ .Database.Name }}
|
||||
|
||||
{{ if .Database.Description }}{{ .Database.Description }}{{ end }}
|
||||
|
||||
**Statistics:**
|
||||
- Schemas: {{ len .Database.Schemas }}
|
||||
- Tables: {{ .Summary.TotalTables }}
|
||||
- Columns: {{ .Summary.TotalColumns }}
|
||||
|
||||
{{ range .Database.Schemas }}
|
||||
## Schema: {{ .Name }}
|
||||
|
||||
{{ range .Tables }}
|
||||
### {{ .Name }}
|
||||
|
||||
{{ if .Description }}{{ .Description }}{{ end }}
|
||||
|
||||
**Columns:**
|
||||
|
||||
| Column | Type | Nullable | PK | Description |
|
||||
|--------|------|----------|----|----|
|
||||
{{- range .Columns | values }}
|
||||
| {{ .Name }} | `{{ .Type }}` | {{ if .NotNull }}No{{ else }}Yes{{ end }} | {{ if .IsPrimaryKey }}✓{{ end }} | {{ .Description }} |
|
||||
{{- end }}
|
||||
|
||||
{{- $fks := filterForeignKeys .Constraints }}
|
||||
{{- if $fks }}

**Foreign Keys:**

{{ range $fks }}
- `{{ .Name }}`: {{ join .Columns ", " }} → {{ .ReferencedTable }}({{ join .ReferencedColumns ", " }})
{{- end }}
{{- end }}

{{ end }}
{{ end }}
```

### Example 3: Python SQLAlchemy Models (Table Mode)

**Template:** `python-model.tmpl`

```python
"""{{ .Table.Name | toPascalCase }} model for {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}"""

from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class {{ .Table.Name | toPascalCase }}(Base):
    """{{ if .Table.Description }}{{ .Table.Description }}{{ else }}{{ .Table.Name }} table{{ end }}"""

    __tablename__ = "{{ .Table.Name }}"
    __table_args__ = {"schema": "{{ .ParentSchema.Name }}"}

{{- range .Table.Columns | values }}
    {{ .Name }} = Column({{ sqlToPython .Type }}{{ if .IsPrimaryKey }}, primary_key=True{{ end }}{{ if .NotNull }}, nullable=False{{ end }})
{{- end }}
```

### Example 4: GraphQL Schema (Schema Mode)

**Template:** `graphql-schema.tmpl`

```graphql
"""{{ .Schema.Name }} schema"""

{{ range .Schema.Tables }}
type {{ .Name | toPascalCase }} {
{{- range .Columns | values }}
  {{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull | replace " | null" "" }}{{ if .NotNull }}!{{ end }}
{{- end }}
}

input {{ .Name | toPascalCase }}Input {
{{- $cols := filterNotNull .Columns | filterPrimaryKeys }}
{{- range $cols }}
  {{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type true | replace " | null" "" }}!
{{- end }}
}

{{ end }}
```

### Example 5: SQL Migration (Database Mode)

**Template:** `migration.tmpl`

```sql
-- Migration for {{ .Database.Name }}
-- Generated: {{ .Metadata.timestamp }}

BEGIN;

{{ range .Database.Schemas }}
-- Schema: {{ .Name }}
CREATE SCHEMA IF NOT EXISTS {{ .Name }};

{{ range .Tables }}
CREATE TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }} (
{{- range $i, $col := .Columns | values }}
{{- if $i }},{{ end }}
  {{ $col.Name }} {{ $col.Type }}{{ if $col.NotNull }} NOT NULL{{ end }}{{ if $col.Default }} DEFAULT {{ $col.Default }}{{ end }}
{{- end }}
);

{{- $pks := filterPrimaryKeys .Columns }}
{{- if $pks }}

ALTER TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }}
  ADD PRIMARY KEY ({{ range $i, $pk := $pks }}{{ if $i }}, {{ end }}{{ $pk.Name }}{{ end }});
{{- end }}

{{ end }}
{{ end }}

COMMIT;
```

## Best Practices

1. **Use Hyphens for Whitespace Control:**
   ```
   {{- removes whitespace before
   -}} removes whitespace after
   ```

2. **Store Intermediate Results:**
   ```
   {{ $pks := filterPrimaryKeys .Table.Columns }}
   {{ if $pks }}...{{ end }}
   ```

3. **Check Before Accessing:**
   ```
   {{ if .Table.Description }}{{ .Table.Description }}{{ end }}
   ```

4. **Use Safe Access for Maps:**
   ```
   {{ getOr .Metadata "key" "default-value" }}
   ```

5. **Iterate Map Values:**
   ```
   {{ range .Table.Columns | values }}...{{ end }}
   ```
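
Putting these together, a minimal table-summary partial might look like the following sketch. It only uses helpers documented above (`filterPrimaryKeys`, `getOr`, `values`); the exact layout is illustrative, not a shipped template:

```
{{- $pks := filterPrimaryKeys .Table.Columns }}
## {{ getOr .Metadata "title" .Table.Name }}

{{ if .Table.Description }}{{ .Table.Description }}{{ end }}
{{- if $pks }}
**Primary Key:** {{ range $i, $pk := $pks }}{{ if $i }}, {{ end }}{{ $pk.Name }}{{ end }}
{{- end }}

{{ range .Table.Columns | values }}
- `{{ .Name }}`: {{ .Type }}
{{- end }}
```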
## Troubleshooting

**Error: "wrong type for value"**
- Check the function parameter order (e.g., `sqlToGo .Type .NotNull`, not `sqlToGo .NotNull .Type`)

**Error: "can't evaluate field"**
- The field doesn't exist on the object
- Use `{{ if .Field }}` to check before accessing

**Empty Output:**
- Check that your mode matches your template's expectations
- Verify the data exists (use `{{ .Database | toJSON }}` to inspect; see the snippet below)

**Whitespace Issues:**
- Use `{{-` and `-}}` to control whitespace
- Run the output through a formatter if needed
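
When a template renders empty, a quick way to see what data actually reached it is to dump the model at the top of the template while debugging (temporary line, delete it afterwards):

```
{{/* temporary debug output: shows the full model as JSON */}}
{{ .Database | toJSON }}
```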
## Additional Resources

- [Go Template Documentation](https://pkg.go.dev/text/template)
- [RelSpec Documentation](../README.md)
- [Model Structure Reference](../pkg/models/)
- [Example Templates](../examples/templates/)
18
go.mod
@@ -1,24 +1,38 @@
module git.warky.dev/wdevs/relspecgo

go 1.24
go 1.24.0

require (
    github.com/gdamore/tcell/v2 v2.8.1
    github.com/google/uuid v1.6.0
    github.com/jackc/pgx/v5 v5.7.6
    github.com/rivo/tview v0.42.0
    github.com/spf13/cobra v1.10.2
    github.com/stretchr/testify v1.11.1
    github.com/uptrace/bun v1.2.16
    golang.org/x/text v0.28.0
    gopkg.in/yaml.v3 v3.0.1
)

require (
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/gdamore/encoding v1.0.1 // indirect
    github.com/inconshreveable/mousetrap v1.1.0 // indirect
    github.com/jackc/pgpassfile v1.0.0 // indirect
    github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
    github.com/jinzhu/inflection v1.0.0 // indirect
    github.com/kr/pretty v0.3.1 // indirect
    github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
    github.com/mattn/go-runewidth v0.0.16 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
    github.com/rivo/uniseg v0.4.7 // indirect
    github.com/rogpeppe/go-internal v1.14.1 // indirect
    github.com/spf13/pflag v1.0.10 // indirect
    github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
    github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
    github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
    golang.org/x/crypto v0.41.0 // indirect
    golang.org/x/text v0.28.0 // indirect
    golang.org/x/sys v0.38.0 // indirect
    golang.org/x/term v0.34.0 // indirect
)
93
go.sum
@@ -3,6 +3,11 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
github.com/gdamore/tcell/v2 v2.8.1 h1:KPNxyqclpWpWQlPLx6Xui1pMk8S+7+R37h3g07997NU=
github.com/gdamore/tcell/v2 v2.8.1/go.mod h1:bj8ori1BG3OYMjmb3IklZVWfZUJ1UBQt9JXrOCOhGWw=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -15,13 +20,27 @@ github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
@@ -36,13 +55,87 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
github.com/uptrace/bun v1.2.16 h1:QlObi6ZIK5Ao7kAALnh91HWYNZUBbVwye52fmlQM9kc=
github.com/uptrace/bun v1.2.16/go.mod h1:jMoNg2n56ckaawi/O/J92BHaECmrz6IRjuMWqlMaMTM=
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
@@ -1,71 +0,0 @@
#!/bin/bash

# Ask if the user wants to make a release version
read -p "Do you want to make a release version? (y/n): " make_release

if [[ $make_release =~ ^[Yy]$ ]]; then
    # Get the latest tag from git
    latest_tag=$(git describe --tags --abbrev=0 2>/dev/null)

    if [ -z "$latest_tag" ]; then
        # No tags exist yet, start with v1.0.0
        suggested_version="v1.0.0"
        echo "No existing tags found. Starting with $suggested_version"
    else
        echo "Latest tag: $latest_tag"

        # Remove 'v' prefix if present
        version_number="${latest_tag#v}"

        # Split version into major.minor.patch
        IFS='.' read -r major minor patch <<< "$version_number"

        # Increment patch version
        patch=$((patch + 1))

        # Construct new version
        suggested_version="v${major}.${minor}.${patch}"
        echo "Suggested next version: $suggested_version"
    fi

    # Ask the user for the version number with the suggested version as default
    read -p "Enter the version number (press Enter for $suggested_version): " version

    # Use suggested version if user pressed Enter without input
    if [ -z "$version" ]; then
        version="$suggested_version"
    fi

    # Prepend 'v' to the version if it doesn't start with it
    if ! [[ $version =~ ^v ]]; then
        version="v$version"
    fi

    # Get commit logs since the last tag
    if [ -z "$latest_tag" ]; then
        # No previous tag, get all commits
        commit_logs=$(git log --pretty=format:"- %s" --no-merges)
    else
        # Get commits since the last tag
        commit_logs=$(git log "${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges)
    fi

    # Create the tag message
    if [ -z "$commit_logs" ]; then
        tag_message="Release $version"
    else
        tag_message="Release $version

${commit_logs}"
    fi

    # Create an annotated tag with the commit logs
    git tag -a "$version" -m "$tag_message"

    # Push the tag to the remote repository
    git push origin "$version"

    echo "Tag $version created and pushed to the remote repository."
else
    echo "No release version created."
fi
74
pkg/commontypes/csharp.go
Normal file
@@ -0,0 +1,74 @@
package commontypes

import "strings"

// CSharpTypeMap maps PostgreSQL types to C# types
var CSharpTypeMap = map[string]string{
    // Integer types
    "integer":     "int",
    "int":         "int",
    "int4":        "int",
    "smallint":    "short",
    "int2":        "short",
    "bigint":      "long",
    "int8":        "long",
    "serial":      "int",
    "bigserial":   "long",
    "smallserial": "short",

    // String types
    "text":      "string",
    "varchar":   "string",
    "char":      "string",
    "character": "string",
    "citext":    "string",
    "bpchar":    "string",
    "uuid":      "Guid",

    // Boolean
    "boolean": "bool",
    "bool":    "bool",

    // Float types
    "real":             "float",
    "float4":           "float",
    "double precision": "double",
    "float8":           "double",
    "numeric":          "decimal",
    "decimal":          "decimal",

    // Date/Time types
    "timestamp":                   "DateTime",
    "timestamp without time zone": "DateTime",
    "timestamp with time zone":    "DateTimeOffset",
    "timestamptz":                 "DateTimeOffset",
    "date":                        "DateTime",
    "time":                        "TimeSpan",
    "time without time zone":      "TimeSpan",
    "time with time zone":         "DateTimeOffset",
    "timetz":                      "DateTimeOffset",

    // Binary
    "bytea": "byte[]",

    // JSON
    "json":  "string",
    "jsonb": "string",
}

// SQLToCSharp converts SQL types to C# types.
// The boolean flag mirrors the column's NOT NULL constraint: when it is
// false, the column is nullable and value types are suffixed with "?".
func SQLToCSharp(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    csType, ok := CSharpTypeMap[baseType]
    if !ok {
        csType = "object"
    }

    // Nullable value types get a "?" suffix (reference types are already nullable)
    if !nullable && csType != "string" && !strings.HasSuffix(csType, "[]") && csType != "object" {
        return csType + "?"
    }

    return csType
}
89
pkg/commontypes/golang.go
Normal file
@@ -0,0 +1,89 @@
package commontypes

import "strings"

// GoTypeMap maps PostgreSQL types to Go types
var GoTypeMap = map[string]string{
    // Integer types
    "integer":     "int32",
    "int":         "int32",
    "int4":        "int32",
    "smallint":    "int16",
    "int2":        "int16",
    "bigint":      "int64",
    "int8":        "int64",
    "serial":      "int32",
    "bigserial":   "int64",
    "smallserial": "int16",

    // String types
    "text":      "string",
    "varchar":   "string",
    "char":      "string",
    "character": "string",
    "citext":    "string",
    "bpchar":    "string",

    // Boolean
    "boolean": "bool",
    "bool":    "bool",

    // Float types
    "real":             "float32",
    "float4":           "float32",
    "double precision": "float64",
    "float8":           "float64",
    "numeric":          "float64",
    "decimal":          "float64",

    // Date/Time types
    "timestamp":                   "time.Time",
    "timestamp without time zone": "time.Time",
    "timestamp with time zone":    "time.Time",
    "timestamptz":                 "time.Time",
    "date":                        "time.Time",
    "time":                        "time.Time",
    "time without time zone":      "time.Time",
    "time with time zone":         "time.Time",
    "timetz":                      "time.Time",

    // Binary
    "bytea": "[]byte",

    // UUID
    "uuid": "string",

    // JSON
    "json":  "string",
    "jsonb": "string",

    // Array
    "array": "[]string",
}

// SQLToGo converts SQL types to Go types.
// The boolean flag mirrors the column's NOT NULL constraint: when it is
// false, the column is nullable and a pointer type is returned.
func SQLToGo(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    goType, ok := GoTypeMap[baseType]
    if !ok {
        goType = "interface{}"
    }

    // NOT NULL columns map directly to the value type
    if nullable {
        return goType
    }

    // Nullable columns use pointer types (except for slices and interfaces)
    if !strings.HasPrefix(goType, "[]") && goType != "interface{}" {
        return "*" + goType
    }

    return goType
}

// NeedsTimeImport checks if a Go type requires the time package
func NeedsTimeImport(goType string) bool {
    return strings.Contains(goType, "time.Time")
}
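For illustration, this is how the mapper behaves when handed a column's flag. These are hypothetical calls, not part of the diff; note that callers pass the column's NOT NULL flag, as the earlier template examples (`sqlToGo .Type .NotNull`) suggest:

```go
commontypes.SQLToGo("varchar(100)", true)  // "string"  (NOT NULL column)
commontypes.SQLToGo("varchar(100)", false) // "*string" (nullable column)
commontypes.SQLToGo("timestamptz", false)  // "*time.Time"
commontypes.SQLToGo("geometry", false)     // "interface{}" (unmapped type, no pointer)
```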
68
pkg/commontypes/java.go
Normal file
@@ -0,0 +1,68 @@
package commontypes

// JavaTypeMap maps PostgreSQL types to Java types
var JavaTypeMap = map[string]string{
    // Integer types
    "integer":     "Integer",
    "int":         "Integer",
    "int4":        "Integer",
    "smallint":    "Short",
    "int2":        "Short",
    "bigint":      "Long",
    "int8":        "Long",
    "serial":      "Integer",
    "bigserial":   "Long",
    "smallserial": "Short",

    // String types
    "text":      "String",
    "varchar":   "String",
    "char":      "String",
    "character": "String",
    "citext":    "String",
    "bpchar":    "String",
    "uuid":      "UUID",

    // Boolean
    "boolean": "Boolean",
    "bool":    "Boolean",

    // Float types
    "real":             "Float",
    "float4":           "Float",
    "double precision": "Double",
    "float8":           "Double",
    "numeric":          "BigDecimal",
    "decimal":          "BigDecimal",

    // Date/Time types
    "timestamp":                   "Timestamp",
    "timestamp without time zone": "Timestamp",
    "timestamp with time zone":    "Timestamp",
    "timestamptz":                 "Timestamp",
    "date":                        "Date",
    "time":                        "Time",
    "time without time zone":      "Time",
    "time with time zone":         "Time",
    "timetz":                      "Time",

    // Binary
    "bytea": "byte[]",

    // JSON
    "json":  "String",
    "jsonb": "String",
}

// SQLToJava converts SQL types to Java types
func SQLToJava(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    javaType, ok := JavaTypeMap[baseType]
    if !ok {
        javaType = "Object"
    }

    // Java uses wrapper classes, which are nullable by default, so the flag is unused
    return javaType
}
72
pkg/commontypes/php.go
Normal file
@@ -0,0 +1,72 @@
package commontypes

// PHPTypeMap maps PostgreSQL types to PHP types
var PHPTypeMap = map[string]string{
    // Integer types
    "integer":     "int",
    "int":         "int",
    "int4":        "int",
    "smallint":    "int",
    "int2":        "int",
    "bigint":      "int",
    "int8":        "int",
    "serial":      "int",
    "bigserial":   "int",
    "smallserial": "int",

    // String types
    "text":      "string",
    "varchar":   "string",
    "char":      "string",
    "character": "string",
    "citext":    "string",
    "bpchar":    "string",
    "uuid":      "string",

    // Boolean
    "boolean": "bool",
    "bool":    "bool",

    // Float types
    "real":             "float",
    "float4":           "float",
    "double precision": "float",
    "float8":           "float",
    "numeric":          "float",
    "decimal":          "float",

    // Date/Time types
    "timestamp":                   "\\DateTime",
    "timestamp without time zone": "\\DateTime",
    "timestamp with time zone":    "\\DateTime",
    "timestamptz":                 "\\DateTime",
    "date":                        "\\DateTime",
    "time":                        "\\DateTime",
    "time without time zone":      "\\DateTime",
    "time with time zone":         "\\DateTime",
    "timetz":                      "\\DateTime",

    // Binary
    "bytea": "string",

    // JSON
    "json":  "array",
    "jsonb": "array",
}

// SQLToPhp converts SQL types to PHP types.
// The boolean flag mirrors the column's NOT NULL constraint: when it is
// false, the column is nullable and the PHP 7.1+ "?Type" syntax is emitted.
func SQLToPhp(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    phpType, ok := PHPTypeMap[baseType]
    if !ok {
        phpType = "mixed"
    }

    // Nullable columns get the "?Type" prefix ("mixed" already allows null)
    if !nullable && phpType != "mixed" {
        return "?" + phpType
    }

    return phpType
}
71
pkg/commontypes/python.go
Normal file
@@ -0,0 +1,71 @@
package commontypes

// PythonTypeMap maps PostgreSQL types to Python types
var PythonTypeMap = map[string]string{
    // Integer types
    "integer":     "int",
    "int":         "int",
    "int4":        "int",
    "smallint":    "int",
    "int2":        "int",
    "bigint":      "int",
    "int8":        "int",
    "serial":      "int",
    "bigserial":   "int",
    "smallserial": "int",

    // String types
    "text":      "str",
    "varchar":   "str",
    "char":      "str",
    "character": "str",
    "citext":    "str",
    "bpchar":    "str",
    "uuid":      "UUID",

    // Boolean
    "boolean": "bool",
    "bool":    "bool",

    // Float types
    "real":             "float",
    "float4":           "float",
    "double precision": "float",
    "float8":           "float",
    "numeric":          "Decimal",
    "decimal":          "Decimal",

    // Date/Time types
    "timestamp":                   "datetime",
    "timestamp without time zone": "datetime",
    "timestamp with time zone":    "datetime",
    "timestamptz":                 "datetime",
    "date":                        "date",
    "time":                        "time",
    "time without time zone":      "time",
    "time with time zone":         "time",
    "timetz":                      "time",

    // Binary
    "bytea": "bytes",

    // JSON
    "json":  "dict",
    "jsonb": "dict",

    // Array
    "array": "list",
}

// SQLToPython converts SQL types to Python types
func SQLToPython(sqlType string) string {
    baseType := ExtractBaseType(sqlType)

    pyType, ok := PythonTypeMap[baseType]
    if !ok {
        pyType = "Any"
    }

    // Python uses Optional[Type] for nullable columns, but we return the base type
    return pyType
}
72
pkg/commontypes/rust.go
Normal file
@@ -0,0 +1,72 @@
package commontypes

// RustTypeMap maps PostgreSQL types to Rust types
var RustTypeMap = map[string]string{
    // Integer types
    "integer":     "i32",
    "int":         "i32",
    "int4":        "i32",
    "smallint":    "i16",
    "int2":        "i16",
    "bigint":      "i64",
    "int8":        "i64",
    "serial":      "i32",
    "bigserial":   "i64",
    "smallserial": "i16",

    // String types
    "text":      "String",
    "varchar":   "String",
    "char":      "String",
    "character": "String",
    "citext":    "String",
    "bpchar":    "String",
    "uuid":      "String",

    // Boolean
    "boolean": "bool",
    "bool":    "bool",

    // Float types
    "real":             "f32",
    "float4":           "f32",
    "double precision": "f64",
    "float8":           "f64",
    "numeric":          "f64",
    "decimal":          "f64",

    // Date/Time types (using chrono crate)
    "timestamp":                   "NaiveDateTime",
    "timestamp without time zone": "NaiveDateTime",
    "timestamp with time zone":    "DateTime<Utc>",
    "timestamptz":                 "DateTime<Utc>",
    "date":                        "NaiveDate",
    "time":                        "NaiveTime",
    "time without time zone":      "NaiveTime",
    "time with time zone":         "DateTime<Utc>",
    "timetz":                      "DateTime<Utc>",

    // Binary
    "bytea": "Vec<u8>",

    // JSON
    "json":  "serde_json::Value",
    "jsonb": "serde_json::Value",
}

// SQLToRust converts SQL types to Rust types.
// The boolean flag mirrors the column's NOT NULL constraint: when it is
// false, the column is nullable and the type is wrapped in Option<T>.
func SQLToRust(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    rustType, ok := RustTypeMap[baseType]
    if !ok {
        rustType = "String"
    }

    // NOT NULL columns map directly to the value type
    if nullable {
        return rustType
    }

    // Nullable columns are wrapped in Option<T>
    return "Option<" + rustType + ">"
}
22
pkg/commontypes/sql.go
Normal file
@@ -0,0 +1,22 @@
package commontypes

import "strings"

// ExtractBaseType extracts the base type from a SQL type string.
// Examples: varchar(100) → varchar, numeric(10,2) → numeric
func ExtractBaseType(sqlType string) string {
    sqlType = strings.ToLower(strings.TrimSpace(sqlType))

    // Remove everything after '('
    if idx := strings.Index(sqlType, "("); idx > 0 {
        sqlType = sqlType[:idx]
    }

    return sqlType
}

// NormalizeType normalizes a SQL type to its base form.
// Alias for ExtractBaseType, kept for backwards compatibility.
func NormalizeType(sqlType string) string {
    return ExtractBaseType(sqlType)
}
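A quick table-driven check of this normalization behavior (an illustrative test sketch, not part of this change):

```go
package commontypes

import "testing"

// TestExtractBaseType sketches the expected normalization:
// lowercase, trimmed, and cut at the first '('.
func TestExtractBaseType(t *testing.T) {
    cases := map[string]string{
        "varchar(100)":  "varchar",
        "NUMERIC(10,2)": "numeric",
        "  text  ":      "text",
    }
    for in, want := range cases {
        if got := ExtractBaseType(in); got != want {
            t.Errorf("ExtractBaseType(%q) = %q, want %q", in, got, want)
        }
    }
}
```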
75
pkg/commontypes/typescript.go
Normal file
@@ -0,0 +1,75 @@
package commontypes

// TypeScriptTypeMap maps PostgreSQL types to TypeScript types
var TypeScriptTypeMap = map[string]string{
    // Integer types
    "integer":     "number",
    "int":         "number",
    "int4":        "number",
    "smallint":    "number",
    "int2":        "number",
    "bigint":      "number",
    "int8":        "number",
    "serial":      "number",
    "bigserial":   "number",
    "smallserial": "number",

    // String types
    "text":      "string",
    "varchar":   "string",
    "char":      "string",
    "character": "string",
    "citext":    "string",
    "bpchar":    "string",
    "uuid":      "string",

    // Boolean
    "boolean": "boolean",
    "bool":    "boolean",

    // Float types
    "real":             "number",
    "float4":           "number",
    "double precision": "number",
    "float8":           "number",
    "numeric":          "number",
    "decimal":          "number",

    // Date/Time types
    "timestamp":                   "Date",
    "timestamp without time zone": "Date",
    "timestamp with time zone":    "Date",
    "timestamptz":                 "Date",
    "date":                        "Date",
    "time":                        "Date",
    "time without time zone":      "Date",
    "time with time zone":         "Date",
    "timetz":                      "Date",

    // Binary
    "bytea": "Buffer",

    // JSON
    "json":  "any",
    "jsonb": "any",

    // Array
    "array": "any[]",
}

// SQLToTypeScript converts SQL types to TypeScript types.
// The boolean flag mirrors the column's NOT NULL constraint: when it is
// false, the column is nullable and " | null" is appended to the type.
func SQLToTypeScript(sqlType string, nullable bool) string {
    baseType := ExtractBaseType(sqlType)

    tsType, ok := TypeScriptTypeMap[baseType]
    if !ok {
        tsType = "any"
    }

    // NOT NULL columns map directly to the base type
    if nullable {
        return tsType
    }

    // Nullable columns become a union with null
    return tsType + " | null"
}
@@ -2,14 +2,15 @@ package diff

import (
    "reflect"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

// CompareDatabases compares two database models and returns the differences
func CompareDatabases(source, target *models.Database) *DiffResult {
    result := &DiffResult{
        Source: source.Name,
        Target: target.Name,
        Source:  source.Name,
        Target:  target.Name,
        Schemas: compareSchemas(source.Schemas, target.Schemas),
    }
    return result
@@ -4,8 +4,8 @@ import "git.warky.dev/wdevs/relspecgo/pkg/models"

// DiffResult represents the complete difference analysis between two databases
type DiffResult struct {
    Source string `json:"source"`
    Target string `json:"target"`
    Source  string      `json:"source"`
    Target  string      `json:"target"`
    Schemas *SchemaDiff `json:"schemas"`
}

@@ -18,17 +18,17 @@ type SchemaDiff struct {

// SchemaChange represents changes within a schema
type SchemaChange struct {
    Name string `json:"name"`
    Tables *TableDiff `json:"tables,omitempty"`
    Views *ViewDiff `json:"views,omitempty"`
    Sequences *SequenceDiff `json:"sequences,omitempty"`
    Name      string        `json:"name"`
    Tables    *TableDiff    `json:"tables,omitempty"`
    Views     *ViewDiff     `json:"views,omitempty"`
    Sequences *SequenceDiff `json:"sequences,omitempty"`
}

// TableDiff represents differences in tables
type TableDiff struct {
    Missing []*models.Table `json:"missing"` // Tables in source but not in target
    Extra []*models.Table `json:"extra"` // Tables in target but not in source
    Modified []*TableChange `json:"modified"` // Tables that exist in both but differ
    Missing  []*models.Table `json:"missing"`  // Tables in source but not in target
    Extra    []*models.Table `json:"extra"`    // Tables in target but not in source
    Modified []*TableChange  `json:"modified"` // Tables that exist in both but differ
}

// TableChange represents changes within a table
@@ -50,16 +50,16 @@ type ColumnDiff struct {

// ColumnChange represents a modified column
type ColumnChange struct {
    Name string `json:"name"`
    Source *models.Column `json:"source"`
    Target *models.Column `json:"target"`
    Changes map[string]any `json:"changes"` // Map of field name to what changed
    Name    string         `json:"name"`
    Source  *models.Column `json:"source"`
    Target  *models.Column `json:"target"`
    Changes map[string]any `json:"changes"` // Map of field name to what changed
}

// IndexDiff represents differences in indexes
type IndexDiff struct {
    Missing []*models.Index `json:"missing"` // Indexes in source but not in target
    Extra []*models.Index `json:"extra"` // Indexes in target but not in source
    Missing  []*models.Index `json:"missing"` // Indexes in source but not in target
    Extra    []*models.Index `json:"extra"`   // Indexes in target but not in source
    Modified []*IndexChange `json:"modified"` // Indexes that exist in both but differ
}

@@ -103,8 +103,8 @@ type RelationshipChange struct {

// ViewDiff represents differences in views
type ViewDiff struct {
    Missing []*models.View `json:"missing"` // Views in source but not in target
    Extra []*models.View `json:"extra"` // Views in target but not in source
    Missing  []*models.View `json:"missing"` // Views in source but not in target
    Extra    []*models.View `json:"extra"`   // Views in target but not in source
    Modified []*ViewChange `json:"modified"` // Views that exist in both but differ
}

@@ -133,14 +133,14 @@ type SequenceChange struct {

// Summary provides counts for quick overview
type Summary struct {
    Schemas SchemaSummary `json:"schemas"`
    Tables TableSummary `json:"tables"`
    Columns ColumnSummary `json:"columns"`
    Indexes IndexSummary `json:"indexes"`
    Constraints ConstraintSummary `json:"constraints"`
    Schemas       SchemaSummary       `json:"schemas"`
    Tables        TableSummary        `json:"tables"`
    Columns       ColumnSummary       `json:"columns"`
    Indexes       IndexSummary        `json:"indexes"`
    Constraints   ConstraintSummary   `json:"constraints"`
    Relationships RelationshipSummary `json:"relationships"`
    Views ViewSummary `json:"views"`
    Sequences SequenceSummary `json:"sequences"`
    Views         ViewSummary         `json:"views"`
    Sequences     SequenceSummary     `json:"sequences"`
}

type SchemaSummary struct {
177
pkg/inspector/.relspec-rules.yaml.example
Normal file
@@ -0,0 +1,177 @@
# RelSpec Inspector Rules Configuration Example
# Copy this file to .relspec-rules.yaml and customize as needed

version: "1.0"

rules:
  # ============================================================================
  # PRIMARY KEY RULES
  # ============================================================================

  # Validate primary key column naming convention
  primary_key_naming:
    enabled: warn  # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_"  # Regex pattern - PK columns must start with "id_"
    message: "Primary key columns should start with 'id_'"

  # Validate primary key data types
  primary_key_datatype:
    enabled: warn
    function: primary_key_datatype
    allowed_types:
      - bigserial
      - bigint
      - int
      - serial
      - integer
      - int4
      - int8
    message: "Primary keys should use integer types (bigserial, bigint, int, serial)"

  # Check if primary keys have auto-increment enabled
  primary_key_auto_increment:
    enabled: off  # Often disabled, as not all PKs need auto-increment
    function: primary_key_auto_increment
    require_auto_increment: true
    message: "Primary key without auto-increment detected"

  # ============================================================================
  # FOREIGN KEY RULES
  # ============================================================================

  # Validate foreign key column naming convention
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_"  # FK columns must start with "rid_" (referenced id)
    message: "Foreign key columns should start with 'rid_'"

  # Validate foreign key constraint naming convention
  foreign_key_constraint_naming:
    enabled: warn
    function: foreign_key_constraint_naming
    pattern: "^fk_"  # FK constraints must start with "fk_"
    message: "Foreign key constraint names should start with 'fk_'"

  # Ensure foreign key columns have indexes for performance
  foreign_key_index:
    enabled: warn
    function: foreign_key_index
    require_index: true
    message: "Foreign key columns should have indexes for optimal performance"

  # ============================================================================
  # NAMING CONVENTION RULES
  # ============================================================================

  # Validate that table naming follows the snake_case convention
  table_naming_case:
    enabled: warn
    function: table_regexpr  # Generic regex validator for table names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$"  # Lowercase letters, numbers, underscores only
    message: "Table names should be lowercase with underscores (snake_case)"

  # Validate that column naming follows the snake_case convention
  column_naming_case:
    enabled: warn
    function: column_regexpr  # Generic regex validator for column names
    case: lowercase
    pattern: "^[a-z][a-z0-9_]*$"  # Lowercase letters, numbers, underscores only
    message: "Column names should be lowercase with underscores (snake_case)"

  # ============================================================================
  # LENGTH RULES
  # ============================================================================

  # Limit table name length (PostgreSQL's max is 63, but 64 is common practice)
  table_name_length:
    enabled: warn
    function: table_name_length
    max_length: 64
    message: "Table name exceeds recommended maximum length of 64 characters"

  # Limit column name length
  column_name_length:
    enabled: warn
    function: column_name_length
    max_length: 64
    message: "Column name exceeds recommended maximum length of 64 characters"

  # ============================================================================
  # RESERVED KEYWORDS
  # ============================================================================

  # Warn about using SQL reserved keywords as identifiers
  reserved_keywords:
    enabled: warn
    function: reserved_words
    check_tables: true
    check_columns: true
    message: "Using SQL reserved keywords as identifiers can cause issues"

  # ============================================================================
  # SCHEMA INTEGRITY RULES
  # ============================================================================

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"

  # Detect orphaned foreign keys (referencing non-existent tables)
  orphaned_foreign_key:
    enabled: warn
    function: orphaned_foreign_key
    message: "Foreign key references a non-existent table"

  # Detect circular foreign key dependencies
  circular_dependency:
    enabled: warn
    function: circular_dependency
    message: "Circular foreign key dependency detected"

# ============================================================================
# RULE CONFIGURATION NOTES
# ============================================================================
#
# enabled: Controls rule enforcement level
#   - enforce: Violations are errors (exit code 1)
#   - warn: Violations are warnings (exit code 0)
#   - off: Rule is disabled
#
# function: The validation function to execute
#   - Must match a registered validator function
#   - Generic functions like table_regexpr and column_regexpr can be reused
#
# pattern: Regular expression for pattern matching
#   - Used by naming validators
#   - Must be valid Go regex syntax
#
# message: Custom message shown when the rule is violated
#   - Should be clear and actionable
#   - Explains what the violation is and how to fix it
#
# ============================================================================
# CUSTOM RULES EXAMPLES
# ============================================================================
#
# You can add custom rules using the generic validator functions:
#
# # Example: Ensure table names don't contain numbers
# table_no_numbers:
#   enabled: warn
#   function: table_regexpr
#   pattern: "^[a-z_]+$"
#   message: "Table names should not contain numbers"
#
# # Example: Audit columns must end with _audit
# audit_column_suffix:
#   enabled: enforce
#   function: column_regexpr
#   pattern: ".*_audit$"
#   message: "Audit columns must end with '_audit'"
#
# ============================================================================
472
pkg/inspector/PLAN.md
Normal file
@@ -0,0 +1,472 @@
# Inspector Feature Implementation Plan

## Overview
Add a model inspection feature that validates database schemas against configurable rules. The inspector will read any supported format, apply validation rules from a YAML config, and output a report in markdown or JSON format.

## Architecture

### Core Components

1. **CLI Command** (`cmd/relspec/inspect.go`)
   - New subcommand: `relspec inspect`
   - Flags:
     - `--from` (required): Input format (dbml, pgsql, json, etc.)
     - `--from-path`: File path for file-based formats
     - `--from-conn`: Connection string for database formats
     - `--rules` (optional): Path to rules YAML file (default: `.relspec-rules.yaml`)
     - `--output-format`: Report format (markdown, json) (default: markdown)
     - `--output`: Output file path (default: stdout)
     - `--schema`: Schema name filter (optional)

2. **Inspector Package** (`pkg/inspector/`)
   - `inspector.go`: Main inspector logic
   - `rules.go`: Rule definitions and configuration
   - `validators.go`: Individual validation rule implementations
   - `report.go`: Report generation (markdown, JSON)
   - `config.go`: YAML config loading and parsing

### Data Flow
```
Input Format → Reader → Database Model → Inspector → Validation Results → Report Formatter → Output
```
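
A minimal sketch of that flow in Go, wired against the APIs planned below (the `DatabaseReader` interface is an assumption here, standing in for the existing reader factory used by the convert command):

```go
// Sketch only: wires the planned pipeline end to end.
func runPipeline(reader DatabaseReader, rulesPath string) (string, error) {
	db, err := reader.ReadDatabase() // input format → database model
	if err != nil {
		return "", err
	}

	cfg, err := LoadConfig(rulesPath) // falls back to defaults if the file is missing
	if err != nil {
		return "", err
	}

	report, err := NewInspector(db, cfg).Inspect() // run the rules
	if err != nil {
		return "", err
	}

	// JSONFormatter would be used the same way.
	return (&MarkdownFormatter{}).Format(report)
}
```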
## Rules Configuration Structure

### YAML Schema (`rules.yaml`)
```yaml
version: "1.0"
rules:
  # Primary Key Rules
  primary_key_naming:
    enabled: enforce|warn|off
    pattern: "^id_"  # regex pattern
    message: "Primary key columns must start with 'id_'"

  primary_key_datatype:
    enabled: enforce|warn|off
    allowed_types: ["bigserial", "bigint", "int", "serial", "integer"]
    message: "Primary keys must use approved integer types"

  primary_key_auto_increment:
    enabled: enforce|warn|off
    require_auto_increment: true|false
    message: "Primary keys without auto-increment detected"

  # Foreign Key Rules
  foreign_key_column_naming:
    enabled: enforce|warn|off
    pattern: "^rid_"
    message: "Foreign key columns must start with 'rid_'"

  foreign_key_constraint_naming:
    enabled: enforce|warn|off
    pattern: "^fk_"
    message: "Foreign key constraint names must start with 'fk_'"

  foreign_key_index:
    enabled: enforce|warn|off
    require_index: true
    message: "Foreign keys should have indexes"

  # Naming Convention Rules
  table_naming_case:
    enabled: enforce|warn|off
    case: "lowercase"  # lowercase, uppercase, snake_case, camelCase
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names must be lowercase with underscores"

  column_naming_case:
    enabled: enforce|warn|off
    case: "lowercase"
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Column names must be lowercase with underscores"

  # Length Rules
  table_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Table name exceeds maximum length"

  column_name_length:
    enabled: enforce|warn|off
    max_length: 64
    message: "Column name exceeds maximum length"

  # Reserved Keywords
  reserved_keywords:
    enabled: enforce|warn|off
    check_tables: true
    check_columns: true
    message: "Using reserved SQL keywords"

  # Schema Integrity Rules
  missing_primary_key:
    enabled: enforce|warn|off
    message: "Table missing primary key"

  orphaned_foreign_key:
    enabled: enforce|warn|off
    message: "Foreign key references non-existent table"

  circular_dependency:
    enabled: enforce|warn|off
    message: "Circular foreign key dependency detected"
```

### Rule Levels
- **enforce**: Violations are errors (exit code 1)
- **warn**: Violations are warnings (exit code 0)
- **off**: Rule disabled

## Implementation Details

### 1. Inspector Core (`pkg/inspector/inspector.go`)

```go
type Inspector struct {
	config *Config
	db     *models.Database
}

type ValidationResult struct {
	RuleName string
	Level    string // "error" or "warning"
	Message  string
	Location string // e.g., "schema.table.column"
	Context  map[string]interface{}
	Passed   bool
}

type InspectorReport struct {
	Summary      ReportSummary
	Violations   []ValidationResult
	GeneratedAt  time.Time
	Database     string
	SourceFormat string
}

type ReportSummary struct {
	TotalRules   int
	RulesChecked int
	RulesSkipped int
	ErrorCount   int
	WarningCount int
	PassedCount  int
}

func NewInspector(db *models.Database, config *Config) *Inspector
func (i *Inspector) Inspect() (*InspectorReport, error)
func (i *Inspector) validateDatabase() []ValidationResult
func (i *Inspector) validateSchema(schema *models.Schema) []ValidationResult
func (i *Inspector) validateTable(table *models.Table) []ValidationResult
```

### 2. Rule Definitions (`pkg/inspector/rules.go`)

```go
type Config struct {
	Version string
	Rules   map[string]Rule
}

type Rule struct {
	Enabled      string // "enforce", "warn", "off"
	Message      string
	Pattern      string
	AllowedTypes []string
	MaxLength    int
	Case         string
	RequireIndex bool
	CheckTables  bool
	CheckColumns bool
	// ... rule-specific fields
}

type RuleValidator interface {
	Name() string
	Validate(db *models.Database, rule Rule) []ValidationResult
}

func LoadConfig(path string) (*Config, error)
func GetDefaultConfig() *Config
```

**Configuration Loading Behavior:**
- If the `--rules` flag is provided but the file is not found: use the default configuration (don't error)
- If the file exists but is invalid YAML: return an error
- The default configuration has sensible rules enabled at the "warn" level
- Users can override it by creating their own `.relspec-rules.yaml` file (see the sketch below)
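
A minimal sketch of that fallback behavior, assuming the `Config` type above (`os`, `errors`, and `gopkg.in/yaml.v3` are the only dependencies):

```go
// LoadConfig sketch: a missing file falls back to defaults; bad YAML is an error.
func LoadConfig(path string) (*Config, error) {
	data, err := os.ReadFile(path)
	if errors.Is(err, os.ErrNotExist) {
		return GetDefaultConfig(), nil // file not found: use defaults, don't error
	}
	if err != nil {
		return nil, err
	}

	var cfg Config
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		return nil, err // invalid YAML is a hard error
	}
	return &cfg, nil
}
```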
### 3. Validators (`pkg/inspector/validators.go`)
|
||||
|
||||
Each validator implements rule logic:
|
||||
|
||||
```go
|
||||
// Primary Key Validators
|
||||
func validatePrimaryKeyNaming(db *models.Database, rule Rule) []ValidationResult
|
||||
func validatePrimaryKeyDatatype(db *models.Database, rule Rule) []ValidationResult
|
||||
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Foreign Key Validators
|
||||
func validateForeignKeyColumnNaming(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateForeignKeyIndex(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Naming Convention Validators
|
||||
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateColumnNamingCase(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Length Validators
|
||||
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateColumnNameLength(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Reserved Keywords Validator
|
||||
func validateReservedKeywords(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Integrity Validators
|
||||
func validateMissingPrimaryKey(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateOrphanedForeignKey(db *models.Database, rule Rule) []ValidationResult
|
||||
func validateCircularDependency(db *models.Database, rule Rule) []ValidationResult
|
||||
|
||||
// Registry of all validators
|
||||
var validators = map[string]RuleValidator{
|
||||
"primary_key_naming": primaryKeyNamingValidator{},
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Report Formatting (`pkg/inspector/report.go`)
|
||||
|
||||
```go
|
||||
type ReportFormatter interface {
|
||||
Format(report *InspectorReport) (string, error)
|
||||
}
|
||||
|
||||
type MarkdownFormatter struct {
|
||||
UseColors bool // ANSI colors for terminal output
|
||||
}
|
||||
type JSONFormatter struct{}
|
||||
|
||||
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error)
|
||||
func (f *JSONFormatter) Format(report *InspectorReport) (string, error)
|
||||
|
||||
// Helper to detect if output is a TTY (terminal)
|
||||
func isTerminal(w io.Writer) bool
|
||||
```
|
||||
|
||||
**Output Behavior:**
|
||||
- Markdown format will use ANSI color codes when outputting to a terminal (TTY)
|
||||
- When piped or redirected to a file, plain markdown without colors
|
||||
- Colors: Red for errors, Yellow for warnings, Green for passed checks

**Markdown Format Example:**

```markdown
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31 10:30:45

## Summary
- Rules Checked: 12
- Errors: 3
- Warnings: 5
- Passed: 4

## Violations

### Errors (3)

#### primary_key_naming
**Table:** users, **Column:** user_id
Primary key columns must start with 'id_'

#### table_name_length
**Table:** user_authentication_sessions_with_metadata
Table name exceeds maximum length (64 characters)

### Warnings (5)

#### foreign_key_index
**Table:** orders, **Column:** customer_id
Foreign keys should have indexes

...
```

**JSON Format Example:**

```json
{
  "summary": {
    "total_rules": 12,
    "rules_checked": 12,
    "error_count": 3,
    "warning_count": 5,
    "passed_count": 4
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "current_name": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```

### 5. CLI Command (`cmd/relspec/inspect.go`)

```go
var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "Inspect and validate database schemas against rules",
	Long:  `Read database schemas from various formats and validate against configurable rules.`,
	RunE:  runInspect,
}

func init() {
	inspectCmd.Flags().String("from", "", "Input format (dbml, pgsql, json, etc.)")
	inspectCmd.Flags().String("from-path", "", "Input file path")
	inspectCmd.Flags().String("from-conn", "", "Database connection string")
	inspectCmd.Flags().String("rules", ".relspec-rules.yaml", "Rules configuration file")
	inspectCmd.Flags().String("output-format", "markdown", "Output format (markdown, json)")
	inspectCmd.Flags().String("output", "", "Output file (default: stdout)")
	inspectCmd.Flags().String("schema", "", "Filter by schema name")
	inspectCmd.MarkFlagRequired("from")
}

func runInspect(cmd *cobra.Command, args []string) error {
	// 1. Parse flags
	// 2. Create reader (reuse pattern from convert.go)
	// 3. Read database
	// 4. Load rules config (use defaults if file not found)
	// 5. Create inspector
	// 6. Run inspection
	// 7. Detect if output is terminal (for color support)
	// 8. Format report (with/without ANSI colors)
	// 9. Write output
	// 10. Exit with appropriate code (0 if no errors, 1 if errors)
	return nil // skeleton
}
```

## Implementation Phases

### Phase 1: Core Infrastructure
1. Create `pkg/inspector/` package structure
2. Implement `Config` and YAML loading
3. Implement `Inspector` core with basic validation framework
4. Create CLI command skeleton

### Phase 2: Basic Validators
1. Implement naming convention validators
   - Primary key naming
   - Foreign key column naming
   - Foreign key constraint naming
   - Table/column case validation
2. Implement length validators
3. Implement reserved keywords validator (leverage `pkg/pgsql/keywords.go`)

### Phase 3: Advanced Validators
1. Implement datatype validators
2. Implement integrity validators (missing PK, orphaned FK, circular deps)
3. Implement foreign key index validator

### Phase 4: Reporting
1. Implement `InspectorReport` structure
2. Implement markdown formatter
3. Implement JSON formatter
4. Add summary statistics

### Phase 5: CLI Integration
1. Wire up CLI command with flags
2. Integrate reader factory (from convert.go pattern)
3. Add output file handling
4. Add exit code logic
5. Add progress reporting

### Phase 6: Testing & Documentation
1. Unit tests for validators
2. Integration tests with sample schemas
3. Test with all reader formats
4. Update README with inspector documentation
5. Create example rules configuration file

## Files to Create

1. `pkg/inspector/inspector.go` - Core inspector logic
2. `pkg/inspector/rules.go` - Rule definitions and config loading
3. `pkg/inspector/validators.go` - Validation implementations
4. `pkg/inspector/report.go` - Report formatting
5. `pkg/inspector/config.go` - Config utilities
6. `cmd/relspec/inspect.go` - CLI command
7. `.relspec-rules.yaml.example` - Example configuration
8. `pkg/inspector/inspector_test.go` - Tests

## Files to Modify

1. `cmd/relspec/root.go` - Register inspect command
2. `README.md` - Add inspector documentation (if requested)

## Example Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgresql://localhost/mydb"

# Inspect a DBML file with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from json --from-path db.json --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public

# Use custom rules location
relspec inspect --from dbml --from-path schema.dbml --rules /path/to/rules.yaml
```

## Exit Codes
- 0: Success (no errors, only warnings or all passed)
- 1: Validation errors found (rules with `enabled: enforce` failed)
- 2: Runtime error (invalid config, reader error, etc.)
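
A minimal sketch of this mapping (`exitCode` is a hypothetical helper; `HasErrors` is defined on `InspectorReport` in this plan):

```go
// exitCode maps an inspection outcome to the process exit codes above.
func exitCode(report *InspectorReport, runErr error) int {
	if runErr != nil {
		return 2 // runtime failure: bad config, reader error, ...
	}
	if report.HasErrors() {
		return 1 // at least one rule with enabled: enforce failed
	}
	return 0 // clean run, or warnings only
}
```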

## Dependencies
- Existing: `pkg/models`, `pkg/readers`, `pkg/pgsql/keywords.go`
- New: `gopkg.in/yaml.v3` for YAML parsing (may already be in go.mod)

## Design Decisions

### Confirmed Choices (from user)
1. **Example config file**: Create `.relspec-rules.yaml.example` in repository root with documented examples
2. **Missing rules file**: Use sensible built-in defaults (don't error), all rules at "warn" level by default
3. **Terminal output**: ANSI colors (red/yellow/green) when outputting to terminal, plain markdown when piped/redirected
4. **Foreign key naming**: Separate configurable rules for both FK column names and FK constraint names

### Architecture Rationale
1. **Why YAML for config?**: Human-readable, supports comments, standard for config files
2. **Why three levels (enforce/warn/off)?**: Flexibility for gradual adoption, different contexts
3. **Why markdown + JSON?**: Markdown for human review, JSON for tooling integration
4. **Why pkg/inspector?**: Follows existing package structure, separates concerns
5. **Reuse readers**: Leverage existing reader infrastructure, supports all formats automatically
6. **Exit codes**: Follow standard conventions (0=success, 1=validation fail, 2=error)

## Future Enhancements (Not in Scope)
- Auto-fix mode (automatically rename columns, etc.)
- Custom rule plugins
- HTML report format
- Rule templates for different databases
- CI/CD integration examples
- Performance metrics in report
485
pkg/inspector/README.md
Normal file
485
pkg/inspector/README.md
Normal file
@@ -0,0 +1,485 @@
# RelSpec Inspector

> Database Schema Validation and Linting Tool

The RelSpec Inspector validates database schemas against configurable rules, helping you maintain consistency, enforce naming conventions, and catch common schema design issues across your database models.

## Overview

The Inspector reads database schemas from any supported RelSpec format and validates them against a set of configurable rules. It generates detailed reports highlighting violations, warnings, and passed checks.

## Features

- **Flexible Rule Configuration**: YAML-based rules with three severity levels (enforce, warn, off)
- **Generic Validators**: Reusable regex-based validators for custom naming conventions
- **Multiple Input Formats**: Works with all RelSpec readers (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- **Multiple Output Formats**: Markdown with ANSI colors for terminals, JSON for tooling integration
- **Smart Defaults**: Works out-of-the-box with sensible default rules
- **Terminal-Aware**: Automatic color support detection for improved readability
- **Exit Codes**: Proper exit codes for CI/CD integration

[Todo List of Features](./TODO.md)

## Quick Start

### Basic Usage

```bash
# Inspect a PostgreSQL database with default rules
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"

# Inspect a DBML file
relspec inspect --from dbml --from-path schema.dbml

# Inspect with custom rules
relspec inspect --from json --from-path db.json --rules my-rules.yaml

# Output JSON report to file
relspec inspect --from pgsql --from-conn "..." \
  --output-format json --output report.json

# Inspect specific schema only
relspec inspect --from pgsql --from-conn "..." --schema public
```

### Configuration

Create a `.relspec-rules.yaml` file to customize validation rules. If the file doesn't exist, the inspector uses sensible defaults.

```yaml
version: "1.0"

rules:
  # Primary key columns must start with "id_"
  primary_key_naming:
    enabled: enforce # enforce|warn|off
    function: primary_key_naming
    pattern: "^id_"
    message: "Primary key columns must start with 'id_'"

  # Foreign key columns must start with "rid_"
  foreign_key_column_naming:
    enabled: warn
    function: foreign_key_column_naming
    pattern: "^rid_"
    message: "Foreign key columns should start with 'rid_'"

  # Table names must be lowercase snake_case
  table_naming_case:
    enabled: warn
    function: table_regexpr # Generic regex validator
    pattern: "^[a-z][a-z0-9_]*$"
    message: "Table names should be lowercase with underscores"

  # Ensure all tables have primary keys
  missing_primary_key:
    enabled: warn
    function: have_primary_key
    message: "Table is missing a primary key"
```

## Built-in Validation Rules

### Primary Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `primary_key_naming` | `primary_key_naming` | Validate PK column names against regex pattern |
| `primary_key_datatype` | `primary_key_datatype` | Enforce approved PK data types (bigint, serial, etc.) |
| `primary_key_auto_increment` | `primary_key_auto_increment` | Check if PKs have auto-increment enabled |

### Foreign Key Rules

| Rule | Function | Description |
|------|----------|-------------|
| `foreign_key_column_naming` | `foreign_key_column_naming` | Validate FK column names against regex pattern |
| `foreign_key_constraint_naming` | `foreign_key_constraint_naming` | Validate FK constraint names against regex pattern |
| `foreign_key_index` | `foreign_key_index` | Ensure FK columns have indexes for performance |

### Naming Convention Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_naming_case` | `table_regexpr` | Generic regex validator for table names |
| `column_naming_case` | `column_regexpr` | Generic regex validator for column names |

### Length Rules

| Rule | Function | Description |
|------|----------|-------------|
| `table_name_length` | `table_name_length` | Limit table name length (default: 64 chars) |
| `column_name_length` | `column_name_length` | Limit column name length (default: 64 chars) |

### Reserved Keywords

| Rule | Function | Description |
|------|----------|-------------|
| `reserved_keywords` | `reserved_words` | Detect use of SQL reserved keywords as identifiers |

### Schema Integrity Rules

| Rule | Function | Description |
|------|----------|-------------|
| `missing_primary_key` | `have_primary_key` | Ensure tables have primary keys |
| `orphaned_foreign_key` | `orphaned_foreign_key` | Detect FKs referencing non-existent tables |
| `circular_dependency` | `circular_dependency` | Detect circular FK dependencies |

## Rule Configuration

### Severity Levels

Rules support three severity levels:

- **`enforce`**: Violations are errors (exit code 1)
- **`warn`**: Violations are warnings (exit code 0)
- **`off`**: Rule is disabled
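
The mapping from severity level to report entry mirrors the loop in `Inspect()` (see `inspector.go` later in this diff); a condensed sketch, where `levelFor` is a hypothetical helper:

```go
// levelFor returns the report level for a rule per the levels above.
// (Sketch; the real logic lives inline in Inspect() in inspector.go.)
func levelFor(rule inspector.Rule) (level string, skip bool) {
	if !rule.IsEnabled() { // "off": rule is skipped entirely
		return "", true
	}
	if rule.IsEnforced() { // "enforce": violations become errors
		return "error", false
	}
	return "warning", false // "warn": violations become warnings
}
```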

### Rule Structure

```yaml
rule_name:
  enabled: enforce|warn|off
  function: validator_function_name
  message: "Custom message shown on violation"
  # Rule-specific parameters
  pattern: "^regex_pattern$"    # For pattern-based validators
  allowed_types: [type1, type2] # For type validators
  max_length: 64                # For length validators
  check_tables: true            # For keyword validator
  check_columns: true           # For keyword validator
  require_index: true           # For FK index validator
```
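
These keys bind directly to the `Rule` struct in `rules.go` (shown later in this diff); an excerpt for reference:

```go
// Excerpt of the Rule struct the YAML keys above map onto (see rules.go).
type Rule struct {
	Enabled      string   `yaml:"enabled"`
	Function     string   `yaml:"function"`
	Message      string   `yaml:"message"`
	Pattern      string   `yaml:"pattern,omitempty"`
	AllowedTypes []string `yaml:"allowed_types,omitempty"`
	MaxLength    int      `yaml:"max_length,omitempty"`
	RequireIndex bool     `yaml:"require_index,omitempty"`
	CheckTables  bool     `yaml:"check_tables,omitempty"`
	CheckColumns bool     `yaml:"check_columns,omitempty"`
}
```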

## Generic Validators

The inspector provides generic validator functions that can be reused for custom rules:

### `table_regexpr`

Generic regex validator for table names. Create custom table naming rules:

```yaml
# Example: Ensure table names don't contain numbers
table_no_numbers:
  enabled: warn
  function: table_regexpr
  pattern: "^[a-z_]+$"
  message: "Table names should not contain numbers"

# Example: Tables must start with "tbl_"
table_prefix:
  enabled: enforce
  function: table_regexpr
  pattern: "^tbl_[a-z][a-z0-9_]*$"
  message: "Table names must start with 'tbl_'"
```

### `column_regexpr`

Generic regex validator for column names. Create custom column naming rules:

```yaml
# Example: Audit columns must end with "_audit"
audit_column_suffix:
  enabled: enforce
  function: column_regexpr
  pattern: ".*_audit$"
  message: "Audit columns must end with '_audit'"

# Example: Timestamp columns must end with "_at"
timestamp_suffix:
  enabled: warn
  function: column_regexpr
  pattern: ".*(created|updated|deleted)_at$"
  message: "Timestamp columns should end with '_at'"
```

## Output Formats

### Markdown (Default)

Human-readable markdown report with ANSI colors when outputting to a terminal:

```
# RelSpec Inspector Report

**Database:** my_database
**Source Format:** pgsql
**Generated:** 2025-12-31T10:30:45Z

## Summary
- Rules Checked: 13
- Errors: 2
- Warnings: 5
- Passed: 120

## Violations

### Errors (2)

#### primary_key_naming
**Location:** public.users.user_id
**Message:** Primary key columns must start with 'id_'
**Details:** expected_pattern=^id_

### Warnings (5)

#### foreign_key_index
**Location:** public.orders.customer_id
**Message:** Foreign key columns should have indexes
**Details:** has_index=false
```

### JSON

Structured JSON output for tooling integration:

```json
{
  "summary": {
    "total_rules": 13,
    "rules_checked": 13,
    "error_count": 2,
    "warning_count": 5,
    "passed_count": 120
  },
  "violations": [
    {
      "rule_name": "primary_key_naming",
      "level": "error",
      "message": "Primary key columns must start with 'id_'",
      "location": "public.users.user_id",
      "context": {
        "schema": "public",
        "table": "users",
        "column": "user_id",
        "expected_pattern": "^id_"
      },
      "passed": false
    }
  ],
  "generated_at": "2025-12-31T10:30:45Z",
  "database": "my_database",
  "source_format": "pgsql"
}
```

## CLI Reference

### Flags

| Flag | Type | Description |
|------|------|-------------|
| `--from` | string | **Required**. Source format (dbml, pgsql, json, yaml, gorm, etc.) |
| `--from-path` | string | Source file path (for file-based formats) |
| `--from-conn` | string | Connection string (for database formats) |
| `--rules` | string | Path to rules YAML file (default: `.relspec-rules.yaml`) |
| `--output-format` | string | Output format: `markdown` or `json` (default: `markdown`) |
| `--output` | string | Output file path (default: stdout) |
| `--schema` | string | Filter to specific schema by name |

### Exit Codes

| Code | Meaning |
|------|---------|
| 0 | Success (no errors, only warnings or all passed) |
| 1 | Validation errors found (rules with `enabled: enforce` failed) |
| 2 | Runtime error (invalid config, reader error, etc.) |

## CI/CD Integration

### GitHub Actions Example

```yaml
name: Schema Validation

on: [pull_request]

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install RelSpec
        run: go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest

      - name: Validate Schema
        run: |
          relspec inspect \
            --from dbml \
            --from-path schema.dbml \
            --rules .relspec-rules.yaml \
            --output-format json \
            --output validation-report.json

      - name: Upload Report
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: validation-report
          path: validation-report.json
```

### Pre-commit Hook Example

```bash
#!/bin/bash
# .git/hooks/pre-commit

echo "Running schema validation..."

relspec inspect \
  --from dbml \
  --from-path schema.dbml \
  --rules .relspec-rules.yaml

exit $?
```

## Example Configuration File

See [`.relspec-rules.yaml.example`](../../.relspec-rules.yaml.example) for a fully documented example configuration with all available rules and customization options.

## Common Use Cases

### Enforce Naming Standards

```yaml
# Ensure consistent naming across your schema
table_naming_case:
  enabled: enforce
  function: table_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Tables must use snake_case"

column_naming_case:
  enabled: enforce
  function: column_regexpr
  pattern: "^[a-z][a-z0-9_]*$"
  message: "Columns must use snake_case"

primary_key_naming:
  enabled: enforce
  function: primary_key_naming
  pattern: "^id$"
  message: "Primary key must be named 'id'"

foreign_key_column_naming:
  enabled: enforce
  function: foreign_key_column_naming
  pattern: "^[a-z]+_id$"
  message: "Foreign keys must end with '_id'"
```

### Performance Best Practices

```yaml
# Ensure optimal database performance
foreign_key_index:
  enabled: enforce
  function: foreign_key_index
  require_index: true
  message: "Foreign keys must have indexes"

primary_key_datatype:
  enabled: enforce
  function: primary_key_datatype
  allowed_types: [bigserial, bigint]
  message: "Use bigserial or bigint for primary keys"
```

### Schema Integrity

```yaml
# Prevent common schema issues
missing_primary_key:
  enabled: enforce
  function: have_primary_key
  message: "All tables must have a primary key"

orphaned_foreign_key:
  enabled: enforce
  function: orphaned_foreign_key
  message: "Foreign keys must reference existing tables"

circular_dependency:
  enabled: warn
  function: circular_dependency
  message: "Circular dependencies detected"
```

### Avoid Reserved Keywords

```yaml
reserved_keywords:
  enabled: warn
  function: reserved_words
  check_tables: true
  check_columns: true
  message: "Avoid using SQL reserved keywords"
```

## Programmatic Usage

You can use the inspector programmatically in your Go code:

```go
package main

import (
	"fmt"
	"os"

	"git.warky.dev/wdevs/relspecgo/pkg/inspector"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	// Load your database model
	db := &models.Database{
		Name: "my_database",
		Schemas: []*models.Schema{
			// ... your schema
		},
	}

	// Load rules configuration
	config, err := inspector.LoadConfig(".relspec-rules.yaml")
	if err != nil {
		panic(err)
	}

	// Create and run inspector
	insp := inspector.NewInspector(db, config)
	report, err := insp.Inspect()
	if err != nil {
		panic(err)
	}

	// Generate report
	formatter := inspector.NewMarkdownFormatter(os.Stdout)
	output, err := formatter.Format(report)
	if err != nil {
		panic(err)
	}

	fmt.Println(output)

	// Check for errors
	if report.HasErrors() {
		os.Exit(1)
	}
}
```
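
Continuing the example above, the JSON formatter shares the same `Format` signature, so switching output formats is a one-line change:

```go
// Emit the same report as JSON instead of markdown.
jsonFormatter := inspector.NewJSONFormatter()
jsonOutput, err := jsonFormatter.Format(report)
if err != nil {
	panic(err)
}
fmt.Println(jsonOutput)
```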

## Contributing

Contributions are welcome! To add a new validator:

1. Add the validator function to `validators.go`
2. Register it in the `getValidator()` function in `inspector.go`
3. Add its default configuration to `GetDefaultConfig()` in `rules.go`
4. Update this README with the new rule documentation

## License

Apache License 2.0 - See [LICENSE](../../LICENSE) for details.
65
pkg/inspector/TODO.md
Normal file
65
pkg/inspector/TODO.md
Normal file
@@ -0,0 +1,65 @@
## Inspector TODO

See the [Inspector README](./README.md) for complete documentation of implemented features.

### Implemented ✓

- [x] Core validation framework with configurable rules
- [x] YAML configuration with three severity levels (enforce/warn/off)
- [x] Generic validators (table_regexpr, column_regexpr)
- [x] Primary key validation (naming, datatype, auto-increment)
- [x] Foreign key validation (column naming, constraint naming, indexes)
- [x] Naming convention validation (snake_case, custom patterns)
- [x] Length validation (table names, column names)
- [x] Reserved keywords detection
- [x] Schema integrity checks (missing PKs, orphaned FKs, circular dependencies)
- [x] Multiple output formats (Markdown with ANSI colors, JSON)
- [x] Terminal-aware color output
- [x] All input formats supported (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
- [x] CI/CD integration support (proper exit codes)
- [x] Comprehensive documentation and examples

### Future Enhancements

#### Reporting Enhancements
- [ ] Add verbose mode to show all passing checks in detail
- [ ] Add summary-only mode (suppress violation details)
- [ ] Group violations by table/schema in report
- [ ] Add statistics: most violated rules, tables with most issues
- [ ] HTML report format with interactive filtering

#### Additional Validators
- [ ] Optimal column order for space and storage efficiency
- [ ] Similar-sounding column names detection (synonyms, typos)
- [ ] Plural/singular table name consistency
- [ ] Column order validation (PK first, FKs next, data columns, timestamps last)
- [ ] Data type consistency across related columns
- [ ] Index coverage analysis
- [ ] Unused indexes detection
- [ ] Missing indexes on commonly filtered columns
- [ ] Table size estimates and warnings for large tables
- [ ] Function naming conventions (support organization-specific rule sets, such as the Bitech conventions, driven from a rules file)
- [ ] View naming conventions
- [ ] Enum naming conventions
- [ ] Custom type naming conventions
- [ ] Table name consistency across related tables

#### Auto-Fix Capabilities
- [ ] Auto-fix mode (`relspec inspect --fix`)
- [ ] Update foreign key types to match primary key types
- [ ] Rename foreign keys to match primary key names with configurable prefix/suffix
- [ ] Reorder columns according to rules
- [ ] Add missing indexes on foreign keys
- [ ] Generate migration scripts for fixes
- [ ] Dry-run mode to preview changes

#### Advanced Features
- [ ] Custom validator plugins (Go plugin system)
- [ ] Rule templates for different databases (PostgreSQL, MySQL, etc.)
- [ ] Rule inheritance and composition
- [ ] Conditional rules (apply only to certain schemas/tables)
- [ ] Performance metrics in report (validation time per rule)
- [ ] Caching for large databases
- [ ] Incremental validation (only changed tables)
- [ ] Watch mode for continuous validation
182
pkg/inspector/inspector.go
Normal file
182
pkg/inspector/inspector.go
Normal file
@@ -0,0 +1,182 @@
package inspector

import (
	"fmt"
	"time"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// Inspector performs validation on database models
type Inspector struct {
	config *Config
	db     *models.Database
}

// ValidationResult represents the result of a single validation check
type ValidationResult struct {
	RuleName string                 `json:"rule_name"`
	Level    string                 `json:"level"` // "error" or "warning"
	Message  string                 `json:"message"`
	Location string                 `json:"location"` // e.g., "schema.table.column"
	Context  map[string]interface{} `json:"context"`
	Passed   bool                   `json:"passed"`
}

// InspectorReport contains the complete validation report
type InspectorReport struct {
	Summary      ReportSummary      `json:"summary"`
	Violations   []ValidationResult `json:"violations"`
	GeneratedAt  time.Time          `json:"generated_at"`
	Database     string             `json:"database"`
	SourceFormat string             `json:"source_format"`
}

// ReportSummary contains aggregate statistics
type ReportSummary struct {
	TotalRules   int `json:"total_rules"`
	RulesChecked int `json:"rules_checked"`
	RulesSkipped int `json:"rules_skipped"`
	ErrorCount   int `json:"error_count"`
	WarningCount int `json:"warning_count"`
	PassedCount  int `json:"passed_count"`
}

// NewInspector creates a new inspector with the given database and configuration
func NewInspector(db *models.Database, config *Config) *Inspector {
	return &Inspector{
		config: config,
		db:     db,
	}
}

// Inspect runs all enabled validation rules and returns a report
func (i *Inspector) Inspect() (*InspectorReport, error) {
	results := []ValidationResult{}

	// Run all enabled validators
	for ruleName, rule := range i.config.Rules {
		if !rule.IsEnabled() {
			continue
		}

		// Get the validator function for this rule using the function field
		validator, exists := getValidator(rule.Function)
		if !exists {
			// Skip unknown validator functions
			continue
		}

		// Run the validator
		ruleResults := validator(i.db, rule, ruleName)

		// Set the level based on rule configuration
		level := "warning"
		if rule.IsEnforced() {
			level = "error"
		}

		for idx := range ruleResults {
			ruleResults[idx].Level = level
		}

		results = append(results, ruleResults...)
	}

	// Generate summary
	summary := i.generateSummary(results)

	report := &InspectorReport{
		Summary:      summary,
		Violations:   results,
		GeneratedAt:  time.Now(),
		Database:     i.db.Name,
		SourceFormat: i.db.SourceFormat,
	}

	return report, nil
}

// generateSummary creates summary statistics from validation results
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
	summary := ReportSummary{
		TotalRules: len(i.config.Rules),
	}

	// Count enabled rules
	for _, rule := range i.config.Rules {
		if rule.IsEnabled() {
			summary.RulesChecked++
		} else {
			summary.RulesSkipped++
		}
	}

	// Count violations by level
	for _, result := range results {
		if result.Passed {
			summary.PassedCount++
		} else {
			if result.Level == "error" {
				summary.ErrorCount++
			} else {
				summary.WarningCount++
			}
		}
	}

	return summary
}

// HasErrors returns true if the report contains any errors
func (r *InspectorReport) HasErrors() bool {
	return r.Summary.ErrorCount > 0
}

// validatorFunc is a function that validates a rule against a database
type validatorFunc func(*models.Database, Rule, string) []ValidationResult

// getValidator returns the validator function for a given function name
func getValidator(functionName string) (validatorFunc, bool) {
	validators := map[string]validatorFunc{
		"primary_key_naming":            validatePrimaryKeyNaming,
		"primary_key_datatype":          validatePrimaryKeyDatatype,
		"primary_key_auto_increment":    validatePrimaryKeyAutoIncrement,
		"foreign_key_column_naming":     validateForeignKeyColumnNaming,
		"foreign_key_constraint_naming": validateForeignKeyConstraintNaming,
		"foreign_key_index":             validateForeignKeyIndex,
		"table_regexpr":                 validateTableNamingCase,
		"column_regexpr":                validateColumnNamingCase,
		"table_name_length":             validateTableNameLength,
		"column_name_length":            validateColumnNameLength,
		"reserved_words":                validateReservedKeywords,
		"have_primary_key":              validateMissingPrimaryKey,
		"orphaned_foreign_key":          validateOrphanedForeignKey,
		"circular_dependency":           validateCircularDependency,
	}

	fn, exists := validators[functionName]
	return fn, exists
}

// createResult is a helper to create a validation result
func createResult(ruleName string, passed bool, message string, location string, context map[string]interface{}) ValidationResult {
	return ValidationResult{
		RuleName: ruleName,
		Message:  message,
		Location: location,
		Context:  context,
		Passed:   passed,
	}
}

// formatLocation creates a location string from schema, table, and optional column
func formatLocation(schema, table, column string) string {
	if column != "" {
		return fmt.Sprintf("%s.%s.%s", schema, table, column)
	}
	if table != "" {
		return fmt.Sprintf("%s.%s", schema, table)
	}
	return schema
}
229
pkg/inspector/report.go
Normal file
229
pkg/inspector/report.go
Normal file
@@ -0,0 +1,229 @@
package inspector

import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"strings"
	"time"
)

// ANSI color codes
const (
	colorReset  = "\033[0m"
	colorRed    = "\033[31m"
	colorYellow = "\033[33m"
	colorGreen  = "\033[32m"
	colorBold   = "\033[1m"
)

// ReportFormatter defines the interface for report formatters
type ReportFormatter interface {
	Format(report *InspectorReport) (string, error)
}

// MarkdownFormatter formats reports as markdown
type MarkdownFormatter struct {
	UseColors bool
}

// JSONFormatter formats reports as JSON
type JSONFormatter struct{}

// NewMarkdownFormatter creates a markdown formatter with color support detection
func NewMarkdownFormatter(writer io.Writer) *MarkdownFormatter {
	return &MarkdownFormatter{
		UseColors: isTerminal(writer),
	}
}

// NewJSONFormatter creates a JSON formatter
func NewJSONFormatter() *JSONFormatter {
	return &JSONFormatter{}
}

// Format generates a markdown report
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
	var sb strings.Builder

	// Header
	sb.WriteString(f.formatHeader("RelSpec Inspector Report"))
	sb.WriteString("\n\n")

	// Metadata
	sb.WriteString(f.formatBold("Database:") + " " + report.Database + "\n")
	sb.WriteString(f.formatBold("Source Format:") + " " + report.SourceFormat + "\n")
	sb.WriteString(f.formatBold("Generated:") + " " + report.GeneratedAt.Format(time.RFC3339) + "\n")
	sb.WriteString("\n")

	// Summary
	sb.WriteString(f.formatHeader("Summary"))
	sb.WriteString("\n")
	sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))

	// Color-code error and warning counts
	if report.Summary.ErrorCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
	} else {
		sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
	}

	if report.Summary.WarningCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
	} else {
		sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
	}

	if report.Summary.PassedCount > 0 {
		sb.WriteString(f.colorize(fmt.Sprintf("- Passed: %d\n", report.Summary.PassedCount), colorGreen))
	}

	sb.WriteString("\n")

	// Group violations by level
	errors := []ValidationResult{}
	warnings := []ValidationResult{}

	for _, v := range report.Violations {
		if !v.Passed {
			if v.Level == "error" {
				errors = append(errors, v)
			} else {
				warnings = append(warnings, v)
			}
		}
	}

	// Report violations
	if len(errors) > 0 || len(warnings) > 0 {
		sb.WriteString(f.formatHeader("Violations"))
		sb.WriteString("\n")

		// Errors
		if len(errors) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Errors (%d)", len(errors)), colorRed))
			sb.WriteString("\n")
			for _, violation := range errors {
				sb.WriteString(f.formatViolation(violation, colorRed))
			}
			sb.WriteString("\n")
		}

		// Warnings
		if len(warnings) > 0 {
			sb.WriteString(f.formatSubheader(fmt.Sprintf("Warnings (%d)", len(warnings)), colorYellow))
			sb.WriteString("\n")
			for _, violation := range warnings {
				sb.WriteString(f.formatViolation(violation, colorYellow))
			}
		}
	} else {
		sb.WriteString(f.colorize("✓ No violations found!\n", colorGreen))
	}

	return sb.String(), nil
}

// Format generates a JSON report
func (f *JSONFormatter) Format(report *InspectorReport) (string, error) {
	data, err := json.MarshalIndent(report, "", " ")
	if err != nil {
		return "", fmt.Errorf("failed to marshal report to JSON: %w", err)
	}
	return string(data), nil
}

// Helper methods for MarkdownFormatter

func (f *MarkdownFormatter) formatHeader(text string) string {
	return f.formatBold("# " + text)
}

func (f *MarkdownFormatter) formatSubheader(text string, color string) string {
	header := "### " + text
	if f.UseColors {
		return color + colorBold + header + colorReset
	}
	return header
}

func (f *MarkdownFormatter) formatBold(text string) string {
	if f.UseColors {
		return colorBold + text + colorReset
	}
	return "**" + text + "**"
}

func (f *MarkdownFormatter) colorize(text string, color string) string {
	if f.UseColors {
		return color + text + colorReset
	}
	return text
}

func (f *MarkdownFormatter) formatViolation(v ValidationResult, color string) string {
	var sb strings.Builder

	// Rule name as header
	if f.UseColors {
		sb.WriteString(color + "#### " + v.RuleName + colorReset + "\n")
	} else {
		sb.WriteString("#### " + v.RuleName + "\n")
	}

	// Location and message
	sb.WriteString(f.formatBold("Location:") + " " + v.Location + "\n")
	sb.WriteString(f.formatBold("Message:") + " " + v.Message + "\n")

	// Context details (optional, only show interesting ones)
	if len(v.Context) > 0 {
		contextStr := f.formatContext(v.Context)
		if contextStr != "" {
			sb.WriteString(f.formatBold("Details:") + " " + contextStr + "\n")
		}
	}

	sb.WriteString("\n")
	return sb.String()
}

func (f *MarkdownFormatter) formatContext(context map[string]interface{}) string {
	// Extract relevant context information
	var parts []string

	// Skip schema, table, column as they're in location
	skipKeys := map[string]bool{
		"schema": true,
		"table":  true,
		"column": true,
	}

	for key, value := range context {
		if skipKeys[key] {
			continue
		}

		parts = append(parts, fmt.Sprintf("%s=%v", key, value))
	}

	return strings.Join(parts, ", ")
}

// isTerminal checks if the writer is a terminal (supports ANSI colors)
func isTerminal(w io.Writer) bool {
	file, ok := w.(*os.File)
	if !ok {
		return false
	}

	// Check if the file descriptor is a terminal
	stat, err := file.Stat()
	if err != nil {
		return false
	}

	// Check if it's a character device (terminal)
	// This works on Unix-like systems
	return (stat.Mode() & os.ModeCharDevice) != 0
}
169
pkg/inspector/rules.go
Normal file
169
pkg/inspector/rules.go
Normal file
@@ -0,0 +1,169 @@
package inspector

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

// Config represents the inspector rules configuration
type Config struct {
	Version string          `yaml:"version"`
	Rules   map[string]Rule `yaml:"rules"`
}

// Rule represents a single validation rule
type Rule struct {
	Enabled              string   `yaml:"enabled"`  // "enforce", "warn", "off"
	Function             string   `yaml:"function"` // validator function name
	Message              string   `yaml:"message"`
	Pattern              string   `yaml:"pattern,omitempty"`
	AllowedTypes         []string `yaml:"allowed_types,omitempty"`
	MaxLength            int      `yaml:"max_length,omitempty"`
	Case                 string   `yaml:"case,omitempty"`
	RequireIndex         bool     `yaml:"require_index,omitempty"`
	CheckTables          bool     `yaml:"check_tables,omitempty"`
	CheckColumns         bool     `yaml:"check_columns,omitempty"`
	RequireAutoIncrement bool     `yaml:"require_auto_increment,omitempty"`
}

// LoadConfig loads configuration from a YAML file.
// If the file doesn't exist, it returns the default configuration.
// If the file exists but is invalid, it returns an error.
func LoadConfig(path string) (*Config, error) {
	// Check if file exists
	if _, err := os.Stat(path); os.IsNotExist(err) {
		// File doesn't exist, use defaults
		return GetDefaultConfig(), nil
	}

	// Read file
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// Parse YAML
	var config Config
	if err := yaml.Unmarshal(data, &config); err != nil {
		return nil, fmt.Errorf("failed to parse config YAML: %w", err)
	}

	return &config, nil
}

// GetDefaultConfig returns the default inspector configuration.
// Rules default to the "warn" level, except primary_key_auto_increment,
// which defaults to "off".
func GetDefaultConfig() *Config {
	return &Config{
		Version: "1.0",
		Rules: map[string]Rule{
			// Primary Key Rules
			"primary_key_naming": {
				Enabled:  "warn",
				Function: "primary_key_naming",
				Pattern:  "^id_",
				Message:  "Primary key columns should start with 'id_'",
			},
			"primary_key_datatype": {
				Enabled:      "warn",
				Function:     "primary_key_datatype",
				AllowedTypes: []string{"bigserial", "bigint", "int", "serial", "integer", "int4", "int8"},
				Message:      "Primary keys should use integer types (bigserial, bigint, int, serial)",
			},
			"primary_key_auto_increment": {
				Enabled:              "off",
				Function:             "primary_key_auto_increment",
				RequireAutoIncrement: true,
				Message:              "Primary key without auto-increment detected",
			},

			// Foreign Key Rules
			"foreign_key_column_naming": {
				Enabled:  "warn",
				Function: "foreign_key_column_naming",
				Pattern:  "^rid_",
				Message:  "Foreign key columns should start with 'rid_'",
			},
			"foreign_key_constraint_naming": {
				Enabled:  "warn",
				Function: "foreign_key_constraint_naming",
				Pattern:  "^fk_",
				Message:  "Foreign key constraint names should start with 'fk_'",
			},
			"foreign_key_index": {
				Enabled:      "warn",
				Function:     "foreign_key_index",
				RequireIndex: true,
				Message:      "Foreign key columns should have indexes for optimal performance",
			},

			// Naming Convention Rules
			"table_naming_case": {
				Enabled:  "warn",
				Function: "table_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Table names should be lowercase with underscores (snake_case)",
			},
			"column_naming_case": {
				Enabled:  "warn",
				Function: "column_regexpr",
				Case:     "lowercase",
				Pattern:  "^[a-z][a-z0-9_]*$",
				Message:  "Column names should be lowercase with underscores (snake_case)",
			},

			// Length Rules
			"table_name_length": {
				Enabled:   "warn",
				Function:  "table_name_length",
				MaxLength: 64,
				Message:   "Table name exceeds recommended maximum length of 64 characters",
			},
			"column_name_length": {
				Enabled:   "warn",
				Function:  "column_name_length",
				MaxLength: 64,
				Message:   "Column name exceeds recommended maximum length of 64 characters",
			},

			// Reserved Keywords
			"reserved_keywords": {
				Enabled:      "warn",
				Function:     "reserved_words",
				CheckTables:  true,
				CheckColumns: true,
				Message:      "Using SQL reserved keywords as identifiers can cause issues",
			},

			// Schema Integrity Rules
			"missing_primary_key": {
				Enabled:  "warn",
				Function: "have_primary_key",
				Message:  "Table is missing a primary key",
			},
			"orphaned_foreign_key": {
				Enabled:  "warn",
				Function: "orphaned_foreign_key",
				Message:  "Foreign key references a non-existent table",
			},
			"circular_dependency": {
				Enabled:  "warn",
				Function: "circular_dependency",
				Message:  "Circular foreign key dependency detected",
			},
		},
	}
}

// IsEnabled returns true if the rule is enabled (either "enforce" or "warn")
func (r *Rule) IsEnabled() bool {
	return r.Enabled == "enforce" || r.Enabled == "warn"
}

// IsEnforced returns true if the rule is set to "enforce" level
func (r *Rule) IsEnforced() bool {
	return r.Enabled == "enforce"
}
603
pkg/inspector/validators.go
Normal file
603
pkg/inspector/validators.go
Normal file
@@ -0,0 +1,603 @@
|
||||
package inspector
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
|
||||
)
|
||||
|
||||
// validatePrimaryKeyNaming checks that primary key column names match a pattern
|
||||
func validatePrimaryKeyNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
pattern, err := regexp.Compile(rule.Pattern)
|
||||
if err != nil {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, col := range table.Columns {
|
||||
if col.IsPrimaryKey {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
passed := pattern.MatchString(col.Name)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"expected_pattern": rule.Pattern,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validatePrimaryKeyDatatype checks that primary keys use approved data types
|
||||
func validatePrimaryKeyDatatype(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, col := range table.Columns {
|
||||
if col.IsPrimaryKey {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
|
||||
// Normalize type (remove size/precision)
|
||||
normalizedType := normalizeDataType(col.Type)
|
||||
passed := contains(rule.AllowedTypes, normalizedType)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"current_type": col.Type,
|
||||
"allowed_types": rule.AllowedTypes,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validatePrimaryKeyAutoIncrement checks primary key auto-increment settings
|
||||
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, col := range table.Columns {
|
||||
if col.IsPrimaryKey {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
|
||||
// Check if auto-increment matches requirement
|
||||
passed := col.AutoIncrement == rule.RequireAutoIncrement
|
||||
|
||||
if !passed {
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
false,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"has_auto_increment": col.AutoIncrement,
|
||||
"require_auto_increment": rule.RequireAutoIncrement,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateForeignKeyColumnNaming checks that foreign key column names match a pattern
|
||||
func validateForeignKeyColumnNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
pattern, err := regexp.Compile(rule.Pattern)
|
||||
if err != nil {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
// Check foreign key constraints
|
||||
for _, constraint := range table.Constraints {
|
||||
if constraint.Type == models.ForeignKeyConstraint {
|
||||
for _, colName := range constraint.Columns {
|
||||
location := formatLocation(schema.Name, table.Name, colName)
|
||||
passed := pattern.MatchString(colName)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": colName,
|
||||
"constraint": constraint.Name,
|
||||
"expected_pattern": rule.Pattern,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateForeignKeyConstraintNaming checks that foreign key constraint names match a pattern
|
||||
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
pattern, err := regexp.Compile(rule.Pattern)
|
||||
if err != nil {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, constraint := range table.Constraints {
|
||||
if constraint.Type == models.ForeignKeyConstraint {
|
||||
location := formatLocation(schema.Name, table.Name, "")
|
||||
passed := pattern.MatchString(constraint.Name)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"constraint": constraint.Name,
|
||||
"expected_pattern": rule.Pattern,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateForeignKeyIndex checks that foreign key columns have indexes
|
||||
func validateForeignKeyIndex(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
if !rule.RequireIndex {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
// Get all foreign key columns
|
||||
fkColumns := make(map[string]bool)
|
||||
for _, constraint := range table.Constraints {
|
||||
if constraint.Type == models.ForeignKeyConstraint {
|
||||
for _, col := range constraint.Columns {
|
||||
fkColumns[col] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if each FK column has an index
|
||||
for fkCol := range fkColumns {
|
||||
hasIndex := false
|
||||
|
||||
// Check table indexes
|
||||
for _, index := range table.Indexes {
|
||||
// Index is good if FK column is the first column
|
||||
if len(index.Columns) > 0 && index.Columns[0] == fkCol {
|
||||
hasIndex = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
location := formatLocation(schema.Name, table.Name, fkCol)
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
hasIndex,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": fkCol,
|
||||
"has_index": hasIndex,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateTableNamingCase checks table name casing
|
||||
func validateTableNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
pattern, err := regexp.Compile(rule.Pattern)
|
||||
if err != nil {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
location := formatLocation(schema.Name, table.Name, "")
|
||||
passed := pattern.MatchString(table.Name)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"expected_case": rule.Case,
|
||||
"expected_pattern": rule.Pattern,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateColumnNamingCase checks column name casing
|
||||
func validateColumnNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
pattern, err := regexp.Compile(rule.Pattern)
|
||||
if err != nil {
|
||||
return results
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, col := range table.Columns {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
passed := pattern.MatchString(col.Name)
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"expected_case": rule.Case,
|
||||
"expected_pattern": rule.Pattern,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateTableNameLength checks table name length
|
||||
func validateTableNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
location := formatLocation(schema.Name, table.Name, "")
|
||||
passed := len(table.Name) <= rule.MaxLength
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"length": len(table.Name),
|
||||
"max_length": rule.MaxLength,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateColumnNameLength checks column name length
|
||||
func validateColumnNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
for _, col := range table.Columns {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
passed := len(col.Name) <= rule.MaxLength
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"length": len(col.Name),
|
||||
"max_length": rule.MaxLength,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateReservedKeywords checks for reserved SQL keywords
|
||||
func validateReservedKeywords(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
// Build keyword map from PostgreSQL keywords
|
||||
keywordSlice := pgsql.GetPostgresKeywords()
|
||||
keywords := make(map[string]bool)
|
||||
for _, kw := range keywordSlice {
|
||||
keywords[strings.ToUpper(kw)] = true
|
||||
}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
// Check table name
|
||||
if rule.CheckTables {
|
||||
location := formatLocation(schema.Name, table.Name, "")
|
||||
passed := !keywords[strings.ToUpper(table.Name)]
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"object_type": "table",
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
// Check column names
|
||||
if rule.CheckColumns {
|
||||
for _, col := range table.Columns {
|
||||
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||
passed := !keywords[strings.ToUpper(col.Name)]
|
||||
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
passed,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
"column": col.Name,
|
||||
"object_type": "column",
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateMissingPrimaryKey checks for tables without primary keys
|
||||
func validateMissingPrimaryKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||
results := []ValidationResult{}
|
||||
|
||||
for _, schema := range db.Schemas {
|
||||
for _, table := range schema.Tables {
|
||||
hasPrimaryKey := false
|
||||
|
||||
// Check columns for primary key
|
||||
for _, col := range table.Columns {
|
||||
if col.IsPrimaryKey {
|
||||
hasPrimaryKey = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Also check constraints
|
||||
if !hasPrimaryKey {
|
||||
for _, constraint := range table.Constraints {
|
||||
if constraint.Type == models.PrimaryKeyConstraint {
|
||||
hasPrimaryKey = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
location := formatLocation(schema.Name, table.Name, "")
|
||||
results = append(results, createResult(
|
||||
ruleName,
|
||||
hasPrimaryKey,
|
||||
rule.Message,
|
||||
location,
|
||||
map[string]interface{}{
|
||||
"schema": schema.Name,
|
||||
"table": table.Name,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// validateOrphanedForeignKey checks for foreign keys referencing non-existent tables
func validateOrphanedForeignKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	// Build a map of existing tables for quick lookup
	tableExists := make(map[string]bool)
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			key := schema.Name + "." + table.Name
			tableExists[key] = true
		}
	}

	// Check all foreign key constraints
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					// Build referenced table key
					refSchema := constraint.ReferencedSchema
					if refSchema == "" {
						refSchema = schema.Name
					}
					refKey := refSchema + "." + constraint.ReferencedTable

					location := formatLocation(schema.Name, table.Name, "")
					passed := tableExists[refKey]

					results = append(results, createResult(
						ruleName,
						passed,
						rule.Message,
						location,
						map[string]interface{}{
							"schema":            schema.Name,
							"table":             table.Name,
							"constraint":        constraint.Name,
							"referenced_schema": refSchema,
							"referenced_table":  constraint.ReferencedTable,
						},
					))
				}
			}
		}
	}

	return results
}

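// Illustrative sketch (not part of the commit): orphan detection relies on
// the "schema.table" key format used to build tableExists. A constraint that
// references a table absent from the map fails the rule:
//
//	tableExists := map[string]bool{"public.customers": true}
//	passed := tableExists["public.orders"] // false -> orphaned foreign key
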
// validateCircularDependency checks for circular foreign key dependencies
func validateCircularDependency(db *models.Database, rule Rule, ruleName string) []ValidationResult {
	results := []ValidationResult{}

	// Build dependency graph
	dependencies := make(map[string][]string)
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name

			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					refSchema := constraint.ReferencedSchema
					if refSchema == "" {
						refSchema = schema.Name
					}
					refKey := refSchema + "." + constraint.ReferencedTable

					dependencies[tableKey] = append(dependencies[tableKey], refKey)
				}
			}
		}
	}

	// Check for cycles using DFS
	for tableKey := range dependencies {
		visited := make(map[string]bool)
		recStack := make(map[string]bool)

		if hasCycle(tableKey, dependencies, visited, recStack) {
			parts := strings.Split(tableKey, ".")
			location := formatLocation(parts[0], parts[1], "")

			results = append(results, createResult(
				ruleName,
				false,
				rule.Message,
				location,
				map[string]interface{}{
					"schema": parts[0],
					"table":  parts[1],
				},
			))
		}
	}

	return results
}

// Helper functions

// hasCycle performs DFS to detect cycles in dependency graph
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
	visited[node] = true
	recStack[node] = true

	for _, neighbor := range graph[node] {
		if !visited[neighbor] {
			if hasCycle(neighbor, graph, visited, recStack) {
				return true
			}
		} else if recStack[neighbor] {
			return true
		}
	}

	recStack[node] = false
	return false
}

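// Illustrative sketch (not part of the commit): two tables whose foreign keys
// reference each other form a cycle that hasCycle reports:
//
//	graph := map[string][]string{
//		"public.orders":    {"public.customers"},
//		"public.customers": {"public.orders"},
//	}
//	hasCycle("public.orders", graph, map[string]bool{}, map[string]bool{}) // true
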
// normalizeDataType removes size/precision from data type
func normalizeDataType(dataType string) string {
	// Remove everything in parentheses
	idx := strings.Index(dataType, "(")
	if idx > 0 {
		dataType = dataType[:idx]
	}
	return strings.ToLower(strings.TrimSpace(dataType))
}

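// Illustrative sketch (not part of the commit): normalizeDataType drops the
// size/precision suffix and lowercases, so differently sized declarations of
// the same base type compare equal:
//
//	normalizeDataType("VARCHAR(255)")  // "varchar"
//	normalizeDataType("Numeric(10,2)") // "numeric"
//	normalizeDataType(" text ")        // "text"
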
// contains checks if a string slice contains a value
func contains(slice []string, value string) bool {
	for _, item := range slice {
		if strings.EqualFold(item, value) {
			return true
		}
	}
	return false
}

574
pkg/merge/merge.go
Normal file
@@ -0,0 +1,574 @@
// Package merge provides utilities for merging database schemas.
// It allows combining schemas from multiple sources while avoiding duplicates,
// supporting only additive operations (no deletion or modification of existing items).
package merge

import (
	"fmt"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// MergeResult represents the result of a merge operation
type MergeResult struct {
	SchemasAdded   int
	TablesAdded    int
	ColumnsAdded   int
	RelationsAdded int
	DomainsAdded   int
	EnumsAdded     int
	ViewsAdded     int
	SequencesAdded int
}

// MergeOptions contains options for merge operations
type MergeOptions struct {
	SkipDomains    bool
	SkipRelations  bool
	SkipEnums      bool
	SkipViews      bool
	SkipSequences  bool
	SkipTableNames map[string]bool // Tables to skip during merge (keyed by table name)
}

// MergeDatabases merges the source database into the target database.
// Only adds missing items; existing items are not modified.
func MergeDatabases(target, source *models.Database, opts *MergeOptions) *MergeResult {
	if opts == nil {
		opts = &MergeOptions{}
	}

	result := &MergeResult{}

	if target == nil || source == nil {
		return result
	}

	// Merge schemas and their contents
	result.merge(target, source, opts)

	return result
}

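// Illustrative sketch (not part of the commit): a typical additive merge of
// two parsed databases; the option values here are hypothetical.
//
//	opts := &MergeOptions{
//		SkipViews:      true,
//		SkipTableNames: map[string]bool{"migrations": true},
//	}
//	result := MergeDatabases(target, source, opts)
//	fmt.Printf("added %d tables, %d columns\n", result.TablesAdded, result.ColumnsAdded)
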
func (r *MergeResult) merge(target, source *models.Database, opts *MergeOptions) {
	// Create maps of existing schemas for quick lookup
	existingSchemas := make(map[string]*models.Schema)
	for _, schema := range target.Schemas {
		existingSchemas[schema.SQLName()] = schema
	}

	// Merge schemas
	for _, srcSchema := range source.Schemas {
		schemaName := srcSchema.SQLName()
		if tgtSchema, exists := existingSchemas[schemaName]; exists {
			// Schema exists, merge its contents
			r.mergeSchemaContents(tgtSchema, srcSchema, opts)
		} else {
			// Schema doesn't exist, add it
			newSchema := cloneSchema(srcSchema)
			target.Schemas = append(target.Schemas, newSchema)
			r.SchemasAdded++
		}
	}

	// Merge domains if not skipped
	if !opts.SkipDomains {
		r.mergeDomains(target, source)
	}
}

func (r *MergeResult) mergeSchemaContents(target, source *models.Schema, opts *MergeOptions) {
	// Merge tables
	r.mergeTables(target, source, opts)

	// Merge views if not skipped
	if !opts.SkipViews {
		r.mergeViews(target, source)
	}

	// Merge sequences if not skipped
	if !opts.SkipSequences {
		r.mergeSequences(target, source)
	}

	// Merge enums if not skipped
	if !opts.SkipEnums {
		r.mergeEnums(target, source)
	}

	// Merge relations if not skipped
	if !opts.SkipRelations {
		r.mergeRelations(target, source)
	}
}

func (r *MergeResult) mergeTables(schema *models.Schema, source *models.Schema, opts *MergeOptions) {
	// Create map of existing tables
	existingTables := make(map[string]*models.Table)
	for _, table := range schema.Tables {
		existingTables[table.SQLName()] = table
	}

	// Merge tables
	for _, srcTable := range source.Tables {
		tableName := srcTable.SQLName()

		// Skip if table is in the skip list (case-insensitive)
		if opts != nil && opts.SkipTableNames != nil && opts.SkipTableNames[strings.ToLower(tableName)] {
			continue
		}

		if tgtTable, exists := existingTables[tableName]; exists {
			// Table exists, merge its columns
			r.mergeColumns(tgtTable, srcTable)
		} else {
			// Table doesn't exist, add it
			newTable := cloneTable(srcTable)
			schema.Tables = append(schema.Tables, newTable)
			r.TablesAdded++
			// Count columns in the newly added table
			r.ColumnsAdded += len(newTable.Columns)
		}
	}
}

func (r *MergeResult) mergeColumns(table *models.Table, srcTable *models.Table) {
	// Create map of existing columns
	existingColumns := make(map[string]*models.Column)
	for colName := range table.Columns {
		existingColumns[colName] = table.Columns[colName]
	}

	// Merge columns
	for colName, srcCol := range srcTable.Columns {
		if _, exists := existingColumns[colName]; !exists {
			// Column doesn't exist, add it
			newCol := cloneColumn(srcCol)
			table.Columns[colName] = newCol
			r.ColumnsAdded++
		}
	}
}

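// Illustrative sketch (not part of the commit): merging is additive only.
// If both tables define "id", the target's definition is kept untouched; a
// column present only in the source is cloned in and counted:
//
//	// target.Columns: {"id"}  source.Columns: {"id", "email"}
//	// after mergeColumns: target has {"id", "email"}, r.ColumnsAdded == 1
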
func (r *MergeResult) mergeViews(schema *models.Schema, source *models.Schema) {
	// Create map of existing views
	existingViews := make(map[string]*models.View)
	for _, view := range schema.Views {
		existingViews[view.SQLName()] = view
	}

	// Merge views
	for _, srcView := range source.Views {
		viewName := srcView.SQLName()
		if _, exists := existingViews[viewName]; !exists {
			// View doesn't exist, add it
			newView := cloneView(srcView)
			schema.Views = append(schema.Views, newView)
			r.ViewsAdded++
		}
	}
}

func (r *MergeResult) mergeSequences(schema *models.Schema, source *models.Schema) {
	// Create map of existing sequences
	existingSequences := make(map[string]*models.Sequence)
	for _, seq := range schema.Sequences {
		existingSequences[seq.SQLName()] = seq
	}

	// Merge sequences
	for _, srcSeq := range source.Sequences {
		seqName := srcSeq.SQLName()
		if _, exists := existingSequences[seqName]; !exists {
			// Sequence doesn't exist, add it
			newSeq := cloneSequence(srcSeq)
			schema.Sequences = append(schema.Sequences, newSeq)
			r.SequencesAdded++
		}
	}
}

func (r *MergeResult) mergeEnums(schema *models.Schema, source *models.Schema) {
	// Create map of existing enums
	existingEnums := make(map[string]*models.Enum)
	for _, enum := range schema.Enums {
		existingEnums[enum.SQLName()] = enum
	}

	// Merge enums
	for _, srcEnum := range source.Enums {
		enumName := srcEnum.SQLName()
		if _, exists := existingEnums[enumName]; !exists {
			// Enum doesn't exist, add it
			newEnum := cloneEnum(srcEnum)
			schema.Enums = append(schema.Enums, newEnum)
			r.EnumsAdded++
		}
	}
}

func (r *MergeResult) mergeRelations(schema *models.Schema, source *models.Schema) {
	// Create map of existing relations
	existingRelations := make(map[string]*models.Relationship)
	for _, rel := range schema.Relations {
		existingRelations[rel.SQLName()] = rel
	}

	// Merge relations
	for _, srcRel := range source.Relations {
		if _, exists := existingRelations[srcRel.SQLName()]; !exists {
			// Relation doesn't exist, add it
			newRel := cloneRelation(srcRel)
			schema.Relations = append(schema.Relations, newRel)
			r.RelationsAdded++
		}
	}
}

func (r *MergeResult) mergeDomains(target *models.Database, source *models.Database) {
	// Create map of existing domains
	existingDomains := make(map[string]*models.Domain)
	for _, domain := range target.Domains {
		existingDomains[domain.SQLName()] = domain
	}

	// Merge domains
	for _, srcDomain := range source.Domains {
		domainName := srcDomain.SQLName()
		if _, exists := existingDomains[domainName]; !exists {
			// Domain doesn't exist, add it
			newDomain := cloneDomain(srcDomain)
			target.Domains = append(target.Domains, newDomain)
			r.DomainsAdded++
		}
	}
}

// Clone functions to create deep copies of models

func cloneSchema(schema *models.Schema) *models.Schema {
	if schema == nil {
		return nil
	}
	newSchema := &models.Schema{
		Name:        schema.Name,
		Description: schema.Description,
		Owner:       schema.Owner,
		Comment:     schema.Comment,
		Sequence:    schema.Sequence,
		UpdatedAt:   schema.UpdatedAt,
		Tables:      make([]*models.Table, 0),
		Views:       make([]*models.View, 0),
		Sequences:   make([]*models.Sequence, 0),
		Enums:       make([]*models.Enum, 0),
		Relations:   make([]*models.Relationship, 0),
	}

	if schema.Permissions != nil {
		newSchema.Permissions = make(map[string]string)
		for k, v := range schema.Permissions {
			newSchema.Permissions[k] = v
		}
	}

	if schema.Metadata != nil {
		newSchema.Metadata = make(map[string]interface{})
		for k, v := range schema.Metadata {
			newSchema.Metadata[k] = v
		}
	}

	if schema.Scripts != nil {
		newSchema.Scripts = make([]*models.Script, len(schema.Scripts))
		copy(newSchema.Scripts, schema.Scripts)
	}

	// Clone tables
	for _, table := range schema.Tables {
		newSchema.Tables = append(newSchema.Tables, cloneTable(table))
	}

	// Clone views
	for _, view := range schema.Views {
		newSchema.Views = append(newSchema.Views, cloneView(view))
	}

	// Clone sequences
	for _, seq := range schema.Sequences {
		newSchema.Sequences = append(newSchema.Sequences, cloneSequence(seq))
	}

	// Clone enums
	for _, enum := range schema.Enums {
		newSchema.Enums = append(newSchema.Enums, cloneEnum(enum))
	}

	// Clone relations
	for _, rel := range schema.Relations {
		newSchema.Relations = append(newSchema.Relations, cloneRelation(rel))
	}

	return newSchema
}

func cloneTable(table *models.Table) *models.Table {
	if table == nil {
		return nil
	}
	newTable := &models.Table{
		Name:        table.Name,
		Description: table.Description,
		Schema:      table.Schema,
		Comment:     table.Comment,
		Sequence:    table.Sequence,
		UpdatedAt:   table.UpdatedAt,
		Columns:     make(map[string]*models.Column),
		Constraints: make(map[string]*models.Constraint),
		Indexes:     make(map[string]*models.Index),
	}

	if table.Metadata != nil {
		newTable.Metadata = make(map[string]interface{})
		for k, v := range table.Metadata {
			newTable.Metadata[k] = v
		}
	}

	// Clone columns
	for colName, col := range table.Columns {
		newTable.Columns[colName] = cloneColumn(col)
	}

	// Clone constraints
	for constName, constraint := range table.Constraints {
		newTable.Constraints[constName] = cloneConstraint(constraint)
	}

	// Clone indexes
	for idxName, index := range table.Indexes {
		newTable.Indexes[idxName] = cloneIndex(index)
	}

	return newTable
}

func cloneColumn(col *models.Column) *models.Column {
	if col == nil {
		return nil
	}
	newCol := &models.Column{
		Name:          col.Name,
		Type:          col.Type,
		Description:   col.Description,
		Comment:       col.Comment,
		IsPrimaryKey:  col.IsPrimaryKey,
		NotNull:       col.NotNull,
		Default:       col.Default,
		Precision:     col.Precision,
		Scale:         col.Scale,
		Length:        col.Length,
		Sequence:      col.Sequence,
		AutoIncrement: col.AutoIncrement,
		Collation:     col.Collation,
	}

	return newCol
}

func cloneConstraint(constraint *models.Constraint) *models.Constraint {
	if constraint == nil {
		return nil
	}
	newConstraint := &models.Constraint{
		Type:              constraint.Type,
		Columns:           make([]string, len(constraint.Columns)),
		ReferencedTable:   constraint.ReferencedTable,
		ReferencedSchema:  constraint.ReferencedSchema,
		ReferencedColumns: make([]string, len(constraint.ReferencedColumns)),
		OnUpdate:          constraint.OnUpdate,
		OnDelete:          constraint.OnDelete,
		Expression:        constraint.Expression,
		Name:              constraint.Name,
		Deferrable:        constraint.Deferrable,
		InitiallyDeferred: constraint.InitiallyDeferred,
		Sequence:          constraint.Sequence,
	}
	copy(newConstraint.Columns, constraint.Columns)
	copy(newConstraint.ReferencedColumns, constraint.ReferencedColumns)
	return newConstraint
}

func cloneIndex(index *models.Index) *models.Index {
	if index == nil {
		return nil
	}
	newIndex := &models.Index{
		Name:        index.Name,
		Description: index.Description,
		Table:       index.Table,
		Schema:      index.Schema,
		Columns:     make([]string, len(index.Columns)),
		Unique:      index.Unique,
		Type:        index.Type,
		Where:       index.Where,
		Concurrent:  index.Concurrent,
		Include:     make([]string, len(index.Include)),
		Comment:     index.Comment,
		Sequence:    index.Sequence,
	}
	copy(newIndex.Columns, index.Columns)
	copy(newIndex.Include, index.Include)
	return newIndex
}

func cloneView(view *models.View) *models.View {
	if view == nil {
		return nil
	}
	newView := &models.View{
		Name:        view.Name,
		Description: view.Description,
		Schema:      view.Schema,
		Definition:  view.Definition,
		Comment:     view.Comment,
		Sequence:    view.Sequence,
		Columns:     make(map[string]*models.Column),
	}

	if view.Metadata != nil {
		newView.Metadata = make(map[string]interface{})
		for k, v := range view.Metadata {
			newView.Metadata[k] = v
		}
	}

	// Clone columns
	for colName, col := range view.Columns {
		newView.Columns[colName] = cloneColumn(col)
	}

	return newView
}

func cloneSequence(seq *models.Sequence) *models.Sequence {
	if seq == nil {
		return nil
	}
	newSeq := &models.Sequence{
		Name:          seq.Name,
		Description:   seq.Description,
		Schema:        seq.Schema,
		StartValue:    seq.StartValue,
		MinValue:      seq.MinValue,
		MaxValue:      seq.MaxValue,
		IncrementBy:   seq.IncrementBy,
		CacheSize:     seq.CacheSize,
		Cycle:         seq.Cycle,
		OwnedByTable:  seq.OwnedByTable,
		OwnedByColumn: seq.OwnedByColumn,
		Comment:       seq.Comment,
		Sequence:      seq.Sequence,
	}
	return newSeq
}

func cloneEnum(enum *models.Enum) *models.Enum {
	if enum == nil {
		return nil
	}
	newEnum := &models.Enum{
		Name:   enum.Name,
		Values: make([]string, len(enum.Values)),
		Schema: enum.Schema,
	}
	copy(newEnum.Values, enum.Values)
	return newEnum
}

func cloneRelation(rel *models.Relationship) *models.Relationship {
	if rel == nil {
		return nil
	}
	newRel := &models.Relationship{
		Name:          rel.Name,
		Type:          rel.Type,
		FromTable:     rel.FromTable,
		FromSchema:    rel.FromSchema,
		FromColumns:   make([]string, len(rel.FromColumns)),
		ToTable:       rel.ToTable,
		ToSchema:      rel.ToSchema,
		ToColumns:     make([]string, len(rel.ToColumns)),
		ForeignKey:    rel.ForeignKey,
		ThroughTable:  rel.ThroughTable,
		ThroughSchema: rel.ThroughSchema,
		Description:   rel.Description,
		Sequence:      rel.Sequence,
	}

	if rel.Properties != nil {
		newRel.Properties = make(map[string]string)
		for k, v := range rel.Properties {
			newRel.Properties[k] = v
		}
	}

	copy(newRel.FromColumns, rel.FromColumns)
	copy(newRel.ToColumns, rel.ToColumns)
	return newRel
}

func cloneDomain(domain *models.Domain) *models.Domain {
	if domain == nil {
		return nil
	}
	newDomain := &models.Domain{
		Name:        domain.Name,
		Description: domain.Description,
		Comment:     domain.Comment,
		Sequence:    domain.Sequence,
		Tables:      make([]*models.DomainTable, len(domain.Tables)),
	}

	if domain.Metadata != nil {
		newDomain.Metadata = make(map[string]interface{})
		for k, v := range domain.Metadata {
			newDomain.Metadata[k] = v
		}
	}

	// Note: copy duplicates the slice of pointers, so the DomainTable entries
	// themselves remain shared with the source domain.
	copy(newDomain.Tables, domain.Tables)
	return newDomain
}

// GetMergeSummary returns a human-readable summary of the merge result
func GetMergeSummary(result *MergeResult) string {
	if result == nil {
		return "No merge result available"
	}

	lines := []string{
		"=== Merge Summary ===",
		fmt.Sprintf("Schemas added: %d", result.SchemasAdded),
		fmt.Sprintf("Tables added: %d", result.TablesAdded),
		fmt.Sprintf("Columns added: %d", result.ColumnsAdded),
		fmt.Sprintf("Views added: %d", result.ViewsAdded),
		fmt.Sprintf("Sequences added: %d", result.SequencesAdded),
		fmt.Sprintf("Enums added: %d", result.EnumsAdded),
		fmt.Sprintf("Relations added: %d", result.RelationsAdded),
		fmt.Sprintf("Domains added: %d", result.DomainsAdded),
	}

	totalAdded := result.SchemasAdded + result.TablesAdded + result.ColumnsAdded +
		result.ViewsAdded + result.SequencesAdded + result.EnumsAdded +
		result.RelationsAdded + result.DomainsAdded

	lines = append(lines, fmt.Sprintf("Total items added: %d", totalAdded))

	summary := ""
	for _, line := range lines {
		summary += line + "\n"
	}

	return summary
}

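// Illustrative sketch (not part of the commit): printing the summary after a
// merge yields one line per counter, for example:
//
//	fmt.Print(merge.GetMergeSummary(result))
//	// === Merge Summary ===
//	// Schemas added: 0
//	// Tables added: 2
//	// ...
//	// Total items added: 14
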
@@ -2,7 +2,13 @@ package models

import "encoding/xml"

// DCTXDictionary represents the root element of a DCTX file
// DCTX File Format Models
//
// This file defines the data structures for parsing and generating DCTX
// (Data Dictionary) XML files, which are used by Clarion development tools
// for database schema definitions.

// DCTXDictionary represents the root element of a DCTX file.
type DCTXDictionary struct {
	XMLName xml.Name `xml:"Dictionary"`
	Name    string   `xml:"Name,attr"`
@@ -11,7 +17,7 @@ type DCTXDictionary struct {
	Relations []DCTXRelation `xml:"Relation,omitempty"`
}

// DCTXTable represents a table definition in DCTX
// DCTXTable represents a table definition in DCTX format.
type DCTXTable struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
@@ -25,7 +31,8 @@ type DCTXTable struct {
	Options []DCTXOption `xml:"Option,omitempty"`
}

// DCTXField represents a field/column definition in DCTX
// DCTXField represents a field/column definition in DCTX format.
// Fields can be nested for GROUP structures.
type DCTXField struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
@@ -37,7 +44,7 @@ type DCTXField struct {
	Options []DCTXOption `xml:"Option,omitempty"`
}

// DCTXKey represents an index or key definition in DCTX
// DCTXKey represents an index or key definition in DCTX format.
type DCTXKey struct {
	Guid string `xml:"Guid,attr"`
	Name string `xml:"Name,attr"`
@@ -49,7 +56,7 @@ type DCTXKey struct {
	Components []DCTXComponent `xml:"Component"`
}

// DCTXComponent represents a component of a key (field reference)
// DCTXComponent represents a component of a key, referencing a field in the index.
type DCTXComponent struct {
	Guid    string `xml:"Guid,attr"`
	FieldId string `xml:"FieldId,attr,omitempty"`
@@ -57,14 +64,14 @@ type DCTXComponent struct {
	Ascend bool `xml:"Ascend,attr,omitempty"`
}

// DCTXOption represents a property option in DCTX
// DCTXOption represents a property option in DCTX format for metadata storage.
type DCTXOption struct {
	Property      string `xml:"Property,attr"`
	PropertyType  string `xml:"PropertyType,attr,omitempty"`
	PropertyValue string `xml:"PropertyValue,attr"`
}

// DCTXRelation represents a relationship/foreign key in DCTX
// DCTXRelation represents a relationship/foreign key in DCTX format.
type DCTXRelation struct {
	Guid         string `xml:"Guid,attr"`
	PrimaryTable string `xml:"PrimaryTable,attr"`
@@ -77,7 +84,7 @@ type DCTXRelation struct {
	PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping,omitempty"`
}

// DCTXFieldMapping represents a field mapping in a relation
// DCTXFieldMapping represents a field mapping in a relation for multi-column foreign keys.
type DCTXFieldMapping struct {
	Guid  string `xml:"Guid,attr"`
	Field string `xml:"Field,attr"`

@@ -2,11 +2,14 @@ package models

import "fmt"

// =============================================================================
// Flat/Denormalized Views - Flattened structures with fully qualified names
// =============================================================================
// Flat/Denormalized Views
//
// This file provides flattened data structures with fully qualified names
// for easier querying and analysis of database schemas without navigating
// nested hierarchies.

// FlatColumn represents a column with full context in a single structure
// FlatColumn represents a column with full database context in a single structure.
// It includes fully qualified names for easy identification and querying.
type FlatColumn struct {
	DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
	SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -25,7 +28,7 @@ type FlatColumn struct {
	Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
}

// ToFlatColumns converts a Database to a slice of FlatColumns
// ToFlatColumns converts a Database to a slice of FlatColumns for denormalized access to all columns.
func (d *Database) ToFlatColumns() []*FlatColumn {
	flatColumns := make([]*FlatColumn, 0)

@@ -56,7 +59,7 @@ func (d *Database) ToFlatColumns() []*FlatColumn {
	return flatColumns
}

// FlatTable represents a table with full context
// FlatTable represents a table with full database context and aggregated counts.
type FlatTable struct {
	DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
	SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -70,7 +73,7 @@ type FlatTable struct {
	IndexCount int `json:"index_count" yaml:"index_count" xml:"index_count"`
}

// ToFlatTables converts a Database to a slice of FlatTables
// ToFlatTables converts a Database to a slice of FlatTables for denormalized access to all tables.
func (d *Database) ToFlatTables() []*FlatTable {
	flatTables := make([]*FlatTable, 0)

@@ -94,7 +97,7 @@ func (d *Database) ToFlatTables() []*FlatTable {
	return flatTables
}

// FlatConstraint represents a constraint with full context
// FlatConstraint represents a constraint with full database context and resolved references.
type FlatConstraint struct {
	DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
	SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
@@ -112,7 +115,7 @@ type FlatConstraint struct {
	OnUpdate string `json:"on_update,omitempty" yaml:"on_update,omitempty" xml:"on_update,omitempty"`
}

// ToFlatConstraints converts a Database to a slice of FlatConstraints
// ToFlatConstraints converts a Database to a slice of FlatConstraints for denormalized access to all constraints.
func (d *Database) ToFlatConstraints() []*FlatConstraint {
	flatConstraints := make([]*FlatConstraint, 0)

@@ -148,7 +151,7 @@ func (d *Database) ToFlatConstraints() []*FlatConstraint {
	return flatConstraints
}

// FlatRelationship represents a relationship with full context
// FlatRelationship represents a relationship with full database context and fully qualified table names.
type FlatRelationship struct {
	DatabaseName     string `json:"database_name" yaml:"database_name" xml:"database_name"`
	RelationshipName string `json:"relationship_name" yaml:"relationship_name" xml:"relationship_name"`
@@ -164,7 +167,7 @@ type FlatRelationship struct {
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
}

// ToFlatRelationships converts a Database to a slice of FlatRelationships
// ToFlatRelationships converts a Database to a slice of FlatRelationships for denormalized access to all relationships.
func (d *Database) ToFlatRelationships() []*FlatRelationship {
	flatRelationships := make([]*FlatRelationship, 0)

@@ -1,13 +1,24 @@
// Package models provides the core data structures for representing database schemas.
// It defines types for databases, schemas, tables, columns, relationships, constraints,
// indexes, views, sequences, and other database objects. These models serve as the
// intermediate representation for converting between various database schema formats.
package models

import "strings"
import (
	"strings"
	"time"

	"github.com/google/uuid"
)

// DatabaseType represents the type of database system.
type DatabaseType string

// Supported database types.
const (
	PostgresqlDatabaseType DatabaseType = "pgsql"
	MSSQLDatabaseType      DatabaseType = "mssql"
	SqlLiteDatabaseType    DatabaseType = "sqlite"
	PostgresqlDatabaseType DatabaseType = "pgsql"  // PostgreSQL database
	MSSQLDatabaseType      DatabaseType = "mssql"  // Microsoft SQL Server database
	SqlLiteDatabaseType    DatabaseType = "sqlite" // SQLite database
)

// Database represents the complete database schema
@@ -15,17 +26,55 @@ type Database struct {
	Name            string       `json:"name" yaml:"name"`
	Description     string       `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
	Schemas         []*Schema    `json:"schemas" yaml:"schemas" xml:"schemas"`
	Domains         []*Domain    `json:"domains,omitempty" yaml:"domains,omitempty" xml:"domains,omitempty"`
	Comment         string       `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
	DatabaseType    DatabaseType `json:"database_type,omitempty" yaml:"database_type,omitempty" xml:"database_type,omitempty"`
	DatabaseVersion string       `json:"database_version,omitempty" yaml:"database_version,omitempty" xml:"database_version,omitempty"`
	SourceFormat    string       `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
	UpdatedAt       string       `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
	GUID            string       `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLNamer returns the database name in lowercase
// SQLName returns the database name in lowercase for SQL compatibility.
func (d *Database) SQLName() string {
	return strings.ToLower(d.Name)
}

// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
func (d *Database) UpdateDate() {
	d.UpdatedAt = time.Now().Format(time.RFC3339)
}

// Domain represents a logical business domain grouping multiple tables from potentially different schemas.
// Domains allow for organizing database tables by functional areas (e.g., authentication, user data, financial).
type Domain struct {
	Name        string         `json:"name" yaml:"name" xml:"name"`
	Description string         `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
	Tables      []*DomainTable `json:"tables" yaml:"tables" xml:"tables"`
	Comment     string         `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
	Metadata    map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
	Sequence    uint           `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID        string         `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the domain name in lowercase for SQL compatibility.
func (d *Domain) SQLName() string {
	return strings.ToLower(d.Name)
}

// DomainTable represents a reference to a specific table within a domain.
// It identifies the table by name and schema, allowing a single domain to include
// tables from multiple schemas.
type DomainTable struct {
	TableName  string `json:"table_name" yaml:"table_name" xml:"table_name"`
	SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
	Sequence   uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	RefTable   *Table `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
	GUID       string `json:"guid" yaml:"guid" xml:"guid"`
}

// Schema represents a database schema, which is a logical grouping of database objects
// such as tables, views, sequences, and relationships within a database.
type Schema struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -40,13 +89,26 @@ type Schema struct {
	Sequence    uint            `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	RefDatabase *Database       `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
	Relations   []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
	Enums       []*Enum         `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
	UpdatedAt   string          `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
	GUID        string          `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the schema name in lowercase
// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
func (d *Schema) UpdateDate() {
	d.UpdatedAt = time.Now().Format(time.RFC3339)
	if d.RefDatabase != nil {
		d.RefDatabase.UpdateDate()
	}
}

// SQLName returns the schema name in lowercase for SQL compatibility.
func (d *Schema) SQLName() string {
	return strings.ToLower(d.Name)
}

// Table represents a database table with its columns, constraints, indexes,
// and relationships. Tables are the primary data storage structures in a database.
type Table struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -60,13 +122,24 @@ type Table struct {
	Metadata  map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
	Sequence  uint           `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	RefSchema *Schema        `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
	UpdatedAt string         `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
	GUID      string         `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the table name in lowercase
// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
func (d *Table) UpdateDate() {
	d.UpdatedAt = time.Now().Format(time.RFC3339)
	if d.RefSchema != nil {
		d.RefSchema.UpdateDate()
	}
}

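// Illustrative sketch (not part of the commit): UpdateDate cascades upward
// through the Ref* back-references, so touching a table also refreshes its
// schema's and database's timestamps (when the back-references are set):
//
//	table.UpdateDate()
//	// table.UpdatedAt, table.RefSchema.UpdatedAt and
//	// table.RefSchema.RefDatabase.UpdatedAt now hold effectively the same
//	// RFC3339 timestamp
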
// SQLName returns the table name in lowercase for SQL compatibility.
func (d *Table) SQLName() string {
	return strings.ToLower(d.Name)
}

// GetPrimaryKey returns the primary key column for the table, or nil if none exists.
func (m Table) GetPrimaryKey() *Column {
	for _, column := range m.Columns {
		if column.IsPrimaryKey {
@@ -76,6 +149,7 @@ func (m Table) GetPrimaryKey() *Column {
	return nil
}

// GetForeignKeys returns all foreign key constraints for the table.
func (m Table) GetForeignKeys() []*Constraint {
	keys := make([]*Constraint, 0)

@@ -98,9 +172,10 @@ type View struct {
	Metadata  map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
	Sequence  uint           `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	RefSchema *Schema        `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
	GUID      string         `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the view name in lowercase
// SQLName returns the view name in lowercase for SQL compatibility.
func (d *View) SQLName() string {
	return strings.ToLower(d.Name)
}
@@ -121,9 +196,10 @@ type Sequence struct {
	Comment   string  `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
	Sequence  uint    `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
	GUID      string  `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the sequence name in lowercase
// SQLName returns the sequence name in lowercase for SQL compatibility.
func (d *Sequence) SQLName() string {
	return strings.ToLower(d.Name)
}
@@ -145,13 +221,16 @@ type Column struct {
	Comment   string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
	Collation string `json:"collation,omitempty" yaml:"collation,omitempty" xml:"collation,omitempty"`
	Sequence  uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID      string `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the table name in lowercase
// SQLName returns the column name in lowercase for SQL compatibility.
func (d *Column) SQLName() string {
	return strings.ToLower(d.Name)
}

// Index represents a database index for optimizing query performance.
// Indexes can be unique, partial, or include additional columns.
type Index struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -165,21 +244,26 @@ type Index struct {
	Include  []string `json:"include,omitempty" yaml:"include,omitempty" xml:"include,omitempty"` // INCLUDE columns
	Comment  string   `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
	Sequence uint     `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID     string   `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the Indexin lowercase
// SQLName returns the index name in lowercase for SQL compatibility.
func (d *Index) SQLName() string {
	return strings.ToLower(d.Name)
}

// RelationType represents the type of relationship between database tables.
type RelationType string

// Supported relationship types.
const (
	OneToOne   RelationType = "one_to_one"
	OneToMany  RelationType = "one_to_many"
	ManyToMany RelationType = "many_to_many"
	OneToOne   RelationType = "one_to_one"   // One record in table A relates to one record in table B
	OneToMany  RelationType = "one_to_many"  // One record in table A relates to many records in table B
	ManyToMany RelationType = "many_to_many" // Many records in table A relate to many records in table B
)

// Relationship represents a relationship between two database tables.
// Relationships can be one-to-one, one-to-many, or many-to-many.
type Relationship struct {
	Name string       `json:"name" yaml:"name" xml:"name"`
	Type RelationType `json:"type" yaml:"type" xml:"type"`
@@ -195,13 +279,16 @@ type Relationship struct {
	ThroughSchema string `json:"through_schema,omitempty" yaml:"through_schema,omitempty" xml:"through_schema,omitempty"`
	Description   string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
	Sequence      uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID          string `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the Relationship lowercase
// SQLName returns the relationship name in lowercase for SQL compatibility.
func (d *Relationship) SQLName() string {
	return strings.ToLower(d.Name)
}

// Constraint represents a database constraint that enforces data integrity rules.
// Constraints can be primary keys, foreign keys, unique constraints, check constraints, or not-null constraints.
type Constraint struct {
	Name string         `json:"name" yaml:"name" xml:"name"`
	Type ConstraintType `json:"type" yaml:"type" xml:"type"`
@@ -217,22 +304,51 @@ type Constraint struct {
	Deferrable        bool   `json:"deferrable,omitempty" yaml:"deferrable,omitempty" xml:"deferrable,omitempty"`
	InitiallyDeferred bool   `json:"initially_deferred,omitempty" yaml:"initially_deferred,omitempty" xml:"initially_deferred,omitempty"`
	Sequence          uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID              string `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the constraint name in lowercase for SQL compatibility.
func (d *Constraint) SQLName() string {
	return strings.ToLower(d.Name)
}

// ConstraintType represents the type of database constraint.
type ConstraintType string

// Enum represents a database enumeration type with a set of allowed values.
type Enum struct {
	Name   string   `json:"name" yaml:"name" xml:"name"`
	Values []string `json:"values" yaml:"values" xml:"values"`
	Schema string   `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
	GUID   string   `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the enum name in lowercase for SQL compatibility.
func (d *Enum) SQLName() string {
	return strings.ToLower(d.Name)
}

// InitEnum initializes a new Enum with empty values slice
func InitEnum(name, schema string) *Enum {
	return &Enum{
		Name:   name,
		Schema: schema,
		Values: make([]string, 0),
		GUID:   uuid.New().String(),
	}
}

// Supported constraint types.
const (
	PrimaryKeyConstraint ConstraintType = "primary_key"
	ForeignKeyConstraint ConstraintType = "foreign_key"
	UniqueConstraint     ConstraintType = "unique"
	CheckConstraint      ConstraintType = "check"
	NotNullConstraint    ConstraintType = "not_null"
	PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
	ForeignKeyConstraint ConstraintType = "foreign_key" // Foreign key references another table
	UniqueConstraint     ConstraintType = "unique"      // Unique constraint ensures all values are different
	CheckConstraint      ConstraintType = "check"       // Check constraint validates data against an expression
	NotNullConstraint    ConstraintType = "not_null"    // Not null constraint requires a value
)

// Script represents a database migration or initialization script.
// Scripts can have dependencies and rollback capabilities.
type Script struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description" yaml:"description" xml:"description"`
@@ -243,19 +359,23 @@ type Script struct {
	Version  string `json:"version,omitempty" yaml:"version,omitempty" xml:"version,omitempty"`
	Priority int    `json:"priority,omitempty" yaml:"priority,omitempty" xml:"priority,omitempty"`
	Sequence uint   `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
	GUID     string `json:"guid" yaml:"guid" xml:"guid"`
}

// SQLName returns the script name in lowercase for SQL compatibility.
func (d *Script) SQLName() string {
	return strings.ToLower(d.Name)
}

// Initialize functions
// Initialization functions for creating new model instances with proper defaults.

// InitDatabase initializes a new Database with empty slices
func InitDatabase(name string) *Database {
	return &Database{
		Name:    name,
		Schemas: make([]*Schema, 0),
		Domains: make([]*Domain, 0),
		GUID:    uuid.New().String(),
	}
}

@@ -269,6 +389,7 @@ func InitSchema(name string) *Schema {
		Permissions: make(map[string]string),
		Metadata:    make(map[string]any),
		Scripts:     make([]*Script, 0),
		GUID:        uuid.New().String(),
	}
}

@@ -282,6 +403,7 @@ func InitTable(name, schema string) *Table {
		Indexes:       make(map[string]*Index),
		Relationships: make(map[string]*Relationship),
		Metadata:      make(map[string]any),
		GUID:          uuid.New().String(),
	}
}

@@ -291,6 +413,7 @@ func InitColumn(name, table, schema string) *Column {
		Name:   name,
		Table:  table,
		Schema: schema,
		GUID:   uuid.New().String(),
	}
}

@@ -302,6 +425,7 @@ func InitIndex(name, table, schema string) *Index {
		Schema:  schema,
		Columns: make([]string, 0),
		Include: make([]string, 0),
		GUID:    uuid.New().String(),
	}
}

@@ -314,6 +438,7 @@ func InitRelation(name, schema string) *Relationship {
		Properties:  make(map[string]string),
		FromColumns: make([]string, 0),
		ToColumns:   make([]string, 0),
		GUID:        uuid.New().String(),
	}
}

@@ -323,6 +448,7 @@ func InitRelationship(name string, relType RelationType) *Relationship {
		Name:       name,
		Type:       relType,
		Properties: make(map[string]string),
		GUID:       uuid.New().String(),
	}
}

@@ -333,6 +459,7 @@ func InitConstraint(name string, constraintType ConstraintType) *Constraint {
		Type:              constraintType,
		Columns:           make([]string, 0),
		ReferencedColumns: make([]string, 0),
		GUID:              uuid.New().String(),
	}
}

@@ -341,6 +468,7 @@ func InitScript(name string) *Script {
	return &Script{
		Name:     name,
		RunAfter: make([]string, 0),
		GUID:     uuid.New().String(),
	}
}

@@ -351,6 +479,7 @@ func InitView(name, schema string) *View {
		Schema:   schema,
		Columns:  make(map[string]*Column),
		Metadata: make(map[string]any),
		GUID:     uuid.New().String(),
	}
}

@@ -361,5 +490,25 @@ func InitSequence(name, schema string) *Sequence {
		Schema:      schema,
		IncrementBy: 1,
		StartValue:  1,
		GUID:        uuid.New().String(),
	}
}

// InitDomain initializes a new Domain with empty slices and maps
func InitDomain(name string) *Domain {
	return &Domain{
		Name:     name,
		Tables:   make([]*DomainTable, 0),
		Metadata: make(map[string]any),
		GUID:     uuid.New().String(),
	}
}

// InitDomainTable initializes a new DomainTable reference
func InitDomainTable(tableName, schemaName string) *DomainTable {
	return &DomainTable{
		TableName:  tableName,
		SchemaName: schemaName,
		GUID:       uuid.New().String(),
	}
}

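// Illustrative sketch (not part of the commit): the Init* constructors wire
// up empty collections and a fresh GUID so callers can populate objects
// without nil-map checks:
//
//	db := InitDatabase("appdb")
//	schema := InitSchema("public")
//	table := InitTable("users", "public")
//	table.Columns["id"] = InitColumn("id", "users", "public")
//	schema.Tables = append(schema.Tables, table)
//	db.Schemas = append(db.Schemas, schema)
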
282
pkg/models/sorting.go
Normal file
@@ -0,0 +1,282 @@
package models

import (
	"sort"
	"strings"
)

// SortOrder represents the sort direction
type SortOrder bool

const (
	// Ascending sort order
	Ascending SortOrder = false
	// Descending sort order
	Descending SortOrder = true
)

// Schema Sorting

// SortSchemasByName sorts schemas by name
func SortSchemasByName(schemas []*Schema, desc bool) error {
	sort.SliceStable(schemas, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(schemas[i].Name), strings.ToLower(schemas[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortSchemasBySequence sorts schemas by sequence number
func SortSchemasBySequence(schemas []*Schema, desc bool) error {
	sort.SliceStable(schemas, func(i, j int) bool {
		if desc {
			return schemas[i].Sequence > schemas[j].Sequence
		}
		return schemas[i].Sequence < schemas[j].Sequence
	})
	return nil
}

// Table Sorting

// SortTablesByName sorts tables by name
func SortTablesByName(tables []*Table, desc bool) error {
	sort.SliceStable(tables, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(tables[i].Name), strings.ToLower(tables[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortTablesBySequence sorts tables by sequence number
func SortTablesBySequence(tables []*Table, desc bool) error {
	sort.SliceStable(tables, func(i, j int) bool {
		if desc {
			return tables[i].Sequence > tables[j].Sequence
		}
		return tables[i].Sequence < tables[j].Sequence
	})
	return nil
}

// Column Sorting

// SortColumnsMapByName converts column map to sorted slice by name
func SortColumnsMapByName(columns map[string]*Column, desc bool) []*Column {
	result := make([]*Column, 0, len(columns))
	for _, col := range columns {
		result = append(result, col)
	}
	_ = SortColumnsByName(result, desc)
	return result
}

// SortColumnsMapBySequence converts column map to sorted slice by sequence
func SortColumnsMapBySequence(columns map[string]*Column, desc bool) []*Column {
	result := make([]*Column, 0, len(columns))
	for _, col := range columns {
		result = append(result, col)
	}
	_ = SortColumnsBySequence(result, desc)
	return result
}

// SortColumnsByName sorts columns by name
func SortColumnsByName(columns []*Column, desc bool) error {
	sort.SliceStable(columns, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(columns[i].Name), strings.ToLower(columns[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortColumnsBySequence sorts columns by sequence number
func SortColumnsBySequence(columns []*Column, desc bool) error {
	sort.SliceStable(columns, func(i, j int) bool {
		if desc {
			return columns[i].Sequence > columns[j].Sequence
		}
		return columns[i].Sequence < columns[j].Sequence
	})
	return nil
}

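// Illustrative sketch (not part of the commit): Go map iteration order is
// randomized, so the Sort*Map* helpers are the intended way to get a stable
// column order for deterministic output:
//
//	for _, col := range SortColumnsMapBySequence(table.Columns, false) {
//		fmt.Println(col.Name)
//	}
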
// View Sorting
|
||||
|
||||
// SortViewsByName sorts views by name
|
||||
func SortViewsByName(views []*View, desc bool) error {
|
||||
sort.SliceStable(views, func(i, j int) bool {
|
||||
cmp := strings.Compare(strings.ToLower(views[i].Name), strings.ToLower(views[j].Name))
|
||||
if desc {
|
||||
return cmp > 0
|
||||
}
|
||||
return cmp < 0
|
||||
})
	return nil
}

// SortViewsBySequence sorts views by sequence number
func SortViewsBySequence(views []*View, desc bool) error {
	sort.SliceStable(views, func(i, j int) bool {
		if desc {
			return views[i].Sequence > views[j].Sequence
		}
		return views[i].Sequence < views[j].Sequence
	})
	return nil
}

// Sequence Sorting

// SortSequencesByName sorts sequences by name
func SortSequencesByName(sequences []*Sequence, desc bool) error {
	sort.SliceStable(sequences, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(sequences[i].Name), strings.ToLower(sequences[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortSequencesBySequence sorts sequences by sequence number
func SortSequencesBySequence(sequences []*Sequence, desc bool) error {
	sort.SliceStable(sequences, func(i, j int) bool {
		if desc {
			return sequences[i].Sequence > sequences[j].Sequence
		}
		return sequences[i].Sequence < sequences[j].Sequence
	})
	return nil
}

// Index Sorting

// SortIndexesMapByName converts index map to sorted slice by name
func SortIndexesMapByName(indexes map[string]*Index, desc bool) []*Index {
	result := make([]*Index, 0, len(indexes))
	for _, idx := range indexes {
		result = append(result, idx)
	}
	_ = SortIndexesByName(result, desc)
	return result
}

// SortIndexesMapBySequence converts index map to sorted slice by sequence
func SortIndexesMapBySequence(indexes map[string]*Index, desc bool) []*Index {
	result := make([]*Index, 0, len(indexes))
	for _, idx := range indexes {
		result = append(result, idx)
	}
	_ = SortIndexesBySequence(result, desc)
	return result
}

// SortIndexesByName sorts indexes by name
func SortIndexesByName(indexes []*Index, desc bool) error {
	sort.SliceStable(indexes, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(indexes[i].Name), strings.ToLower(indexes[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// SortIndexesBySequence sorts indexes by sequence number
func SortIndexesBySequence(indexes []*Index, desc bool) error {
	sort.SliceStable(indexes, func(i, j int) bool {
		if desc {
			return indexes[i].Sequence > indexes[j].Sequence
		}
		return indexes[i].Sequence < indexes[j].Sequence
	})
	return nil
}

// Constraint Sorting

// SortConstraintsMapByName converts constraint map to sorted slice by name
func SortConstraintsMapByName(constraints map[string]*Constraint, desc bool) []*Constraint {
	result := make([]*Constraint, 0, len(constraints))
	for _, c := range constraints {
		result = append(result, c)
	}
	_ = SortConstraintsByName(result, desc)
	return result
}

// SortConstraintsByName sorts constraints by name
func SortConstraintsByName(constraints []*Constraint, desc bool) error {
	sort.SliceStable(constraints, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(constraints[i].Name), strings.ToLower(constraints[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Relationship Sorting

// SortRelationshipsMapByName converts relationship map to sorted slice by name
func SortRelationshipsMapByName(relationships map[string]*Relationship, desc bool) []*Relationship {
	result := make([]*Relationship, 0, len(relationships))
	for _, r := range relationships {
		result = append(result, r)
	}
	_ = SortRelationshipsByName(result, desc)
	return result
}

// SortRelationshipsByName sorts relationships by name
func SortRelationshipsByName(relationships []*Relationship, desc bool) error {
	sort.SliceStable(relationships, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(relationships[i].Name), strings.ToLower(relationships[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Script Sorting

// SortScriptsByName sorts scripts by name
func SortScriptsByName(scripts []*Script, desc bool) error {
	sort.SliceStable(scripts, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(scripts[i].Name), strings.ToLower(scripts[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}

// Enum Sorting

// SortEnumsByName sorts enums by name
func SortEnumsByName(enums []*Enum, desc bool) error {
	sort.SliceStable(enums, func(i, j int) bool {
		cmp := strings.Compare(strings.ToLower(enums[i].Name), strings.ToLower(enums[j].Name))
		if desc {
			return cmp > 0
		}
		return cmp < 0
	})
	return nil
}
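A minimal usage sketch for the map-based helpers (hypothetical `Index` values; `Name` and `Sequence` are the fields the comparators above use, and `fmt` is assumed imported):

```go
indexes := map[string]*Index{
	"idx_b": {Name: "idx_users_email", Sequence: 2},
	"idx_a": {Name: "idx_users_id", Sequence: 1},
}

// Ascending by name: idx_users_email, then idx_users_id
for _, idx := range SortIndexesMapByName(indexes, false) {
	fmt.Println(idx.Name)
}

// Descending by sequence: 2, then 1
for _, idx := range SortIndexesMapBySequence(indexes, true) {
	fmt.Println(idx.Sequence)
}
```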
@@ -1,10 +1,12 @@
package models

// =============================================================================
// Summary/Compact Views - Lightweight views with essential fields
// =============================================================================
// Summary/Compact Views
//
// This file provides lightweight summary structures with essential fields
// and aggregated counts for quick database schema overviews without loading
// full object graphs.

// DatabaseSummary provides a compact overview of a database
// DatabaseSummary provides a compact overview of a database with aggregated statistics.
type DatabaseSummary struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -15,7 +17,7 @@ type DatabaseSummary struct {
	TotalColumns int `json:"total_columns" yaml:"total_columns" xml:"total_columns"`
}

// ToSummary converts a Database to a DatabaseSummary
// ToSummary converts a Database to a DatabaseSummary with calculated counts.
func (d *Database) ToSummary() *DatabaseSummary {
	summary := &DatabaseSummary{
		Name: d.Name,
@@ -36,7 +38,7 @@ func (d *Database) ToSummary() *DatabaseSummary {
	return summary
}

// SchemaSummary provides a compact overview of a schema
// SchemaSummary provides a compact overview of a schema with aggregated statistics.
type SchemaSummary struct {
	Name        string `json:"name" yaml:"name" xml:"name"`
	Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
@@ -47,7 +49,7 @@ type SchemaSummary struct {
	TotalConstraints int `json:"total_constraints" yaml:"total_constraints" xml:"total_constraints"`
}

// ToSummary converts a Schema to a SchemaSummary
// ToSummary converts a Schema to a SchemaSummary with calculated counts.
func (s *Schema) ToSummary() *SchemaSummary {
	summary := &SchemaSummary{
		Name: s.Name,
@@ -66,7 +68,7 @@ func (s *Schema) ToSummary() *SchemaSummary {
	return summary
}

// TableSummary provides a compact overview of a table
// TableSummary provides a compact overview of a table with aggregated statistics.
type TableSummary struct {
	Name   string `json:"name" yaml:"name" xml:"name"`
	Schema string `json:"schema" yaml:"schema" xml:"schema"`
@@ -79,7 +81,7 @@ type TableSummary struct {
	ForeignKeyCount int `json:"foreign_key_count" yaml:"foreign_key_count" xml:"foreign_key_count"`
}

// ToSummary converts a Table to a TableSummary
// ToSummary converts a Table to a TableSummary with calculated counts.
func (t *Table) ToSummary() *TableSummary {
	summary := &TableSummary{
		Name: t.Name,
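Since the summary types carry only scalar fields, rendering an overview needs no traversal of the full object graph. A minimal sketch, assuming a `Database` produced by any of the readers (`TotalColumns` and `TotalConstraints` are visible in the hunks above; the remaining count fields are elided there):

```go
summary := db.ToSummary()
fmt.Printf("database %s: %d columns in total\n", summary.Name, summary.TotalColumns)

for _, s := range db.Schemas {
	ss := s.ToSummary()
	fmt.Printf("schema %s: %d constraints\n", ss.Name, ss.TotalConstraints)
}
```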
106 pkg/readers/bun/README.md Normal file
@@ -0,0 +1,106 @@
# Bun Reader

Reads Go source files containing Bun model definitions and extracts database schema information.

## Overview

The Bun Reader parses Go source code files that define Bun models (structs with `bun` struct tags) and converts them into RelSpec's internal database model representation.

## Features

- Parses Bun struct tags to extract column definitions
- Extracts table names from `bun:"table:tablename"` tags
- Identifies primary keys, foreign keys, and indexes
- Supports relationship detection
- Handles both single files and directories

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/models.go",
	}

	reader := bun.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Bun models and convert to JSON
relspec --input bun --in-file models/ --output json --out-file schema.json

# Convert Bun models to GORM
relspec --input bun --in-file models.go --output gorm --out-file gorm_models.go
```

## Supported Bun Tags

The reader recognizes the following Bun struct tags:

- `table` - Table name
- `column` - Column name
- `type` - SQL data type
- `pk` - Primary key
- `notnull` - NOT NULL constraint
- `autoincrement` - Auto-increment column
- `default` - Default value
- `unique` - Unique constraint
- `rel` - Relationship definition

## Example Bun Model

```go
package models

import (
	"time"

	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64     `bun:"id,pk,autoincrement"`
	Username  string    `bun:"username,notnull,unique"`
	Email     string    `bun:"email,notnull"`
	CreatedAt time.Time `bun:"created_at,notnull,default:now()"`

	Posts []*Post `bun:"rel:has-many,join:id=user_id"`
}

type Post struct {
	bun.BaseModel `bun:"table:posts,alias:p"`

	ID      int64  `bun:"id,pk"`
	UserID  int64  `bun:"user_id,notnull"`
	Title   string `bun:"title,notnull"`
	Content string `bun:"content"`

	User *User `bun:"rel:belongs-to,join:user_id=id"`
}
```

## Notes

- Test files (ending in `_test.go`) are automatically excluded
- The `bun.BaseModel` embedded struct is automatically recognized
- Schema defaults to `public` if not specified
@@ -382,6 +382,23 @@ func (r *Reader) isRelationship(tag string) bool {
	return strings.Contains(tag, "bun:\"rel:") || strings.Contains(tag, ",rel:")
}

// getRelationType extracts the relationship type from a bun tag
func (r *Reader) getRelationType(bunTag string) string {
	if strings.Contains(bunTag, "rel:has-many") {
		return "has-many"
	}
	if strings.Contains(bunTag, "rel:belongs-to") {
		return "belongs-to"
	}
	if strings.Contains(bunTag, "rel:has-one") {
		return "has-one"
	}
	if strings.Contains(bunTag, "rel:many-to-many") {
		return "many-to-many"
	}
	return ""
}

// parseRelationshipConstraints parses relationship fields to extract foreign key constraints
func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *ast.StructType, structMap map[string]*models.Table) {
	for _, field := range structType.Fields.List {
@@ -409,27 +426,51 @@ func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *a
		}

		// Parse the join information: join:user_id=id
		// This means: referencedTable.user_id = thisTable.id
		// This means: thisTable.user_id = referencedTable.id
		joinInfo := r.parseJoinInfo(bunTag)
		if joinInfo == nil {
			continue
		}

		// The FK is on the referenced table
		// Determine which table gets the FK based on relationship type
		relType := r.getRelationType(bunTag)

		var fkTable *models.Table
		var fkColumn, refTable, refColumn string

		switch strings.ToLower(relType) {
		case "belongs-to":
			// For belongs-to: FK is on the current table
			// join:user_id=id means table.user_id references referencedTable.id
			fkTable = table
			fkColumn = joinInfo.ForeignKey
			refTable = referencedTable.Name
			refColumn = joinInfo.ReferencedKey
		case "has-many":
			// For has-many: FK is on the referenced table
			// join:id=user_id means referencedTable.user_id references table.id
			fkTable = referencedTable
			fkColumn = joinInfo.ReferencedKey
			refTable = table.Name
			refColumn = joinInfo.ForeignKey
		default:
			continue
		}

		constraint := &models.Constraint{
			Name: fmt.Sprintf("fk_%s_%s", referencedTable.Name, table.Name),
			Name: fmt.Sprintf("fk_%s_%s", fkTable.Name, refTable),
			Type: models.ForeignKeyConstraint,
			Table: referencedTable.Name,
			Schema: referencedTable.Schema,
			Columns: []string{joinInfo.ForeignKey},
			ReferencedTable: table.Name,
			ReferencedSchema: table.Schema,
			ReferencedColumns: []string{joinInfo.ReferencedKey},
			Table: fkTable.Name,
			Schema: fkTable.Schema,
			Columns: []string{fkColumn},
			ReferencedTable: refTable,
			ReferencedSchema: fkTable.Schema,
			ReferencedColumns: []string{refColumn},
			OnDelete: "NO ACTION", // Bun doesn't specify this in tags
			OnUpdate: "NO ACTION",
		}

		referencedTable.Constraints[constraint.Name] = constraint
		fkTable.Constraints[constraint.Name] = constraint
	}
}
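The switch above encodes one placement rule per relationship kind: for `belongs-to` the foreign key column lives on the declaring table, for `has-many` it lives on the related table. A condensed sketch of that rule (hypothetical helper; the two join sides correspond to the parsed `ForeignKey` and `ReferencedKey` above):

```go
// belongs-to on Post with `join:user_id=id` -> FK posts.user_id references users.id
// has-many on User with `join:id=user_id`  -> the same FK, declared from the other side
func fkSide(relType, joinLeft, joinRight string) (fkColumn, refColumn string, fkOnDeclaring bool) {
	switch relType {
	case "belongs-to":
		return joinLeft, joinRight, true // FK column lives on the declaring table
	case "has-many":
		return joinRight, joinLeft, false // FK column lives on the related table
	default:
		return "", "", false
	}
}
```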
@@ -626,17 +667,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
	// - nullzero tag means the field is nullable (can be NULL in DB)
	// - absence of nullzero means the field is NOT NULL
	// - primitive types (int64, bool, string) are NOT NULL by default
	column.NotNull = true
	// Primary keys are always NOT NULL

	if strings.Contains(bunTag, "nullzero") {
		column.NotNull = false
	} else if r.isNullableGoType(fieldType) {
		// SqlString, SqlInt, etc. without nullzero tag means NOT NULL
		column.NotNull = true
	} else {
		// Primitive types are NOT NULL by default
		column.NotNull = true
		column.NotNull = !r.isNullableGoType(fieldType)
	}

	// Primary keys are always NOT NULL
	if column.IsPrimaryKey {
		column.NotNull = true
	}
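The net rule after this change: an explicit `nullzero` tag makes the column nullable; otherwise nullability follows the Go type (pointer and similarly nullable types map to nullable columns); and a primary key overrides both. A condensed sketch of the same decision (hypothetical free function standing in for the reader's logic; `strings` assumed imported), which the new tests below exercise case by case:

```go
// notNull reports whether a column should be NOT NULL under the new rules.
func notNull(bunTag string, nullableGoType, isPrimaryKey bool) bool {
	nn := true
	if strings.Contains(bunTag, "nullzero") {
		nn = false // explicit nullzero tag wins
	} else {
		nn = !nullableGoType // e.g. *string, *int64 map to nullable columns
	}
	if isPrimaryKey {
		nn = true // primary keys are always NOT NULL
	}
	return nn
}
```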
522 pkg/readers/bun/reader_test.go Normal file
@@ -0,0 +1,522 @@
package bun

import (
	"path/filepath"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	schema := db.Schemas[0]
	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Tables) != 1 {
		t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
	}

	table := schema.Tables[0]
	if table.Name != "users" {
		t.Errorf("Expected table name 'users', got '%s'", table.Name)
	}

	if len(table.Columns) != 6 {
		t.Errorf("Expected 6 columns, got %d", len(table.Columns))
	}

	// Verify id column - primary key should be NOT NULL
	idCol, exists := table.Columns["id"]
	if !exists {
		t.Fatal("Column 'id' not found")
	}
	if !idCol.IsPrimaryKey {
		t.Error("Column 'id' should be primary key")
	}
	if !idCol.AutoIncrement {
		t.Error("Column 'id' should be auto-increment")
	}
	if !idCol.NotNull {
		t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
	}
	if idCol.Type != "bigint" {
		t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
	}

	// Verify email column - explicit notnull tag should be NOT NULL
	emailCol, exists := table.Columns["email"]
	if !exists {
		t.Fatal("Column 'email' not found")
	}
	if !emailCol.NotNull {
		t.Error("Column 'email' should be NOT NULL (explicit 'notnull' tag)")
	}
	if emailCol.Type != "varchar" || emailCol.Length != 255 {
		t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
	}

	// Verify name column - primitive string type should be NOT NULL by default in Bun
	nameCol, exists := table.Columns["name"]
	if !exists {
		t.Fatal("Column 'name' not found")
	}
	if !nameCol.NotNull {
		t.Error("Column 'name' should be NOT NULL (primitive string type, no nullzero tag)")
	}
	if nameCol.Type != "text" {
		t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
	}

	// Verify age column - pointer type should be nullable (NOT NULL = false)
	ageCol, exists := table.Columns["age"]
	if !exists {
		t.Fatal("Column 'age' not found")
	}
	if ageCol.NotNull {
		t.Error("Column 'age' should be nullable (pointer type *int)")
	}
	if ageCol.Type != "integer" {
		t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
	}

	// Verify is_active column - primitive bool type should be NOT NULL by default
	isActiveCol, exists := table.Columns["is_active"]
	if !exists {
		t.Fatal("Column 'is_active' not found")
	}
	if !isActiveCol.NotNull {
		t.Error("Column 'is_active' should be NOT NULL (primitive bool type, no nullzero tag)")
	}
	if isActiveCol.Type != "boolean" {
		t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
	}

	// Verify created_at column - time.Time should be NOT NULL by default
	createdAtCol, exists := table.Columns["created_at"]
	if !exists {
		t.Fatal("Column 'created_at' not found")
	}
	if !createdAtCol.NotNull {
		t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
	}
	if createdAtCol.Type != "timestamp" {
		t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
	}

	// Verify unique index on email
	if len(table.Indexes) < 1 {
		t.Error("Expected at least 1 index on users table")
	}
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	// Verify schema
	if len(db.Schemas) != 1 {
		t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
	}

	schema := db.Schemas[0]
	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	// Verify tables
	if len(schema.Tables) != 3 {
		t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
	}

	// Find tables
	var usersTable, postsTable, commentsTable *models.Table
	for _, table := range schema.Tables {
		switch table.Name {
		case "users":
			usersTable = table
		case "posts":
			postsTable = table
		case "comments":
			commentsTable = table
		}
	}

	if usersTable == nil {
		t.Fatal("Users table not found")
	}
	if postsTable == nil {
		t.Fatal("Posts table not found")
	}
	if commentsTable == nil {
		t.Fatal("Comments table not found")
	}

	// Verify users table - test NOT NULL logic for various field types
	if len(usersTable.Columns) != 10 {
		t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
	}

	// username - NOT NULL (explicit notnull tag)
	usernameCol, exists := usersTable.Columns["username"]
	if !exists {
		t.Fatal("Column 'username' not found")
	}
	if !usernameCol.NotNull {
		t.Error("Column 'username' should be NOT NULL (explicit 'notnull' tag)")
	}

	// first_name - nullable (pointer type)
	firstNameCol, exists := usersTable.Columns["first_name"]
	if !exists {
		t.Fatal("Column 'first_name' not found")
	}
	if firstNameCol.NotNull {
		t.Error("Column 'first_name' should be nullable (pointer type *string)")
	}

	// last_name - nullable (pointer type)
	lastNameCol, exists := usersTable.Columns["last_name"]
	if !exists {
		t.Fatal("Column 'last_name' not found")
	}
	if lastNameCol.NotNull {
		t.Error("Column 'last_name' should be nullable (pointer type *string)")
	}

	// bio - nullable (pointer type)
	bioCol, exists := usersTable.Columns["bio"]
	if !exists {
		t.Fatal("Column 'bio' not found")
	}
	if bioCol.NotNull {
		t.Error("Column 'bio' should be nullable (pointer type *string)")
	}

	// is_active - NOT NULL (primitive bool without nullzero)
	isActiveCol, exists := usersTable.Columns["is_active"]
	if !exists {
		t.Fatal("Column 'is_active' not found")
	}
	if !isActiveCol.NotNull {
		t.Error("Column 'is_active' should be NOT NULL (primitive bool type without nullzero)")
	}

	// Verify users table indexes
	if len(usersTable.Indexes) < 1 {
		t.Error("Expected at least 1 index on users table")
	}

	// Verify posts table
	if len(postsTable.Columns) != 11 {
		t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
	}

	// excerpt - nullable (pointer type)
	excerptCol, exists := postsTable.Columns["excerpt"]
	if !exists {
		t.Fatal("Column 'excerpt' not found")
	}
	if excerptCol.NotNull {
		t.Error("Column 'excerpt' should be nullable (pointer type *string)")
	}

	// published - NOT NULL (primitive bool without nullzero)
	publishedCol, exists := postsTable.Columns["published"]
	if !exists {
		t.Fatal("Column 'published' not found")
	}
	if !publishedCol.NotNull {
		t.Error("Column 'published' should be NOT NULL (primitive bool type without nullzero)")
	}

	// published_at - nullable (has nullzero tag)
	publishedAtCol, exists := postsTable.Columns["published_at"]
	if !exists {
		t.Fatal("Column 'published_at' not found")
	}
	if publishedAtCol.NotNull {
		t.Error("Column 'published_at' should be nullable (has nullzero tag)")
	}

	// view_count - NOT NULL (primitive int64 without nullzero)
	viewCountCol, exists := postsTable.Columns["view_count"]
	if !exists {
		t.Fatal("Column 'view_count' not found")
	}
	if !viewCountCol.NotNull {
		t.Error("Column 'view_count' should be NOT NULL (primitive int64 type without nullzero)")
	}

	// Verify posts table indexes
	if len(postsTable.Indexes) < 1 {
		t.Error("Expected at least 1 index on posts table")
	}

	// Verify comments table
	if len(commentsTable.Columns) != 6 {
		t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
	}

	// user_id - nullable (pointer type)
	userIDCol, exists := commentsTable.Columns["user_id"]
	if !exists {
		t.Fatal("Column 'user_id' not found in comments table")
	}
	if userIDCol.NotNull {
		t.Error("Column 'user_id' should be nullable (pointer type *int64)")
	}

	// post_id - NOT NULL (explicit notnull tag)
	postIDCol, exists := commentsTable.Columns["post_id"]
	if !exists {
		t.Fatal("Column 'post_id' not found in comments table")
	}
	if !postIDCol.NotNull {
		t.Error("Column 'post_id' should be NOT NULL (explicit 'notnull' tag)")
	}

	// Verify foreign key constraints are created from relationship tags
	// In Bun, relationships are defined with rel: tags
	// The constraints should be created on the referenced tables
	if len(postsTable.Constraints) > 0 {
		// Check if FK constraint exists
		var fkPostsUser *models.Constraint
		for _, c := range postsTable.Constraints {
			if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
				fkPostsUser = c
				break
			}
		}

		if fkPostsUser != nil {
			if len(fkPostsUser.Columns) != 1 || fkPostsUser.Columns[0] != "user_id" {
				t.Error("Expected FK column 'user_id'")
			}
			if len(fkPostsUser.ReferencedColumns) != 1 || fkPostsUser.ReferencedColumns[0] != "id" {
				t.Error("Expected FK referenced column 'id'")
			}
		}
	}

	if len(commentsTable.Constraints) > 0 {
		// Check if FK constraints exist
		var fkCommentsPost, fkCommentsUser *models.Constraint
		for _, c := range commentsTable.Constraints {
			if c.Type == models.ForeignKeyConstraint {
				if c.ReferencedTable == "posts" {
					fkCommentsPost = c
				} else if c.ReferencedTable == "users" {
					fkCommentsUser = c
				}
			}
		}

		if fkCommentsPost != nil {
			if len(fkCommentsPost.Columns) != 1 || fkCommentsPost.Columns[0] != "post_id" {
				t.Error("Expected FK column 'post_id'")
			}
		}

		if fkCommentsUser != nil {
			if len(fkCommentsUser.Columns) != 1 || fkCommentsUser.Columns[0] != "user_id" {
				t.Error("Expected FK column 'user_id'")
			}
		}
	}
}

func TestReader_ReadSchema(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
	}

	reader := NewReader(opts)
	schema, err := reader.ReadSchema()
	if err != nil {
		t.Fatalf("ReadSchema() error = %v", err)
	}

	if schema == nil {
		t.Fatal("ReadSchema() returned nil schema")
	}

	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Tables) != 1 {
		t.Errorf("Expected 1 table, got %d", len(schema.Tables))
	}
}

func TestReader_ReadTable(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
	}

	reader := NewReader(opts)
	table, err := reader.ReadTable()
	if err != nil {
		t.Fatalf("ReadTable() error = %v", err)
	}

	if table == nil {
		t.Fatal("ReadTable() returned nil table")
	}

	if table.Name != "users" {
		t.Errorf("Expected table name 'users', got '%s'", table.Name)
	}

	if len(table.Columns) != 6 {
		t.Errorf("Expected 6 columns, got %d", len(table.Columns))
	}
}

func TestReader_ReadDatabase_Directory(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	// Should read both simple.go and complex.go
	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	schema := db.Schemas[0]
	// Should have at least 3 tables from complex.go (users, posts, comments)
	// plus 1 from simple.go (users) - but same table name, so may be overwritten
	if len(schema.Tables) < 3 {
		t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
	}
}

func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: "/nonexistent/file.go",
	}

	reader := NewReader(opts)
	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for invalid file path")
	}
}

func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: "",
	}

	reader := NewReader(opts)
	_, err := reader.ReadDatabase()
	if err == nil {
		t.Error("Expected error for empty file path")
	}
}

func TestReader_NullableTypes(t *testing.T) {
	// This test specifically verifies the NOT NULL logic changes
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	// Find posts table
	var postsTable *models.Table
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			if table.Name == "posts" {
				postsTable = table
				break
			}
		}
	}

	if postsTable == nil {
		t.Fatal("Posts table not found")
	}

	// Test all nullability scenarios
	tests := []struct {
		column  string
		notNull bool
		reason  string
	}{
		{"id", true, "primary key"},
		{"user_id", true, "explicit notnull tag"},
		{"title", true, "explicit notnull tag"},
		{"slug", true, "explicit notnull tag"},
		{"content", true, "explicit notnull tag"},
		{"excerpt", false, "pointer type *string"},
		{"published", true, "primitive bool without nullzero"},
		{"view_count", true, "primitive int64 without nullzero"},
		{"published_at", false, "has nullzero tag"},
		{"created_at", true, "time.Time without nullzero"},
		{"updated_at", true, "time.Time without nullzero"},
	}

	for _, tt := range tests {
		col, exists := postsTable.Columns[tt.column]
		if !exists {
			t.Errorf("Column '%s' not found", tt.column)
			continue
		}

		if col.NotNull != tt.notNull {
			if tt.notNull {
				t.Errorf("Column '%s' should be NOT NULL (%s), but NotNull=%v",
					tt.column, tt.reason, col.NotNull)
			} else {
				t.Errorf("Column '%s' should be nullable (%s), but NotNull=%v",
					tt.column, tt.reason, col.NotNull)
			}
		}
	}
}
101 pkg/readers/dbml/README.md Normal file
@@ -0,0 +1,101 @@
# DBML Reader

Reads Database Markup Language (DBML) files and extracts database schema information.

## Overview

The DBML Reader parses `.dbml` files that define database schemas using the DBML syntax (used by dbdiagram.io) and converts them into RelSpec's internal database model representation.

## Features

- Parses DBML syntax
- Extracts tables, columns, and relationships
- Supports DBML-specific features:
  - Table groups and notes
  - Enum definitions
  - Indexes
  - Foreign key relationships

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.dbml",
	}

	reader := dbml.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DBML file and convert to JSON
relspec --input dbml --in-file schema.dbml --output json --out-file schema.json

# Convert DBML to GORM models
relspec --input dbml --in-file database.dbml --output gorm --out-file models.go
```

## Example DBML File

```dbml
Table users {
  id bigserial [pk, increment]
  username varchar(50) [not null, unique]
  email varchar(100) [not null]
  created_at timestamp [not null, default: `now()`]

  Note: 'Users table'
}

Table posts {
  id bigserial [pk]
  user_id bigint [not null, ref: > users.id]
  title varchar(200) [not null]
  content text

  indexes {
    user_id
    (user_id, created_at) [name: 'idx_user_posts']
  }
}

Ref: posts.user_id > users.id [delete: cascade]
```

## DBML Features Supported

- Table definitions with columns
- Primary keys (`pk`)
- Not null constraints (`not null`)
- Unique constraints (`unique`)
- Default values (`default`)
- Inline references (`ref`)
- Standalone `Ref` blocks
- Indexes and composite indexes
- Table notes and column notes
- Enums

## Notes

- DBML is designed for database documentation and diagramming
- Schema name defaults to `public`
- Relationship cardinality is preserved
96 pkg/readers/dctx/README.md Normal file
@@ -0,0 +1,96 @@
# DCTX Reader

Reads Clarion database dictionary (DCTX) files and extracts database schema information.

## Overview

The DCTX Reader parses Clarion dictionary files (`.dctx`) that define database structures in the Clarion development system and converts them into RelSpec's internal database model representation.

## Features

- Parses the Clarion DCTX XML format
- Extracts file (table) and field (column) definitions
- Supports Clarion data types
- Handles keys (indexes) and relationships

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/database.dctx",
	}

	reader := dctx.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DCTX file and convert to JSON
relspec --input dctx --in-file legacy.dctx --output json --out-file schema.json

# Convert DCTX to GORM models for migration
relspec --input dctx --in-file app.dctx --output gorm --out-file models.go

# Export DCTX to PostgreSQL DDL
relspec --input dctx --in-file database.dctx --output pgsql --out-file schema.sql
```

## Example DCTX Structure

DCTX files are XML-based Clarion dictionary files that define:

- Files (equivalent to tables)
- Fields (columns) with Clarion-specific types
- Keys (indexes)
- Relationships between files

Common Clarion data types:

- `STRING` - Fixed-length string
- `CSTRING` - C-style null-terminated string
- `LONG` - 32-bit integer
- `SHORT` - 16-bit integer
- `DECIMAL` - Decimal number
- `REAL` - Floating point
- `DATE` - Date field
- `TIME` - Time field

## Type Mapping

The reader automatically maps Clarion data types to standard SQL types:

| Clarion Type | SQL Type |
|--------------|----------|
| STRING       | VARCHAR  |
| CSTRING      | VARCHAR  |
| LONG         | INTEGER  |
| SHORT        | SMALLINT |
| DECIMAL      | NUMERIC  |
| REAL         | REAL     |
| DATE         | DATE     |
| TIME         | TIME     |
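The table reads as a straight lookup. A minimal sketch of the mapping as code (hypothetical standalone function; the reader's actual `mapDataType` also derives a column length from the Clarion field size):

```go
// clarionToSQL maps a Clarion dictionary type name to its SQL equivalent.
func clarionToSQL(clarionType string) string {
	switch clarionType {
	case "STRING", "CSTRING":
		return "VARCHAR"
	case "LONG":
		return "INTEGER"
	case "SHORT":
		return "SMALLINT"
	case "DECIMAL":
		return "NUMERIC"
	case "REAL":
		return "REAL"
	case "DATE":
		return "DATE"
	case "TIME":
		return "TIME"
	}
	return "VARCHAR" // assumed fallback; the table above does not specify one
}
```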
## Notes

- DCTX is specific to the Clarion development platform
- Useful for migrating legacy Clarion applications
- Schema name defaults to `public`
- Preserves field properties and constraints where possible
@@ -79,6 +79,8 @@ func (r *Reader) convertToDatabase(dctx *models.DCTXDictionary) (*models.Databas
	db := models.InitDatabase(dbName)
	schema := models.InitSchema("public")

	// Note: DCTX doesn't have database GUID, but schema can use dictionary name if available

	// Create GUID mappings for tables and keys
	tableGuidMap := make(map[string]string)        // GUID -> table name
	keyGuidMap := make(map[string]*models.DCTXKey) // GUID -> key definition
@@ -162,6 +164,10 @@ func (r *Reader) convertTable(dctxTable *models.DCTXTable) (*models.Table, map[s
	tableName := r.sanitizeName(dctxTable.Name)
	table := models.InitTable(tableName, "public")
	table.Description = dctxTable.Description
	// Assign GUID from DCTX table
	if dctxTable.Guid != "" {
		table.GUID = dctxTable.Guid
	}

	fieldGuidMap := make(map[string]string)

@@ -202,6 +208,10 @@ func (r *Reader) convertField(dctxField *models.DCTXField, tableName string) ([]

	// Convert single field
	column := models.InitColumn(r.sanitizeName(dctxField.Name), tableName, "public")
	// Assign GUID from DCTX field
	if dctxField.Guid != "" {
		column.GUID = dctxField.Guid
	}

	// Map Clarion data types
	dataType, length := r.mapDataType(dctxField.DataType, dctxField.Size)
@@ -346,6 +356,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
	constraint.Table = table.Name
	constraint.Schema = table.Schema
	constraint.Columns = columns
	// Assign GUID from DCTX key
	if dctxKey.Guid != "" {
		constraint.GUID = dctxKey.Guid
	}

	table.Constraints[constraint.Name] = constraint

@@ -366,6 +380,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
	index.Columns = columns
	index.Unique = dctxKey.Unique
	index.Type = "btree"
	// Assign GUID from DCTX key
	if dctxKey.Guid != "" {
		index.GUID = dctxKey.Guid
	}

	table.Indexes[index.Name] = index
	return nil
@@ -460,6 +478,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
	constraint.ReferencedColumns = pkColumns
	constraint.OnDelete = r.mapReferentialAction(relation.Delete)
	constraint.OnUpdate = r.mapReferentialAction(relation.Update)
	// Assign GUID from DCTX relation
	if relation.Guid != "" {
		constraint.GUID = relation.Guid
	}

	foreignTable.Constraints[fkName] = constraint

@@ -473,6 +495,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
	relationship.ForeignKey = fkName
	relationship.Properties["on_delete"] = constraint.OnDelete
	relationship.Properties["on_update"] = constraint.OnUpdate
	// Assign GUID from DCTX relation
	if relation.Guid != "" {
		relationship.GUID = relation.Guid
	}

	foreignTable.Relationships[relationshipName] = relationship
}
96 pkg/readers/drawdb/README.md Normal file
@@ -0,0 +1,96 @@
# DrawDB Reader

Reads DrawDB schema files and extracts database schema information.

## Overview

The DrawDB Reader parses JSON files exported from DrawDB (a free online database design tool) and converts them into RelSpec's internal database model representation.

## Features

- Parses DrawDB JSON format
- Extracts tables, fields, and relationships
- Supports DrawDB-specific metadata
- Preserves visual layout information

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/diagram.json",
	}

	reader := drawdb.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DrawDB export and convert to JSON schema
relspec --input drawdb --in-file diagram.json --output json --out-file schema.json

# Convert DrawDB design to GORM models
relspec --input drawdb --in-file design.json --output gorm --out-file models.go
```

## Example DrawDB Export

DrawDB exports database designs as JSON files containing:

```json
{
  "tables": [
    {
      "id": "1",
      "name": "users",
      "fields": [
        {
          "name": "id",
          "type": "BIGINT",
          "primary": true,
          "autoIncrement": true
        },
        {
          "name": "username",
          "type": "VARCHAR",
          "size": 50,
          "notNull": true,
          "unique": true
        }
      ]
    }
  ],
  "relationships": [
    {
      "source": "posts",
      "target": "users",
      "type": "many-to-one"
    }
  ]
}
```

## Notes

- DrawDB is a free online database designer at drawdb.vercel.app
- The export format preserves visual design metadata
- Useful for converting visual designs to code
- Schema defaults to `public`
@@ -140,6 +140,32 @@ func (r *Reader) convertToDatabase(drawSchema *drawdb.DrawDBSchema) (*models.Dat
		db.Schemas = append(db.Schemas, schema)
	}

	// Convert DrawDB subject areas to domains
	for _, area := range drawSchema.SubjectAreas {
		domain := models.InitDomain(area.Name)

		// Find all tables that visually belong to this area.
		// A table belongs to an area if its position is within the area bounds.
		for _, drawTable := range drawSchema.Tables {
			if drawTable.X >= area.X && drawTable.X <= (area.X+area.Width) &&
				drawTable.Y >= area.Y && drawTable.Y <= (area.Y+area.Height) {

				schemaName := drawTable.Schema
				if schemaName == "" {
					schemaName = "public"
				}

				domainTable := models.InitDomainTable(drawTable.Name, schemaName)
				domain.Tables = append(domain.Tables, domainTable)
			}
		}

		// Only add domain if it has tables
		if len(domain.Tables) > 0 {
			db.Domains = append(db.Domains, domain)
		}
	}

	return db, nil
}
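The containment test above reduces to a point-in-rectangle predicate on the table's anchor point; a table straddling an area border counts only if its anchor lies inside. A sketch of that predicate in isolation (hypothetical helper; only the `X`/`Y`/`Width`/`Height` fields appear in the diff):

```go
// tableInArea reports whether a table anchored at (tx, ty) falls inside a
// subject area's bounding box with origin (ax, ay) and size (aw, ah).
func tableInArea(tx, ty, ax, ay, aw, ah float64) bool {
	return tx >= ax && tx <= ax+aw && ty >= ay && ty <= ay+ah
}
```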
90 pkg/readers/drizzle/README.md Normal file
@@ -0,0 +1,90 @@
# Drizzle Reader

Reads TypeScript/JavaScript files containing Drizzle ORM schema definitions and extracts database schema information.

## Overview

The Drizzle Reader parses Drizzle ORM schema files (TypeScript/JavaScript) that define database tables using Drizzle's schema builder and converts them into RelSpec's internal database model representation.

## Features

- Parses Drizzle schema definitions
- Extracts table, column, and relationship information
- Supports various Drizzle column types
- Handles constraints and indexes

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.ts",
	}

	reader := drizzle.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Drizzle schema and convert to JSON
relspec --input drizzle --in-file schema.ts --output json --out-file schema.json

# Convert Drizzle to GORM models
relspec --input drizzle --in-file schema/ --output gorm --out-file models.go
```

## Example Drizzle Schema

```typescript
import { pgTable, serial, varchar, text, timestamp, integer } from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';

export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  username: varchar('username', { length: 50 }).notNull().unique(),
  email: varchar('email', { length: 100 }).notNull(),
  createdAt: timestamp('created_at').notNull().defaultNow(),
});

export const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
  title: varchar('title', { length: 200 }).notNull(),
  content: text('content'),
});

export const usersRelations = relations(users, ({ many }) => ({
  posts: many(posts),
}));

export const postsRelations = relations(posts, ({ one }) => ({
  user: one(users, {
    fields: [posts.userId],
    references: [users.id],
  }),
}));
```

## Notes

- Supports both PostgreSQL and MySQL Drizzle schemas
- Extracts relationship information from `relations` definitions
- Schema defaults to `public` for PostgreSQL
617 pkg/readers/drizzle/reader.go Normal file
@@ -0,0 +1,617 @@
package drizzle

import (
	"bufio"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for Drizzle schema format
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new Drizzle reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads and parses Drizzle schema input, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for Drizzle reader")
	}

	// Check if it's a file or directory
	info, err := os.Stat(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to stat path: %w", err)
	}

	if info.IsDir() {
		// Read all .ts files in the directory
		return r.readDirectory(r.options.FilePath)
	}

	// Read single file
	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	return r.parseDrizzle(string(content))
}

// ReadSchema reads and parses Drizzle schema input, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}

	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in Drizzle schema")
	}

	// Return the first schema
	return db.Schemas[0], nil
}

// ReadTable reads and parses Drizzle schema input, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}

	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in Drizzle schema")
	}

	// Return the first table
	return schema.Tables[0], nil
}

// readDirectory reads all .ts files in a directory and parses them
func (r *Reader) readDirectory(dirPath string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Drizzle
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)

	// Read all .ts files
	files, err := filepath.Glob(filepath.Join(dirPath, "*.ts"))
	if err != nil {
		return nil, fmt.Errorf("failed to glob directory: %w", err)
	}

	// Parse each file
	for _, file := range files {
		content, err := os.ReadFile(file)
		if err != nil {
			return nil, fmt.Errorf("failed to read file %s: %w", file, err)
		}

		// Parse and merge into schema
		fileDB, err := r.parseDrizzle(string(content))
		if err != nil {
			return nil, fmt.Errorf("failed to parse file %s: %w", file, err)
		}

		// Merge schemas
		if len(fileDB.Schemas) > 0 {
			fileSchema := fileDB.Schemas[0]
			schema.Tables = append(schema.Tables, fileSchema.Tables...)
			schema.Enums = append(schema.Enums, fileSchema.Enums...)
		}
	}

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parseDrizzle parses Drizzle schema content and returns a Database model
func (r *Reader) parseDrizzle(content string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Drizzle (PostgreSQL)
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)
	db.DatabaseType = models.PostgresqlDatabaseType

	scanner := bufio.NewScanner(strings.NewReader(content))

	// Regex patterns
	// Match: export const users = pgTable('users', {
	pgTableRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgTable\s*\(\s*['"](\w+)['"]`)
	// Match: export const userRole = pgEnum('UserRole', ['admin', 'user']);
	pgEnumRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgEnum\s*\(\s*['"](\w+)['"]`)

	// State tracking
	var currentTable *models.Table
	var currentTableVarName string
	var inTableBlock bool
	var blockDepth int
	var tableLines []string

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for pgEnum definition
		if matches := pgEnumRegex.FindStringSubmatch(trimmed); matches != nil {
			enum := r.parsePgEnum(trimmed, matches)
			if enum != nil {
				schema.Enums = append(schema.Enums, enum)
			}
			continue
		}

		// Check for pgTable definition
		if matches := pgTableRegex.FindStringSubmatch(trimmed); matches != nil {
			varName := matches[1]
			tableName := matches[2]

			currentTableVarName = varName
			currentTable = models.InitTable(tableName, "public")
			inTableBlock = true
			// Count braces in the first line
			blockDepth = strings.Count(line, "{") - strings.Count(line, "}")
			tableLines = []string{line}
			continue
		}

		// If we're in a table block, accumulate lines
		if inTableBlock {
			tableLines = append(tableLines, line)

			// Track brace depth
			blockDepth += strings.Count(line, "{")
			blockDepth -= strings.Count(line, "}")

			// Check if we've closed the table definition
			if blockDepth < 0 || (blockDepth == 0 && strings.Contains(line, ");")) {
				// Parse the complete table block
				if currentTable != nil {
					r.parseTableBlock(tableLines, currentTable, currentTableVarName)
					schema.Tables = append(schema.Tables, currentTable)
					currentTable = nil
				}
				inTableBlock = false
				tableLines = nil
			}
		}
	}

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parsePgEnum parses a pgEnum definition
func (r *Reader) parsePgEnum(line string, matches []string) *models.Enum {
	// matches[1] = variable name
	// matches[2] = enum name

	enumName := matches[2]

	// Extract values from the array
	// Example: pgEnum('UserRole', ['admin', 'user', 'guest'])
	valuesRegex := regexp.MustCompile(`\[(.*?)\]`)
	valuesMatch := valuesRegex.FindStringSubmatch(line)
	if valuesMatch == nil {
		return nil
	}

	valuesStr := valuesMatch[1]
	// Split by comma and clean up
	valueParts := strings.Split(valuesStr, ",")
	values := make([]string, 0)
	for _, part := range valueParts {
		// Remove quotes and whitespace
		cleaned := strings.TrimSpace(part)
		cleaned = strings.Trim(cleaned, "'\"")
		if cleaned != "" {
			values = append(values, cleaned)
		}
	}

	enum := models.InitEnum(enumName, "public")
	enum.Values = values
	return enum
}

// parseTableBlock parses a complete pgTable definition block
func (r *Reader) parseTableBlock(lines []string, table *models.Table, tableVarName string) {
	// Join all lines into a single string for easier parsing
	fullText := strings.Join(lines, "\n")

	// Extract the columns block and index callback separately.
	// The structure is: pgTable('name', { columns }, (table) => [indexes])

	// Find the main object block (columns)
	columnsStart := strings.Index(fullText, "{")
	if columnsStart == -1 {
		return
	}

	// Find matching closing brace for columns
	depth := 0
	columnsEnd := -1
	for i := columnsStart; i < len(fullText); i++ {
		if fullText[i] == '{' {
			depth++
		} else if fullText[i] == '}' {
			depth--
			if depth == 0 {
				columnsEnd = i
				break
			}
		}
	}

	if columnsEnd == -1 {
		return
	}

	columnsBlock := fullText[columnsStart+1 : columnsEnd]

	// Parse columns
	r.parseColumnsBlock(columnsBlock, table, tableVarName)

	// Check for index callback: , (table) => [ or , ({ col1, col2 }) => [
	// Match: }, followed by arrow function with any parameters.
	// Use the (?s) flag to make . match newlines.
	indexCallbackRegex := regexp.MustCompile(`(?s)}\s*,\s*\(.*?\)\s*=>\s*\[`)
	if indexCallbackRegex.MatchString(fullText[columnsEnd:]) {
		// Find the index array
		indexStart := strings.Index(fullText[columnsEnd:], "[")
		if indexStart != -1 {
			indexStart += columnsEnd
			indexDepth := 0
			indexEnd := -1
			for i := indexStart; i < len(fullText); i++ {
				if fullText[i] == '[' {
					indexDepth++
				} else if fullText[i] == ']' {
					indexDepth--
					if indexDepth == 0 {
						indexEnd = i
						break
					}
				}
			}

			if indexEnd != -1 {
				indexBlock := fullText[indexStart+1 : indexEnd]
				r.parseIndexBlock(indexBlock, table, tableVarName)
			}
		}
	}
}

// parseColumnsBlock parses the columns block of a table
func (r *Reader) parseColumnsBlock(block string, table *models.Table, tableVarName string) {
	// Split by lines and parse each column definition
	lines := strings.Split(block, "\n")

	for _, line := range lines {
		trimmed := strings.TrimSpace(line)
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Match: fieldName: columnType('columnName').modifier().modifier(),
		// Example: id: integer('id').primaryKey(),
		columnRegex := regexp.MustCompile(`(\w+):\s*(\w+)\s*\(`)
		matches := columnRegex.FindStringSubmatch(trimmed)
		if matches == nil {
			continue
		}

		fieldName := matches[1]
		columnType := matches[2]

		// Parse the column definition
		col := r.parseColumnDefinition(trimmed, fieldName, columnType, table)
		if col != nil {
			table.Columns[col.Name] = col
		}
	}
}

// parseColumnDefinition parses a single column definition line
func (r *Reader) parseColumnDefinition(line, fieldName, drizzleType string, table *models.Table) *models.Column {
	// Check for enum column syntax: pgEnum('EnumName')('column_name')
	enumRegex := regexp.MustCompile(`pgEnum\s*\(['"](\w+)['"]\)\s*\(['"](\w+)['"]\)`)
	if enumMatch := enumRegex.FindStringSubmatch(line); enumMatch != nil {
		enumName := enumMatch[1]
		columnName := enumMatch[2]

		column := models.InitColumn(columnName, table.Name, table.Schema)
		column.Type = enumName
		column.NotNull = false

		// Parse modifiers
		r.parseColumnModifiers(line, column, table)
		return column
	}

	// Extract column name from the first argument
	// Example: integer('id')
	nameRegex := regexp.MustCompile(`\w+\s*\(['"](\w+)['"]\)`)
	nameMatch := nameRegex.FindStringSubmatch(line)
	if nameMatch == nil {
		return nil
	}

	columnName := nameMatch[1]
	column := models.InitColumn(columnName, table.Name, table.Schema)

	// Map Drizzle type to SQL type
	column.Type = r.drizzleTypeToSQL(drizzleType)

	// Default: columns are nullable unless specified
	column.NotNull = false

	// Parse modifiers
	r.parseColumnModifiers(line, column, table)

	return column
}

// drizzleTypeToSQL converts Drizzle column types to SQL types
func (r *Reader) drizzleTypeToSQL(drizzleType string) string {
	typeMap := map[string]string{
		// Integer types
		"integer":  "integer",
		"bigint":   "bigint",
		"smallint": "smallint",

		// Serial types
		"serial":      "serial",
		"bigserial":   "bigserial",
		"smallserial": "smallserial",

		// Numeric types
		"numeric":         "numeric",
		"real":            "real",
		"doublePrecision": "double precision",

		// Character types
		"text":    "text",
		"varchar": "varchar",
		"char":    "char",

		// Boolean
		"boolean": "boolean",

		// Binary
		"bytea": "bytea",

		// JSON
		"json":  "json",
		"jsonb": "jsonb",

		// Date/Time
		"time":      "time",
		"timestamp": "timestamp",
		"date":      "date",
		"interval":  "interval",

		// UUID
		"uuid": "uuid",

		// Geometric
		"point": "point",
		"line":  "line",
	}

	if sqlType, ok := typeMap[drizzleType]; ok {
		return sqlType
	}

	// If not found, might be an enum - return as-is
	return drizzleType
}

// parseColumnModifiers parses column modifiers like .primaryKey(), .notNull(), etc.
func (r *Reader) parseColumnModifiers(line string, column *models.Column, table *models.Table) {
	// Check for .primaryKey()
	if strings.Contains(line, ".primaryKey()") {
		column.IsPrimaryKey = true
		column.NotNull = true
	}

	// Check for .notNull()
	if strings.Contains(line, ".notNull()") {
		column.NotNull = true
	}

	// Check for .unique()
	if strings.Contains(line, ".unique()") {
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s", column.Name),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = []string{column.Name}
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint
	}

	// Check for .default(...)
	// Need to handle nested backticks and parentheses in SQL expressions
	defaultIdx := strings.Index(line, ".default(")
	if defaultIdx != -1 {
		start := defaultIdx + len(".default(")
		depth := 1
		inBacktick := false
		i := start

		for i < len(line) && depth > 0 {
			ch := line[i]
			if ch == '`' {
				inBacktick = !inBacktick
			} else if !inBacktick {
				switch ch {
				case '(':
					depth++
				case ')':
|
||||
depth--
|
||||
}
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
if depth == 0 {
|
||||
defaultValue := strings.TrimSpace(line[start : i-1])
|
||||
r.parseDefaultValue(defaultValue, column)
|
||||
}
|
||||
}
|
||||
|
||||
// Check for .generatedAlwaysAsIdentity()
|
||||
if strings.Contains(line, ".generatedAlwaysAsIdentity()") {
|
||||
column.AutoIncrement = true
|
||||
}
|
||||
|
||||
// Check for .references(() => otherTable.column)
|
||||
referencesRegex := regexp.MustCompile(`\.references\(\(\)\s*=>\s*(\w+)\.(\w+)\)`)
|
||||
if matches := referencesRegex.FindStringSubmatch(line); matches != nil {
|
||||
refTableVar := matches[1]
|
||||
refColumn := matches[2]
|
||||
|
||||
// Create FK constraint
|
||||
constraintName := fmt.Sprintf("fk_%s_%s", table.Name, column.Name)
|
||||
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
|
||||
constraint.Schema = table.Schema
|
||||
constraint.Table = table.Name
|
||||
constraint.Columns = []string{column.Name}
|
||||
constraint.ReferencedSchema = table.Schema // Assume same schema
|
||||
constraint.ReferencedTable = r.varNameToTableName(refTableVar)
|
||||
constraint.ReferencedColumns = []string{refColumn}
|
||||
|
||||
table.Constraints[constraint.Name] = constraint
|
||||
}
|
||||
}
|
||||
|
||||
// parseDefaultValue parses a default value expression
|
||||
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
|
||||
defaultExpr = strings.TrimSpace(defaultExpr)
|
||||
|
||||
// Handle SQL expressions like sql`now()`
|
||||
sqlRegex := regexp.MustCompile("sql`([^`]+)`")
|
||||
if match := sqlRegex.FindStringSubmatch(defaultExpr); match != nil {
|
||||
column.Default = match[1]
|
||||
return
|
||||
}
|
||||
|
||||
// Handle boolean values
|
||||
if defaultExpr == "true" {
|
||||
column.Default = true
|
||||
return
|
||||
}
|
||||
if defaultExpr == "false" {
|
||||
column.Default = false
|
||||
return
|
||||
}
|
||||
|
||||
// Handle string literals
|
||||
if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
|
||||
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
|
||||
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||
return
|
||||
}
|
||||
|
||||
// Try to parse as number
|
||||
column.Default = defaultExpr
|
||||
}
|
||||
|
||||
// parseIndexBlock parses the index callback block
|
||||
func (r *Reader) parseIndexBlock(block string, table *models.Table, tableVarName string) {
|
||||
// Split by lines
|
||||
lines := strings.Split(block, "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
trimmed := strings.TrimSpace(line)
|
||||
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||
continue
|
||||
}
|
||||
|
||||
// Match: index('index_name').on(table.col1, table.col2)
|
||||
// or: uniqueIndex('index_name').on(table.col1, table.col2)
|
||||
indexRegex := regexp.MustCompile(`(uniqueIndex|index)\s*\(['"](\w+)['"]\)\s*\.on\s*\((.*?)\)`)
|
||||
matches := indexRegex.FindStringSubmatch(trimmed)
|
||||
if matches == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
indexType := matches[1]
|
||||
indexName := matches[2]
|
||||
columnsStr := matches[3]
|
||||
|
||||
// Parse column list
|
||||
columnParts := strings.Split(columnsStr, ",")
|
||||
columns := make([]string, 0)
|
||||
for _, part := range columnParts {
|
||||
// Remove table prefix: table.column -> column
|
||||
cleaned := strings.TrimSpace(part)
|
||||
if strings.Contains(cleaned, ".") {
|
||||
parts := strings.Split(cleaned, ".")
|
||||
cleaned = parts[len(parts)-1]
|
||||
}
|
||||
columns = append(columns, cleaned)
|
||||
}
|
||||
|
||||
if indexType == "uniqueIndex" {
|
||||
// Create unique constraint
|
||||
constraint := models.InitConstraint(indexName, models.UniqueConstraint)
|
||||
constraint.Schema = table.Schema
|
||||
constraint.Table = table.Name
|
||||
constraint.Columns = columns
|
||||
table.Constraints[constraint.Name] = constraint
|
||||
} else {
|
||||
// Create index
|
||||
index := models.InitIndex(indexName, table.Name, table.Schema)
|
||||
index.Columns = columns
|
||||
index.Unique = false
|
||||
table.Indexes[index.Name] = index
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// varNameToTableName converts a variable name to a table name
|
||||
// For now, just return as-is (could add inflection later)
|
||||
func (r *Reader) varNameToTableName(varName string) string {
|
||||
// TODO: Could add conversion logic here if needed
|
||||
// For now, assume variable name matches table name
|
||||
return varName
|
||||
}
|
||||
141
pkg/readers/gorm/README.md
Normal file
@@ -0,0 +1,141 @@
# GORM Reader

Reads Go source files containing GORM model definitions and extracts database schema information.

## Overview

The GORM Reader parses Go source code files that define GORM models (structs with `gorm` struct tags) and converts them into RelSpec's internal database model representation. It supports reading from individual files or entire directories.

## Features

- Parses GORM struct tags to extract column definitions
- Extracts table names from `TableName()` methods
- Identifies primary keys, foreign keys, and indexes
- Supports relationship detection (has-many, belongs-to)
- Handles both single files and directories

## Usage

### Basic Example

```go
package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
)

func main() {
    // Read from a single file
    options := &readers.ReaderOptions{
        FilePath: "/path/to/models.go",
    }

    reader := gorm.NewReader(options)
    db, err := reader.ReadDatabase()
    if err != nil {
        panic(err)
    }

    fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### Reading from Directory

```go
// Read all .go files from a directory
options := &readers.ReaderOptions{
    FilePath: "/path/to/models/",
}

reader := gorm.NewReader(options)
db, err := reader.ReadDatabase()
if err != nil {
    panic(err)
}
```

### CLI Example

```bash
# Read GORM models and convert to JSON
relspec --input gorm --in-file models/ --output json --out-file schema.json

# Convert GORM models to Bun
relspec --input gorm --in-file models.go --output bun --out-file bun_models.go
```

## Supported GORM Tags

The reader recognizes the following GORM struct tags:

- `column` - Column name
- `type` - SQL data type (e.g., `varchar(255)`, `bigint`)
- `primaryKey` or `primary_key` - Mark as primary key
- `not null` - NOT NULL constraint
- `autoIncrement` - Auto-increment column
- `default` - Default value
- `size` - Column size/length
- `index` - Create index
- `uniqueIndex` - Create unique index
- `unique` - Unique constraint
- `foreignKey` - Foreign key column
- `references` - Referenced column
- `constraint` - Constraint behavior (OnDelete, OnUpdate)

## Example GORM Model

```go
package models

import (
    "time"

    "gorm.io/gorm"
)

type ModelUser struct {
    gorm.Model
    ID        int64     `gorm:"column:id;type:bigint;primaryKey;autoIncrement"`
    Username  string    `gorm:"column:username;type:varchar(50);not null;uniqueIndex"`
    Email     string    `gorm:"column:email;type:varchar(100);not null"`
    CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()"`

    // Relationships
    Posts []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE"`
}

func (ModelUser) TableName() string {
    return "public.users"
}

type ModelPost struct {
    ID      int64  `gorm:"column:id;type:bigint;primaryKey"`
    UserID  int64  `gorm:"column:user_id;type:bigint;not null"`
    Title   string `gorm:"column:title;type:varchar(200);not null"`
    Content string `gorm:"column:content;type:text"`

    // Belongs-to relationship
    User *ModelUser `gorm:"foreignKey:UserID;references:ID"`
}

func (ModelPost) TableName() string {
    return "public.posts"
}
```

## Notes

- Test files (ending in `_test.go`) are automatically excluded
- The `gorm.Model` embedded struct is automatically recognized and skipped
- Table names are derived from struct names if a `TableName()` method is not present
- Schema defaults to `public` if not specified in `TableName()` (see the sketch below)
- Relationships are inferred from GORM relationship tags
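The schema/table split from `TableName()` is presumably a simple prefix parse; the following is a minimal sketch of that behavior (the helper `splitTableName` is hypothetical, not part of the reader's API):

```go
package gormutil // hypothetical example package

import "strings"

// splitTableName illustrates how a TableName() value such as
// "public.users" could map to a schema and a table name; values
// without a dot fall back to the default "public" schema.
func splitTableName(full string) (schema, table string) {
    if i := strings.Index(full, "."); i >= 0 {
        return full[:i], full[i+1:]
    }
    return "public", full // schema defaults to public
}
```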
## Limitations

- Complex relationship types (many-to-many with join tables) may need manual verification
- Custom GORM types may not be fully supported
- Some advanced GORM features may not be captured
@@ -693,7 +693,7 @@ func (r *Reader) deriveTableName(structName string) string {

// parseColumn parses a struct field into a Column model
// Returns the column and any inline reference information (e.g., "mainaccount(id_mainaccount)")
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (*models.Column, string) {
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (col *models.Column, ref string) {
    // Extract gorm tag
    gormTag := r.extractGormTag(tag)
    if gormTag == "" {
@@ -756,20 +756,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
    // - explicit "not null" tag means NOT NULL
    // - absence of "not null" tag with sql_types means nullable
    // - primitive types (string, int64, bool) default to NOT NULL unless explicitly nullable
    // Primary keys are always NOT NULL
    column.NotNull = false
    if _, hasNotNull := parts["not null"]; hasNotNull {
        column.NotNull = true
    } else {
        // If no explicit "not null" tag, check the Go type
        if r.isNullableGoType(fieldType) {
            // sql_types.SqlString, etc. are nullable by default
            column.NotNull = false
        } else {
            // Primitive types default to NOT NULL
            column.NotNull = false // Default to nullable unless explicitly set
        }
        // sql_types.SqlString, etc. are nullable by default
        column.NotNull = !r.isNullableGoType(fieldType)
    }

    // Primary keys are always NOT NULL
    if column.IsPrimaryKey {
        column.NotNull = true
    }
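The hunk above collapses the old branch-heavy nullability logic (which contradicted its own comments by assigning `false` on both branches) into a single expression, with the primary-key override applied afterwards. A minimal standalone sketch of the resulting rule (`resolveNotNull` is illustrative, not a function from the source):

```go
package gormutil // hypothetical example package

// resolveNotNull sketches the simplified rule: an explicit "not null"
// tag wins; otherwise the Go type decides (pointer and sql_types
// wrapper fields count as nullable); primary keys are forced to
// NOT NULL regardless.
func resolveNotNull(hasNotNullTag, nullableGoType, isPrimaryKey bool) bool {
    notNull := hasNotNullTag || !nullableGoType
    if isPrimaryKey {
        notNull = true
    }
    return notNull
}
```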
464
pkg/readers/gorm/reader_test.go
Normal file
@@ -0,0 +1,464 @@
package gorm

import (
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if db == nil {
        t.Fatal("ReadDatabase() returned nil database")
    }

    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
    }

    table := schema.Tables[0]
    if table.Name != "users" {
        t.Errorf("Expected table name 'users', got '%s'", table.Name)
    }

    if len(table.Columns) != 6 {
        t.Errorf("Expected 6 columns, got %d", len(table.Columns))
    }

    // Verify id column - primary key should be NOT NULL
    idCol, exists := table.Columns["id"]
    if !exists {
        t.Fatal("Column 'id' not found")
    }
    if !idCol.IsPrimaryKey {
        t.Error("Column 'id' should be primary key")
    }
    if !idCol.AutoIncrement {
        t.Error("Column 'id' should be auto-increment")
    }
    if !idCol.NotNull {
        t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
    }
    if idCol.Type != "bigint" {
        t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
    }

    // Verify email column - explicit "not null" tag should be NOT NULL
    emailCol, exists := table.Columns["email"]
    if !exists {
        t.Fatal("Column 'email' not found")
    }
    if !emailCol.NotNull {
        t.Error("Column 'email' should be NOT NULL (explicit 'not null' tag)")
    }
    if emailCol.Type != "varchar" || emailCol.Length != 255 {
        t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
    }

    // Verify name column - primitive string type should be NOT NULL by default
    nameCol, exists := table.Columns["name"]
    if !exists {
        t.Fatal("Column 'name' not found")
    }
    if !nameCol.NotNull {
        t.Error("Column 'name' should be NOT NULL (primitive string type defaults to NOT NULL)")
    }
    if nameCol.Type != "text" {
        t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
    }

    // Verify age column - pointer type should be nullable (NOT NULL = false)
    ageCol, exists := table.Columns["age"]
    if !exists {
        t.Fatal("Column 'age' not found")
    }
    if ageCol.NotNull {
        t.Error("Column 'age' should be nullable (pointer type *int)")
    }
    if ageCol.Type != "integer" {
        t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
    }

    // Verify is_active column - primitive bool type should be NOT NULL by default
    isActiveCol, exists := table.Columns["is_active"]
    if !exists {
        t.Fatal("Column 'is_active' not found")
    }
    if !isActiveCol.NotNull {
        t.Error("Column 'is_active' should be NOT NULL (primitive bool type defaults to NOT NULL)")
    }
    if isActiveCol.Type != "boolean" {
        t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
    }

    // Verify created_at column - time.Time should be NOT NULL by default
    createdAtCol, exists := table.Columns["created_at"]
    if !exists {
        t.Fatal("Column 'created_at' not found")
    }
    if !createdAtCol.NotNull {
        t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
    }
    if createdAtCol.Type != "timestamp" {
        t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
    }
    if createdAtCol.Default != "now()" {
        t.Errorf("Expected created_at default 'now()', got '%v'", createdAtCol.Default)
    }
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "complex.go"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if db == nil {
        t.Fatal("ReadDatabase() returned nil database")
    }

    // Verify schema
    if len(db.Schemas) != 1 {
        t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    // Verify tables
    if len(schema.Tables) != 3 {
        t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
    }

    // Find tables
    var usersTable, postsTable, commentsTable *models.Table
    for _, table := range schema.Tables {
        switch table.Name {
        case "users":
            usersTable = table
        case "posts":
            postsTable = table
        case "comments":
            commentsTable = table
        }
    }

    if usersTable == nil {
        t.Fatal("Users table not found")
    }
    if postsTable == nil {
        t.Fatal("Posts table not found")
    }
    if commentsTable == nil {
        t.Fatal("Comments table not found")
    }

    // Verify users table - test NOT NULL logic for various field types
    if len(usersTable.Columns) != 10 {
        t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
    }

    // username - NOT NULL (explicit tag)
    usernameCol, exists := usersTable.Columns["username"]
    if !exists {
        t.Fatal("Column 'username' not found")
    }
    if !usernameCol.NotNull {
        t.Error("Column 'username' should be NOT NULL (explicit 'not null' tag)")
    }

    // first_name - nullable (pointer type)
    firstNameCol, exists := usersTable.Columns["first_name"]
    if !exists {
        t.Fatal("Column 'first_name' not found")
    }
    if firstNameCol.NotNull {
        t.Error("Column 'first_name' should be nullable (pointer type *string)")
    }

    // last_name - nullable (pointer type)
    lastNameCol, exists := usersTable.Columns["last_name"]
    if !exists {
        t.Fatal("Column 'last_name' not found")
    }
    if lastNameCol.NotNull {
        t.Error("Column 'last_name' should be nullable (pointer type *string)")
    }

    // bio - nullable (pointer type)
    bioCol, exists := usersTable.Columns["bio"]
    if !exists {
        t.Fatal("Column 'bio' not found")
    }
    if bioCol.NotNull {
        t.Error("Column 'bio' should be nullable (pointer type *string)")
    }

    // is_active - NOT NULL (primitive bool)
    isActiveCol, exists := usersTable.Columns["is_active"]
    if !exists {
        t.Fatal("Column 'is_active' not found")
    }
    if !isActiveCol.NotNull {
        t.Error("Column 'is_active' should be NOT NULL (primitive bool type)")
    }

    // Verify users table indexes
    if len(usersTable.Indexes) < 1 {
        t.Error("Expected at least 1 index on users table")
    }

    // Verify posts table
    if len(postsTable.Columns) != 11 {
        t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
    }

    // excerpt - nullable (pointer type)
    excerptCol, exists := postsTable.Columns["excerpt"]
    if !exists {
        t.Fatal("Column 'excerpt' not found")
    }
    if excerptCol.NotNull {
        t.Error("Column 'excerpt' should be nullable (pointer type *string)")
    }

    // published - NOT NULL (primitive bool with default)
    publishedCol, exists := postsTable.Columns["published"]
    if !exists {
        t.Fatal("Column 'published' not found")
    }
    if !publishedCol.NotNull {
        t.Error("Column 'published' should be NOT NULL (primitive bool type)")
    }
    if publishedCol.Default != "false" {
        t.Errorf("Expected published default 'false', got '%v'", publishedCol.Default)
    }

    // published_at - nullable (pointer to time.Time)
    publishedAtCol, exists := postsTable.Columns["published_at"]
    if !exists {
        t.Fatal("Column 'published_at' not found")
    }
    if publishedAtCol.NotNull {
        t.Error("Column 'published_at' should be nullable (pointer type *time.Time)")
    }

    // view_count - NOT NULL (primitive int64 with default)
    viewCountCol, exists := postsTable.Columns["view_count"]
    if !exists {
        t.Fatal("Column 'view_count' not found")
    }
    if !viewCountCol.NotNull {
        t.Error("Column 'view_count' should be NOT NULL (primitive int64 type)")
    }
    if viewCountCol.Default != "0" {
        t.Errorf("Expected view_count default '0', got '%v'", viewCountCol.Default)
    }

    // Verify posts table indexes
    if len(postsTable.Indexes) < 1 {
        t.Error("Expected at least 1 index on posts table")
    }

    // Verify comments table
    if len(commentsTable.Columns) != 6 {
        t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
    }

    // user_id - nullable (pointer type)
    userIDCol, exists := commentsTable.Columns["user_id"]
    if !exists {
        t.Fatal("Column 'user_id' not found in comments table")
    }
    if userIDCol.NotNull {
        t.Error("Column 'user_id' should be nullable (pointer type *int64)")
    }

    // post_id - NOT NULL (explicit tag)
    postIDCol, exists := commentsTable.Columns["post_id"]
    if !exists {
        t.Fatal("Column 'post_id' not found in comments table")
    }
    if !postIDCol.NotNull {
        t.Error("Column 'post_id' should be NOT NULL (explicit 'not null' tag)")
    }

    // Verify foreign key constraints
    if len(postsTable.Constraints) == 0 {
        t.Error("Expected at least one constraint on posts table")
    }

    // Find FK constraint to users
    var fkPostsUser *models.Constraint
    for _, c := range postsTable.Constraints {
        if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
            fkPostsUser = c
            break
        }
    }

    if fkPostsUser != nil {
        if fkPostsUser.OnDelete != "CASCADE" {
            t.Errorf("Expected ON DELETE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnDelete)
        }
        if fkPostsUser.OnUpdate != "CASCADE" {
            t.Errorf("Expected ON UPDATE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnUpdate)
        }
    }

    // Verify comments table constraints
    if len(commentsTable.Constraints) == 0 {
        t.Error("Expected at least one constraint on comments table")
    }

    // Find FK constraints
    var fkCommentsPost, fkCommentsUser *models.Constraint
    for _, c := range commentsTable.Constraints {
        if c.Type == models.ForeignKeyConstraint {
            if c.ReferencedTable == "posts" {
                fkCommentsPost = c
            } else if c.ReferencedTable == "users" {
                fkCommentsUser = c
            }
        }
    }

    if fkCommentsPost != nil {
        if fkCommentsPost.OnDelete != "CASCADE" {
            t.Errorf("Expected ON DELETE CASCADE for comments->posts FK, got '%s'", fkCommentsPost.OnDelete)
        }
    }

    if fkCommentsUser != nil {
        if fkCommentsUser.OnDelete != "SET NULL" {
            t.Errorf("Expected ON DELETE SET NULL for comments->users FK, got '%s'", fkCommentsUser.OnDelete)
        }
    }
}

func TestReader_ReadSchema(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
    }

    reader := NewReader(opts)
    schema, err := reader.ReadSchema()
    if err != nil {
        t.Fatalf("ReadSchema() error = %v", err)
    }

    if schema == nil {
        t.Fatal("ReadSchema() returned nil schema")
    }

    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Errorf("Expected 1 table, got %d", len(schema.Tables))
    }
}

func TestReader_ReadTable(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
    }

    reader := NewReader(opts)
    table, err := reader.ReadTable()
    if err != nil {
        t.Fatalf("ReadTable() error = %v", err)
    }

    if table == nil {
        t.Fatal("ReadTable() returned nil table")
    }

    if table.Name != "users" {
        t.Errorf("Expected table name 'users', got '%s'", table.Name)
    }

    if len(table.Columns) != 6 {
        t.Errorf("Expected 6 columns, got %d", len(table.Columns))
    }
}

func TestReader_ReadDatabase_Directory(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if db == nil {
        t.Fatal("ReadDatabase() returned nil database")
    }

    // Should read both simple.go and complex.go
    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    // Should have at least 3 tables from complex.go (users, posts, comments)
    // plus 1 from simple.go (users) - but same table name, so it may be overwritten
    if len(schema.Tables) < 3 {
        t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
    }
}

func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "/nonexistent/file.go",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for invalid file path")
    }
}

func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for empty file path")
    }
}
203
pkg/readers/graphql/README.md
Normal file
@@ -0,0 +1,203 @@
# GraphQL Schema Reader

The GraphQL reader parses GraphQL Schema Definition Language (SDL) files and converts them into RelSpec's internal database model.

## Features

- **Standard GraphQL SDL** support (generic, non-framework-specific)
- **Type to Table mapping**: GraphQL types become database tables
- **Field to Column mapping**: GraphQL fields become table columns
- **Enum support**: GraphQL enums are preserved
- **Custom scalars**: DateTime, JSON, Date automatically mapped to appropriate SQL types
- **Implicit relationships**: Detects relationships from field types
- **Many-to-many support**: Creates junction tables for bidirectional array relationships
- **Configurable ID mapping**: Choose between bigint (default) or UUID for ID fields

## Supported GraphQL Features

### Built-in Scalars

- `ID` → bigint (default) or uuid (configurable)
- `String` → text
- `Int` → integer
- `Float` → double precision
- `Boolean` → boolean

### Custom Scalars

- `DateTime` → timestamp
- `JSON` → jsonb
- `Date` → date
- `Time` → time
- `Decimal` → numeric

Additional custom scalars can be mapped via metadata.

### Relationships

Relationships are inferred from field types:

```graphql
type Post {
  id: ID!
  title: String!
  author: User!   # Many-to-one (creates authorId FK column, NOT NULL)
  reviewer: User  # Many-to-one nullable (creates reviewerId FK column, NULL)
  tags: [Tag!]!   # One-to-many or many-to-many (depending on reverse)
}

type User {
  id: ID!
  posts: [Post!]!  # Reverse of Post.author (no FK created)
}

type Tag {
  id: ID!
  posts: [Post!]!  # Many-to-many with Post (creates PostTag junction table)
}
```

**Relationship Detection Rules** (sketched in code below):
- Single type reference (`user: User`) → Creates FK column (e.g., `userId`)
- Array type reference (`posts: [Post!]!`) → One-to-many reverse (no FK on this table)
- Bidirectional arrays → Many-to-many (creates junction table)
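A minimal Go sketch of these rules, mirroring the reader's internal array flags (the function `classifyRelation` is illustrative only, not part of the package):

```go
package graphqlutil // hypothetical example package

// classifyRelation mirrors the three rules above: a single reference
// puts an FK column on this table; mutual arrays become a junction
// table; a one-sided array is just the reverse of a one-to-many.
func classifyRelation(isArray, reverseIsArray bool) string {
    switch {
    case !isArray:
        return "many-to-one: FK column on this table"
    case reverseIsArray:
        return "many-to-many: junction table created"
    default:
        return "one-to-many reverse: no FK on this table"
    }
}
```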
### Enums

```graphql
enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  role: Role!
}
```

Enums are preserved in the schema and can be used as column types.

## Usage

### Basic Usage

```go
import (
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With UUID ID Type

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "idType": "uuid", // Map ID scalar to uuid instead of bigint
    },
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With Per-Type ID Mapping

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "typeIdMappings": map[string]string{
            "User": "uuid",   // User.id → uuid
            "Post": "bigint", // Post.id → bigint
        },
    },
}
```
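Presumably a per-type entry overrides the global `idType`, falling back to the documented bigint default when neither is set; that precedence is an assumption, sketched below (`resolveIDType` is not an actual exported function):

```go
package graphqlutil // hypothetical example package

// resolveIDType sketches the assumed lookup order for a type's ID
// column: typeIdMappings entry first, then the global idType, then
// the documented default of "bigint".
func resolveIDType(typeName, globalIDType string, perType map[string]string) string {
    if t, ok := perType[typeName]; ok {
        return t
    }
    if globalIDType != "" {
        return globalIDType
    }
    return "bigint"
}
```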
|
||||
### With Custom Scalar Mappings
|
||||
|
||||
```go
|
||||
opts := &readers.ReaderOptions{
|
||||
FilePath: "schema.graphql",
|
||||
Metadata: map[string]interface{}{
|
||||
"customScalarMappings": map[string]string{
|
||||
"Upload": "bytea",
|
||||
"Decimal": "numeric(10,2)",
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## CLI Usage
|
||||
|
||||
```bash
|
||||
# Convert GraphQL to JSON
|
||||
relspec convert --from graphql --from-path schema.graphql \
|
||||
--to json --to-path schema.json
|
||||
|
||||
# Convert GraphQL to GORM models
|
||||
relspec convert --from graphql --from-path schema.graphql \
|
||||
--to gorm --to-path models/ --package models
|
||||
|
||||
# Convert GraphQL to PostgreSQL SQL
|
||||
relspec convert --from graphql --from-path schema.graphql \
|
||||
--to pgsql --to-path schema.sql
|
||||
```
|
||||
|
||||
## Metadata Options
|
||||
|
||||
| Option | Type | Description | Default |
|
||||
|--------|------|-------------|---------|
|
||||
| `idType` | string | Global ID type mapping ("bigint" or "uuid") | "bigint" |
|
||||
| `typeIdMappings` | map[string]string | Per-type ID mappings | {} |
|
||||
| `customScalarMappings` | map[string]string | Custom scalar to SQL type mappings | {} |
|
||||
| `schemaName` | string | Schema name for all tables | "public" |
|
||||
|
||||
## Limitations
|
||||
|
||||
- Only supports GraphQL SDL (Schema Definition Language), not queries or mutations
|
||||
- Directives are ignored (except for future extensibility)
|
||||
- Interfaces and Unions are not supported
|
||||
- GraphQL's concept of "schema" is different from database schemas; all types go into a single database schema (default: "public")
|
||||
|
||||
## Example
|
||||
|
||||
**Input** (`schema.graphql`):
|
||||
```graphql
|
||||
scalar DateTime
|
||||
|
||||
enum Role {
|
||||
ADMIN
|
||||
USER
|
||||
}
|
||||
|
||||
type User {
|
||||
id: ID!
|
||||
email: String!
|
||||
role: Role!
|
||||
createdAt: DateTime!
|
||||
posts: [Post!]!
|
||||
}
|
||||
|
||||
type Post {
|
||||
id: ID!
|
||||
title: String!
|
||||
content: String
|
||||
published: Boolean!
|
||||
author: User!
|
||||
}
|
||||
```
|
||||
|
||||
**Result**: Database with:
|
||||
- 2 tables: `User` and `Post`
|
||||
- `Post` table has `authorId` foreign key to `User.id`
|
||||
- `Role` enum with values: ADMIN, USER
|
||||
- Custom scalar `DateTime` mapped to `timestamp`
|
||||
275
pkg/readers/graphql/reader.go
Normal file
@@ -0,0 +1,275 @@
package graphql

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader parses GraphQL SDL files into RelSpec's database model.
type Reader struct {
    options *readers.ReaderOptions
}

// NewReader creates a Reader with the given options.
func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

// ReadDatabase reads and parses the configured GraphQL SDL file.
func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("file path is required for GraphQL reader")
    }

    content, err := os.ReadFile(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read file: %w", err)
    }

    return r.parseGraphQL(string(content))
}

// ReadSchema returns the first schema of the parsed database.
func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }

    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schemas found")
    }

    return db.Schemas[0], nil
}

// ReadTable returns the first table of the first schema.
func (r *Reader) ReadTable() (*models.Table, error) {
    schema, err := r.ReadSchema()
    if err != nil {
        return nil, err
    }

    if len(schema.Tables) == 0 {
        return nil, fmt.Errorf("no tables found")
    }

    return schema.Tables[0], nil
}

// parseContext tracks the state of the line-based SDL scan.
type parseContext struct {
    inType        bool
    inEnum        bool
    currentType   string
    typeLines     []string
    currentEnum   string
    enumLines     []string
    customScalars map[string]bool
}

// parseGraphQL parses SDL content in three passes: custom scalars and
// enums first, then types, then relationships.
func (r *Reader) parseGraphQL(content string) (*models.Database, error) {
    dbName := "database"
    if r.options.Metadata != nil {
        if name, ok := r.options.Metadata["name"].(string); ok {
            dbName = name
        }
    }

    db := models.InitDatabase(dbName)
    schema := models.InitSchema("public")

    ctx := &parseContext{
        customScalars: make(map[string]bool),
    }

    // First pass: collect custom scalars and enums
    scanner := bufio.NewScanner(strings.NewReader(content))
    scalarRegex := regexp.MustCompile(`^\s*scalar\s+(\w+)`)
    enumRegex := regexp.MustCompile(`^\s*enum\s+(\w+)\s*\{`)
    closingBraceRegex := regexp.MustCompile(`^\s*\}`)

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := scalarRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.customScalars[matches[1]] = true
            continue
        }

        if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inEnum = true
            ctx.currentEnum = matches[1]
            ctx.enumLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inEnum {
            r.parseEnum(ctx.currentEnum, ctx.enumLines, schema)
            // Add enum name to custom scalars for type detection
            ctx.customScalars[ctx.currentEnum] = true
            ctx.inEnum = false
            ctx.currentEnum = ""
            ctx.enumLines = nil
            continue
        }

        if ctx.inEnum {
            ctx.enumLines = append(ctx.enumLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    // Second pass: parse types
    scanner = bufio.NewScanner(strings.NewReader(content))
    typeRegex := regexp.MustCompile(`^\s*type\s+(\w+)\s*\{`)
    ctx.inType = false
    ctx.inEnum = false

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := typeRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inType = true
            ctx.currentType = matches[1]
            ctx.typeLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inType {
            if err := r.parseType(ctx.currentType, ctx.typeLines, schema, ctx); err != nil {
                return nil, fmt.Errorf("failed to parse type %s: %w", ctx.currentType, err)
            }
            ctx.inType = false
            ctx.currentType = ""
            ctx.typeLines = nil
            continue
        }

        if ctx.inType {
            ctx.typeLines = append(ctx.typeLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    db.Schemas = []*models.Schema{schema}

    // Third pass: detect and create relationships
    if err := r.detectAndCreateRelationships(schema, ctx); err != nil {
        return nil, fmt.Errorf("failed to create relationships: %w", err)
    }

    return db, nil
}

// fieldInfo describes a parsed GraphQL field for relationship detection.
type fieldInfo struct {
    name          string
    typeName      string
    isArray       bool
    isNullable    bool
    innerNullable bool
}

// parseType converts a GraphQL type body into a table, deferring
// relation fields to the relationship-detection pass.
func (r *Reader) parseType(typeName string, lines []string, schema *models.Schema, ctx *parseContext) error {
    table := models.InitTable(typeName, schema.Name)
    table.Metadata = make(map[string]any)

    // Store field info for relationship detection
    relationFields := make(map[string]*fieldInfo)

    fieldRegex := regexp.MustCompile(`^\s*(\w+)\s*:\s*(\[)?(\w+)(!)?(\])?(!)?\s*`)

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        matches := fieldRegex.FindStringSubmatch(trimmed)
        if matches == nil {
            continue
        }

        fieldName := matches[1]
        hasOpenBracket := matches[2] == "["
        baseType := matches[3]
        innerNonNull := matches[4] == "!"
        hasCloseBracket := matches[5] == "]"
        outerNonNull := matches[6] == "!"

        isArray := hasOpenBracket && hasCloseBracket

        // Determine if this is a scalar or a relation
        if r.isScalarType(baseType, ctx) {
            // This is a scalar field
            column := models.InitColumn(fieldName, table.Name, schema.Name)
            column.Type = r.graphQLTypeToSQL(baseType, fieldName, typeName)

            if isArray {
                // Array of scalars: use array type
                column.Type += "[]"
                column.NotNull = outerNonNull
            } else {
                column.NotNull = !isArray && innerNonNull
            }

            // Check if this is a primary key (convention: field named "id")
            if fieldName == "id" {
                column.IsPrimaryKey = true
                column.AutoIncrement = true
            }

            table.Columns[fieldName] = column
        } else {
            // This is a relation field - store for later processing
            relationFields[fieldName] = &fieldInfo{
                name:          fieldName,
                typeName:      baseType,
                isArray:       isArray,
                isNullable:    !innerNonNull && !isArray,
                innerNullable: !innerNonNull && isArray,
            }
        }
    }

    // Store relation fields in table metadata for relationship detection
    if len(relationFields) > 0 {
        table.Metadata["relationFields"] = relationFields
    }

    schema.Tables = append(schema.Tables, table)
    return nil
}

// parseEnum converts collected enum body lines into an Enum model.
func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
    enum := models.InitEnum(enumName, schema.Name)

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }
        // Enum values are simple identifiers
        enum.Values = append(enum.Values, trimmed)
    }

    schema.Enums = append(schema.Enums, enum)
}
362
pkg/readers/graphql/reader_test.go
Normal file
@@ -0,0 +1,362 @@
package graphql

import (
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
    }

    userTable := schema.Tables[0]
    if userTable.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", userTable.Name)
    }

    // Verify columns
    expectedColumns := map[string]struct {
        sqlType string
        notNull bool
        isPK    bool
    }{
        "id":     {"bigint", true, true},
        "email":  {"text", true, false},
        "name":   {"text", false, false},
        "age":    {"integer", false, false},
        "active": {"boolean", true, false},
    }

    if len(userTable.Columns) != len(expectedColumns) {
        t.Fatalf("Expected %d columns, got %d", len(expectedColumns), len(userTable.Columns))
    }

    for colName, expected := range expectedColumns {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expected.sqlType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expected.sqlType, col.Type)
        }

        if col.NotNull != expected.notNull {
            t.Errorf("Column '%s': expected NotNull=%v, got %v", colName, expected.notNull, col.NotNull)
        }

        if col.IsPrimaryKey != expected.isPK {
            t.Errorf("Column '%s': expected IsPrimaryKey=%v, got %v", colName, expected.isPK, col.IsPrimaryKey)
        }
    }
}

func TestReader_ReadDatabase_WithRelations(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "relations.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Tables) != 2 {
        t.Fatalf("Expected 2 tables, got %d", len(schema.Tables))
    }

    // Find Post table (should have FK to User)
    var postTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "Post" {
            postTable = table
            break
        }
    }

    if postTable == nil {
        t.Fatal("Post table not found")
    }

    // Verify authorId FK column was created
    authorIdCol, exists := postTable.Columns["authorId"]
    if !exists {
        t.Fatal("Expected 'authorId' FK column not found in Post table")
    }

    if authorIdCol.Type != "bigint" {
        t.Errorf("Expected authorId type 'bigint', got '%s'", authorIdCol.Type)
    }

    if !authorIdCol.NotNull {
        t.Error("Expected authorId to be NOT NULL")
    }

    // Verify FK constraint
    fkConstraintFound := false
    for _, constraint := range postTable.Constraints {
        if constraint.Type == models.ForeignKeyConstraint {
            if constraint.ReferencedTable == "User" && len(constraint.Columns) > 0 && constraint.Columns[0] == "authorId" {
                fkConstraintFound = true
                if constraint.OnDelete != "CASCADE" {
                    t.Errorf("Expected OnDelete CASCADE, got %s", constraint.OnDelete)
                }
                break
            }
        }
    }

    if !fkConstraintFound {
        t.Error("Foreign key constraint from Post to User not found")
    }
}

func TestReader_ReadDatabase_WithEnums(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "enums.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Enums) != 1 {
        t.Fatalf("Expected 1 enum, got %d", len(schema.Enums))
    }

    roleEnum := schema.Enums[0]
    if roleEnum.Name != "Role" {
        t.Errorf("Expected enum name 'Role', got '%s'", roleEnum.Name)
    }

    expectedValues := []string{"ADMIN", "USER", "GUEST"}
    if len(roleEnum.Values) != len(expectedValues) {
        t.Fatalf("Expected %d enum values, got %d", len(expectedValues), len(roleEnum.Values))
    }

    for i, expected := range expectedValues {
        if roleEnum.Values[i] != expected {
            t.Errorf("Expected enum value '%s' at index %d, got '%s'", expected, i, roleEnum.Values[i])
        }
    }

    // Verify role column in User table
    userTable := schema.Tables[0]
    roleCol, exists := userTable.Columns["role"]
    if !exists {
        t.Fatal("Expected 'role' column not found")
    }

    if roleCol.Type != "Role" {
        t.Errorf("Expected role type 'Role', got '%s'", roleCol.Type)
    }
}

func TestReader_ReadDatabase_CustomScalars(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "custom_scalars.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    // Verify custom scalar mappings
    expectedTypes := map[string]string{
        "createdAt": "timestamp",
        "metadata":  "jsonb",
        "birthDate": "date",
    }

    for colName, expectedType := range expectedTypes {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expectedType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expectedType, col.Type)
        }
    }
}

func TestReader_ReadDatabase_UUIDMetadata(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
        Metadata: map[string]interface{}{
            "idType": "uuid",
        },
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    idCol, exists := userTable.Columns["id"]
    if !exists {
        t.Fatal("Expected 'id' column not found")
    }

    if idCol.Type != "uuid" {
        t.Errorf("Expected id type 'uuid' with metadata, got '%s'", idCol.Type)
    }
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "complex.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    // Should have 5 tables: User, Profile, Post, Tag, and PostTag (join table)
    expectedTableCount := 5
    if len(schema.Tables) != expectedTableCount {
        t.Fatalf("Expected %d tables, got %d", expectedTableCount, len(schema.Tables))
    }

    // Verify PostTag join table exists (many-to-many between Post and Tag)
    var joinTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "PostTag" {
            joinTable = table
            break
        }
    }

    if joinTable == nil {
        t.Fatal("Expected PostTag join table not found")
    }

    // Verify join table has both FK columns
    if _, exists := joinTable.Columns["postId"]; !exists {
        t.Error("Expected 'postId' column in PostTag join table")
    }

    if _, exists := joinTable.Columns["tagId"]; !exists {
        t.Error("Expected 'tagId' column in PostTag join table")
    }

    // Verify composite primary key
    pkFound := false
    for _, constraint := range joinTable.Constraints {
        if constraint.Type == models.PrimaryKeyConstraint {
            if len(constraint.Columns) == 2 {
                pkFound = true
            }
            break
        }
    }

    if !pkFound {
        t.Error("Expected composite primary key in PostTag join table")
    }
}

func TestReader_ReadSchema(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    schema, err := reader.ReadSchema()
    if err != nil {
        t.Fatalf("ReadSchema() error = %v", err)
    }

    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Errorf("Expected 1 table, got %d", len(schema.Tables))
    }
}

func TestReader_ReadTable(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    table, err := reader.ReadTable()
    if err != nil {
        t.Fatalf("ReadTable() error = %v", err)
    }

    if table.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", table.Name)
    }
}

func TestReader_InvalidPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "/nonexistent/path.graphql",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for invalid path, got nil")
    }
}

func TestReader_EmptyPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for empty path, got nil")
    }
}
225
pkg/readers/graphql/relationships.go
Normal file
@@ -0,0 +1,225 @@
package graphql

import (
	"fmt"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (r *Reader) detectAndCreateRelationships(schema *models.Schema, ctx *parseContext) error {
	// Build table lookup map
	tableMap := make(map[string]*models.Table)
	for _, table := range schema.Tables {
		tableMap[table.Name] = table
	}

	// Process each table's relation fields
	for _, table := range schema.Tables {
		relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
		if !ok || len(relationFields) == 0 {
			continue
		}

		for fieldName, fieldInfo := range relationFields {
			targetTable, exists := tableMap[fieldInfo.typeName]
			if !exists {
				// Referenced type doesn't exist - might be an interface/union, skip
				continue
			}

			if fieldInfo.isArray {
				// This is a one-to-many or many-to-many reverse side
				// Check if target table has a reverse array field
				if r.hasReverseArrayField(targetTable, table.Name) {
					// Bidirectional array = many-to-many
					// Only create join table once (lexicographically first table creates it)
					if table.Name < targetTable.Name {
						if err := r.createManyToManyJoinTable(schema, table, targetTable, fieldName, tableMap); err != nil {
							return err
						}
					}
				}
				// For one-to-many, no action needed (FK is on the other table)
			} else {
				// This is a many-to-one or one-to-one
				// Create FK column on this table
				if err := r.createForeignKeyColumn(table, targetTable, fieldName, fieldInfo.isNullable, schema); err != nil {
					return err
				}
			}
		}
	}

	// Clean up metadata
	for _, table := range schema.Tables {
		delete(table.Metadata, "relationFields")
	}

	return nil
}

func (r *Reader) hasReverseArrayField(table *models.Table, targetTypeName string) bool {
	relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
	if !ok {
		return false
	}

	for _, fieldInfo := range relationFields {
		if fieldInfo.typeName == targetTypeName && fieldInfo.isArray {
			return true
		}
	}

	return false
}

func (r *Reader) createForeignKeyColumn(fromTable, toTable *models.Table, fieldName string, nullable bool, schema *models.Schema) error {
	// Get primary key from target table
	pkCol := toTable.GetPrimaryKey()
	if pkCol == nil {
		return fmt.Errorf("target table %s has no primary key for relationship", toTable.Name)
	}

	// Create FK column name: {fieldName}Id
	fkColName := fieldName + "Id"

	// Check if column already exists (shouldn't happen but be safe)
	if _, exists := fromTable.Columns[fkColName]; exists {
		return nil
	}

	// Create FK column
	fkCol := models.InitColumn(fkColName, fromTable.Name, schema.Name)
	fkCol.Type = pkCol.Type
	fkCol.NotNull = !nullable

	fromTable.Columns[fkColName] = fkCol

	// Create FK constraint
	constraint := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", fromTable.Name, fieldName),
		models.ForeignKeyConstraint,
	)
	constraint.Schema = schema.Name
	constraint.Table = fromTable.Name
	constraint.Columns = []string{fkColName}
	constraint.ReferencedSchema = schema.Name
	constraint.ReferencedTable = toTable.Name
	constraint.ReferencedColumns = []string{pkCol.Name}
	constraint.OnDelete = "CASCADE"
	constraint.OnUpdate = "RESTRICT"

	fromTable.Constraints[constraint.Name] = constraint

	// Create relationship
	relationship := models.InitRelationship(
		fmt.Sprintf("rel_%s_%s", fromTable.Name, fieldName),
		models.OneToMany,
	)
	relationship.FromTable = fromTable.Name
	relationship.FromSchema = schema.Name
	relationship.FromColumns = []string{fkColName}
	relationship.ToTable = toTable.Name
	relationship.ToSchema = schema.Name
	relationship.ToColumns = []string{pkCol.Name}
	relationship.ForeignKey = constraint.Name

	fromTable.Relationships[relationship.Name] = relationship

	return nil
}

func (r *Reader) createManyToManyJoinTable(schema *models.Schema, table1, table2 *models.Table, fieldName string, tableMap map[string]*models.Table) error {
	// Create join table name
	joinTableName := table1.Name + table2.Name

	// Check if join table already exists
	if _, exists := tableMap[joinTableName]; exists {
		return nil
	}

	// Get primary keys
	pk1 := table1.GetPrimaryKey()
	pk2 := table2.GetPrimaryKey()

	if pk1 == nil || pk2 == nil {
		return fmt.Errorf("cannot create many-to-many: tables must have primary keys")
	}

	// Create join table
	joinTable := models.InitTable(joinTableName, schema.Name)

	// Create FK column for table1
	fkCol1Name := strings.ToLower(table1.Name) + "Id"
	fkCol1 := models.InitColumn(fkCol1Name, joinTable.Name, schema.Name)
	fkCol1.Type = pk1.Type
	fkCol1.NotNull = true
	joinTable.Columns[fkCol1Name] = fkCol1

	// Create FK column for table2
	fkCol2Name := strings.ToLower(table2.Name) + "Id"
	fkCol2 := models.InitColumn(fkCol2Name, joinTable.Name, schema.Name)
	fkCol2.Type = pk2.Type
	fkCol2.NotNull = true
	joinTable.Columns[fkCol2Name] = fkCol2

	// Create composite primary key
	pkConstraint := models.InitConstraint(
		fmt.Sprintf("pk_%s", joinTableName),
		models.PrimaryKeyConstraint,
	)
	pkConstraint.Schema = schema.Name
	pkConstraint.Table = joinTable.Name
	pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
	joinTable.Constraints[pkConstraint.Name] = pkConstraint

	// Create FK constraint to table1
	fk1 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, table1.Name),
		models.ForeignKeyConstraint,
	)
	fk1.Schema = schema.Name
	fk1.Table = joinTable.Name
	fk1.Columns = []string{fkCol1Name}
	fk1.ReferencedSchema = schema.Name
	fk1.ReferencedTable = table1.Name
	fk1.ReferencedColumns = []string{pk1.Name}
	fk1.OnDelete = "CASCADE"
	fk1.OnUpdate = "RESTRICT"
	joinTable.Constraints[fk1.Name] = fk1

	// Create FK constraint to table2
	fk2 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, table2.Name),
		models.ForeignKeyConstraint,
	)
	fk2.Schema = schema.Name
	fk2.Table = joinTable.Name
	fk2.Columns = []string{fkCol2Name}
	fk2.ReferencedSchema = schema.Name
	fk2.ReferencedTable = table2.Name
	fk2.ReferencedColumns = []string{pk2.Name}
	fk2.OnDelete = "CASCADE"
	fk2.OnUpdate = "RESTRICT"
	joinTable.Constraints[fk2.Name] = fk2

	// Create relationships
	rel1 := models.InitRelationship(
		fmt.Sprintf("rel_%s_%s_%s", joinTableName, table1.Name, table2.Name),
		models.ManyToMany,
	)
	rel1.FromTable = table1.Name
	rel1.FromSchema = schema.Name
	rel1.ToTable = table2.Name
	rel1.ToSchema = schema.Name
	rel1.ThroughTable = joinTableName
	rel1.ThroughSchema = schema.Name
	joinTable.Relationships[rel1.Name] = rel1

	// Add join table to schema
	schema.Tables = append(schema.Tables, joinTable)
	tableMap[joinTableName] = joinTable

	return nil
}
97
pkg/readers/graphql/type_mapping.go
Normal file
@@ -0,0 +1,97 @@
package graphql

func (r *Reader) isScalarType(typeName string, ctx *parseContext) bool {
	// Built-in GraphQL scalars
	builtInScalars := map[string]bool{
		"ID":      true,
		"String":  true,
		"Int":     true,
		"Float":   true,
		"Boolean": true,
	}

	if builtInScalars[typeName] {
		return true
	}

	// Custom scalars declared in the schema
	if ctx.customScalars[typeName] {
		return true
	}

	// Common custom scalars (even if not declared)
	commonCustomScalars := map[string]bool{
		"DateTime": true,
		"JSON":     true,
		"Date":     true,
		"Time":     true,
		"Upload":   true,
		"Decimal":  true,
	}

	return commonCustomScalars[typeName]
}

func (r *Reader) graphQLTypeToSQL(gqlType string, fieldName string, typeName string) string {
	// Check for ID type with configurable mapping
	if gqlType == "ID" {
		// Check metadata for ID type preference
		if r.options.Metadata != nil {
			// Global idType setting
			if idType, ok := r.options.Metadata["idType"].(string); ok {
				if idType == "uuid" {
					return "uuid"
				}
			}

			// Per-type ID mapping
			if typeIdMappings, ok := r.options.Metadata["typeIdMappings"].(map[string]string); ok {
				if idType, ok := typeIdMappings[typeName]; ok {
					if idType == "uuid" {
						return "uuid"
					}
				}
			}
		}

		return "bigint" // Default
	}

	// Custom scalar mappings
	if r.options.Metadata != nil {
		if customMappings, ok := r.options.Metadata["customScalarMappings"].(map[string]string); ok {
			if sqlType, ok := customMappings[gqlType]; ok {
				return sqlType
			}
		}
	}

	// Built-in custom scalar mappings
	customScalars := map[string]string{
		"DateTime": "timestamp",
		"JSON":     "jsonb",
		"Date":     "date",
		"Time":     "time",
		"Decimal":  "numeric",
		"Upload":   "bytea",
	}
	if sqlType, ok := customScalars[gqlType]; ok {
		return sqlType
	}

	// Standard scalar mappings
	typeMap := map[string]string{
		"String":  "text",
		"Int":     "integer",
		"Float":   "double precision",
		"Boolean": "boolean",
	}

	if sqlType, ok := typeMap[gqlType]; ok {
		return sqlType
	}

	// If not a known scalar, assume it's an enum or custom type
	// Return as-is (might be an enum)
	return gqlType
}
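// A hedged usage sketch for the metadata hooks implemented above. The option
// keys ("idType", "typeIdMappings", "customScalarMappings") are the ones this
// file reads; the schema path and the "Money" scalar are illustrative only.
//
//	opts := &readers.ReaderOptions{
//		FilePath: "schema.graphql", // illustrative path
//		Metadata: map[string]any{
//			// Map User's ID field to uuid; other IDs keep the bigint default.
//			"typeIdMappings": map[string]string{"User": "uuid"},
//			// Map a custom scalar the built-in table doesn't know about.
//			"customScalarMappings": map[string]string{"Money": "numeric(12,2)"},
//		},
//	}
//	reader := NewReader(opts)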
152
pkg/readers/json/README.md
Normal file
@@ -0,0 +1,152 @@
# JSON Reader

Reads database schema definitions from JSON files.

## Overview

The JSON Reader parses JSON files that define database schemas in RelSpec's canonical JSON format and converts them into RelSpec's internal database model representation.

## Features

- Reads RelSpec's standard JSON schema format
- Supports complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.json",
	}

	reader := json.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read JSON schema and convert to GORM models
relspec --input json --in-file schema.json --output gorm --out-file models.go

# Convert JSON to PostgreSQL DDL
relspec --input json --in-file database.json --output pgsql --out-file schema.sql

# Transform JSON to YAML
relspec --input json --in-file schema.json --output yaml --out-file schema.yaml
```

## Example JSON Schema

```json
{
  "name": "myapp",
  "database_type": "postgresql",
  "schemas": [
    {
      "name": "public",
      "tables": [
        {
          "name": "users",
          "schema": "public",
          "columns": {
            "id": {
              "name": "id",
              "type": "bigint",
              "not_null": true,
              "is_primary_key": true,
              "auto_increment": true,
              "sequence": 1
            },
            "username": {
              "name": "username",
              "type": "varchar",
              "length": 50,
              "not_null": true,
              "sequence": 2
            },
            "email": {
              "name": "email",
              "type": "varchar",
              "length": 100,
              "not_null": true,
              "sequence": 3
            }
          },
          "constraints": {
            "pk_users": {
              "name": "pk_users",
              "type": "PRIMARY KEY",
              "columns": ["id"]
            },
            "uq_users_username": {
              "name": "uq_users_username",
              "type": "UNIQUE",
              "columns": ["username"]
            }
          },
          "indexes": {
            "idx_users_email": {
              "name": "idx_users_email",
              "columns": ["email"],
              "unique": false,
              "type": "btree"
            }
          }
        }
      ]
    }
  ]
}
```

## Schema Structure

The JSON format follows RelSpec's internal model structure (a short traversal sketch follows this list):

- `Database` - Top-level container
  - `name` - Database name
  - `database_type` - Database system (postgresql, mysql, etc.)
  - `schemas[]` - Array of schemas

- `Schema` - Schema/namespace
  - `name` - Schema name
  - `tables[]` - Array of tables
  - `views[]` - Array of views
  - `sequences[]` - Array of sequences

- `Table` - Table definition
  - `name` - Table name
  - `columns{}` - Map of columns
  - `constraints{}` - Map of constraints
  - `indexes{}` - Map of indexes
  - `relationships{}` - Map of relationships
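A minimal traversal sketch over the parsed model, as referenced above; the field names (`Schemas`, `Tables`, `Columns`, `Name`) follow the structure listed here, and the file path is illustrative:

```go
db, err := json.NewReader(&readers.ReaderOptions{FilePath: "schema.json"}).ReadDatabase()
if err != nil {
	panic(err)
}
for _, schema := range db.Schemas {
	for _, table := range schema.Tables {
		fmt.Printf("%s.%s: %d columns\n", schema.Name, table.Name, len(table.Columns))
	}
}
```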
## Notes

- This is RelSpec's native interchange format
- Preserves complete schema information
- Ideal for version control and schema documentation
- Can be used as an intermediate format for transformations
138
pkg/readers/pgsql/README.md
Normal file
@@ -0,0 +1,138 @@
# PostgreSQL Reader

Reads schema information directly from a live PostgreSQL database.

## Overview

The PostgreSQL Reader connects to a PostgreSQL database and introspects its schema, extracting complete information about tables, columns, constraints, indexes, views, and sequences.

## Features

- Direct database introspection
- Extracts complete schema information including:
  - Tables and columns
  - Primary keys, foreign keys, unique constraints, check constraints
  - Indexes
  - Views
  - Sequences
- Supports multiple schemas
- Captures constraint actions (ON DELETE, ON UPDATE)
- Derives relationships from foreign keys

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
)

func main() {
	options := &readers.ReaderOptions{
		ConnectionString: "postgres://user:password@localhost:5432/mydb?sslmode=disable",
	}

	reader := pgsql.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Database: %s\n", db.Name)
	fmt.Printf("Schemas: %d\n", len(db.Schemas))
	for _, schema := range db.Schemas {
		fmt.Printf("  Schema: %s, Tables: %d\n", schema.Name, len(schema.Tables))
	}
}
```

### CLI Example

```bash
# Inspect PostgreSQL database and export to JSON
relspec --input pgsql \
  --conn "postgres://user:password@localhost:5432/mydb" \
  --output json \
  --out-file schema.json

# Generate GORM models from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://user:password@localhost:5432/mydb" \
  --output gorm \
  --out-file models.go

# Export database structure to YAML
relspec --input pgsql \
  --conn "postgres://localhost/mydb?sslmode=disable" \
  --output yaml \
  --out-file schema.yaml
```

## Connection String Format

The reader uses PostgreSQL connection strings in the format:

```
postgres://username:password@hostname:port/database?parameters
```

Examples:

```
postgres://localhost/mydb
postgres://user:pass@localhost:5432/mydb
postgres://user@localhost/mydb?sslmode=disable
postgres://user:pass@db.example.com:5432/production?sslmode=require
```

## Extracted Information

The subsections below list what the reader captures; a short access sketch follows them.

### Tables
- Table name and schema
- Comments/descriptions
- All columns with data types, nullability, defaults
- Sequences

### Columns
- Column name, data type, length/precision
- NULL/NOT NULL constraints
- Default values
- Auto-increment information
- Primary key designation

### Constraints
- Primary keys
- Foreign keys (with ON DELETE/UPDATE actions)
- Unique constraints
- Check constraints

### Indexes
- Index name and type (btree, hash, gist, gin, etc.)
- Columns in index
- Unique/non-unique
- Partial indexes

### Views
- View definitions
- Column information

### Sequences
- Sequence properties
- Associated tables
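As referenced above, a minimal sketch that walks the introspected model and prints each foreign key with its referential actions; the constraint field names (`Type`, `Columns`, `ReferencedTable`, `OnDelete`, `OnUpdate`) follow the `models` package as used elsewhere in this repository, and the connection string is illustrative:

```go
db, err := pgsql.NewReader(&readers.ReaderOptions{
	ConnectionString: "postgres://localhost/mydb?sslmode=disable",
}).ReadDatabase()
if err != nil {
	panic(err)
}
for _, schema := range db.Schemas {
	for _, table := range schema.Tables {
		for _, c := range table.Constraints {
			if c.Type == models.ForeignKeyConstraint {
				fmt.Printf("%s.%v -> %s (ON DELETE %s, ON UPDATE %s)\n",
					table.Name, c.Columns, c.ReferencedTable, c.OnDelete, c.OnUpdate)
			}
		}
	}
}
```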
## Notes

- Requires PostgreSQL connection permissions
- Reads all non-system schemas (excludes pg_catalog, information_schema, pg_toast)
- Captures PostgreSQL-specific data types
- Automatically maps PostgreSQL types to canonical types
- Preserves relationship metadata for downstream conversion

## Requirements

- Go library: `github.com/jackc/pgx/v5`
- Database user must have SELECT permissions on system catalogs
103
pkg/readers/prisma/README.md
Normal file
@@ -0,0 +1,103 @@
# Prisma Reader

Reads Prisma schema files and extracts database schema information.

## Overview

The Prisma Reader parses `.prisma` schema files that define database models using Prisma's schema language and converts them into RelSpec's internal database model representation.

## Features

- Parses Prisma schema syntax
- Extracts models, fields, and relationships
- Supports Prisma attributes and directives
- Handles enums and composite types

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.prisma",
	}

	reader := prisma.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Prisma schema and convert to JSON
relspec --input prisma --in-file schema.prisma --output json --out-file schema.json

# Convert Prisma to GORM models
relspec --input prisma --in-file schema.prisma --output gorm --out-file models.go
```

## Example Prisma Schema

```prisma
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

generator client {
  provider = "prisma-client-js"
}

model User {
  id        Int      @id @default(autoincrement())
  username  String   @unique @db.VarChar(50)
  email     String   @db.VarChar(100)
  createdAt DateTime @default(now()) @map("created_at")

  posts Post[]

  @@map("users")
}

model Post {
  id      Int    @id @default(autoincrement())
  userId  Int    @map("user_id")
  title   String @db.VarChar(200)
  content String @db.Text

  user User @relation(fields: [userId], references: [id], onDelete: Cascade)

  @@map("posts")
}
```

## Supported Prisma Attributes

The attributes below are recognized (a short sketch of how they surface on the parsed model follows this list):

- `@id` - Primary key
- `@unique` - Unique constraint
- `@default` - Default value
- `@map` - Column name mapping
- `@@map` - Table name mapping
- `@relation` - Relationship definition
- `@db.*` - Database-specific type annotations
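As noted above, a minimal sketch of how these attributes surface on the parsed model, assuming the example schema is saved as `schema.prisma`; `@id` becomes a primary-key column, `@unique` a unique constraint, and `@relation` a foreign-key constraint:

```go
db, err := prisma.NewReader(&readers.ReaderOptions{FilePath: "schema.prisma"}).ReadDatabase()
if err != nil {
	panic(err)
}
for _, table := range db.Schemas[0].Tables {
	for _, c := range table.Constraints {
		fmt.Printf("%s: %v on %v\n", table.Name, c.Type, c.Columns)
	}
}
```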
## Notes

- Extracts datasource provider information
- Supports `@@map` for custom table names
- Handles Prisma-specific types and converts them to standard SQL types
811
pkg/readers/prisma/reader.go
Normal file
@@ -0,0 +1,811 @@
package prisma

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for Prisma schema format
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new Prisma reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads and parses Prisma schema input, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for Prisma reader")
	}

	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	return r.parsePrisma(string(content))
}

// ReadSchema reads and parses Prisma schema input, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}

	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in Prisma schema")
	}

	// Return the first schema
	return db.Schemas[0], nil
}

// ReadTable reads and parses Prisma schema input, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}

	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in Prisma schema")
	}

	// Return the first table
	return schema.Tables[0], nil
}

// parsePrisma parses Prisma schema content and returns a Database model
func (r *Reader) parsePrisma(content string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Prisma (it has no explicit schema concept in most cases)
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)

	scanner := bufio.NewScanner(strings.NewReader(content))

	// State tracking
	var currentBlock string // "datasource", "generator", "model", "enum"
	var currentTable *models.Table
	var currentEnum *models.Enum
	var blockContent []string

	// Regex patterns
	datasourceRegex := regexp.MustCompile(`^datasource\s+\w+\s*{`)
	generatorRegex := regexp.MustCompile(`^generator\s+\w+\s*{`)
	modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
	enumRegex := regexp.MustCompile(`^enum\s+(\w+)\s*{`)

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for block start
		if matches := datasourceRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "datasource"
			blockContent = []string{}
			continue
		}

		if matches := generatorRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "generator"
			blockContent = []string{}
			continue
		}

		if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "model"
			tableName := matches[1]
			currentTable = models.InitTable(tableName, "public")
			blockContent = []string{}
			continue
		}

		if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "enum"
			enumName := matches[1]
			currentEnum = models.InitEnum(enumName, "public")
			blockContent = []string{}
			continue
		}

		// Check for block end
		if trimmed == "}" {
			switch currentBlock {
			case "datasource":
				r.parseDatasource(blockContent, db)
			case "generator":
				// We don't need to do anything with generator blocks
			case "model":
				if currentTable != nil {
					r.parseModelFields(blockContent, currentTable)
					schema.Tables = append(schema.Tables, currentTable)
					currentTable = nil
				}
			case "enum":
				if currentEnum != nil {
					schema.Enums = append(schema.Enums, currentEnum)
					currentEnum = nil
				}
			}
			currentBlock = ""
			blockContent = []string{}
			continue
		}

		// Accumulate block content
		if currentBlock != "" {
			if currentBlock == "enum" && currentEnum != nil {
				// For enums, just add the trimmed value
				if trimmed != "" {
					currentEnum.Values = append(currentEnum.Values, trimmed)
				}
			} else {
				blockContent = append(blockContent, line)
			}
		}
	}

	// Second pass: resolve relationships
	r.resolveRelationships(schema)

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parseDatasource extracts the database type from a datasource block
func (r *Reader) parseDatasource(lines []string, db *models.Database) {
	providerRegex := regexp.MustCompile(`provider\s*=\s*"?(\w+)"?`)

	for _, line := range lines {
		if matches := providerRegex.FindStringSubmatch(line); matches != nil {
			provider := matches[1]
			switch provider {
			case "postgresql", "postgres":
				db.DatabaseType = models.PostgresqlDatabaseType
			case "mysql":
				db.DatabaseType = "mysql"
			case "sqlite":
				db.DatabaseType = models.SqlLiteDatabaseType
			case "sqlserver":
				db.DatabaseType = models.MSSQLDatabaseType
			default:
				db.DatabaseType = models.PostgresqlDatabaseType
			}
			break
		}
	}
}

// parseModelFields parses model field definitions
func (r *Reader) parseModelFields(lines []string, table *models.Table) {
	fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)
	blockAttrRegex := regexp.MustCompile(`^@@(\w+)\((.*?)\)`)

	for _, line := range lines {
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for block attributes (@@id, @@unique, @@index)
		if matches := blockAttrRegex.FindStringSubmatch(trimmed); matches != nil {
			attrName := matches[1]
			attrContent := matches[2]
			r.parseBlockAttribute(attrName, attrContent, table)
			continue
		}

		// Parse field definition
		if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
			fieldName := matches[1]
			fieldType := matches[2]
			modifier := matches[3]   // ? or []
			attributes := matches[4] // @... part

			column := r.parseField(fieldName, fieldType, modifier, attributes, table)
			if column != nil {
				table.Columns[column.Name] = column
			}
		}
	}
}

// parseField parses a single field definition
func (r *Reader) parseField(name, fieldType, modifier, attributes string, table *models.Table) *models.Column {
	// Check if this is a relation field (array or references another model)
	if modifier == "[]" {
		// Array field - this is a relation field, not a column
		// We'll handle this in relationship resolution
		return nil
	}

	// Check if this is a non-primitive type (relation field)
	// Note: We need to allow enum types through as they're like primitives
	if !r.isPrimitiveType(fieldType) && !r.isEnumType(fieldType, table) {
		// This is a relation field (e.g., user User), not a scalar column
		// Only process this if it has @relation attribute (which means it's the owning side with FK)
		// Otherwise skip it as it's just the inverse relation field
		if attributes == "" || !strings.Contains(attributes, "@relation") {
			return nil
		}
		// If it has @relation, we still don't create a column for it
		// The actual FK column will be in the fields: [...] part of @relation
		return nil
	}

	column := models.InitColumn(name, table.Name, table.Schema)

	// Map Prisma type to SQL type
	column.Type = r.prismaTypeToSQL(fieldType)

	// Handle modifiers
	if modifier == "?" {
		column.NotNull = false
	} else {
		// Default: required fields are NOT NULL
		column.NotNull = true
	}

	// Parse field attributes
	if attributes != "" {
		r.parseFieldAttributes(attributes, column, table)
	}

	return column
}

// prismaTypeToSQL converts Prisma types to SQL types
func (r *Reader) prismaTypeToSQL(prismaType string) string {
	typeMap := map[string]string{
		"String":   "text",
		"Boolean":  "boolean",
		"Int":      "integer",
		"BigInt":   "bigint",
		"Float":    "double precision",
		"Decimal":  "decimal",
		"DateTime": "timestamp",
		"Json":     "jsonb",
		"Bytes":    "bytea",
	}

	if sqlType, ok := typeMap[prismaType]; ok {
		return sqlType
	}

	// If not a built-in type, it might be an enum or model reference
	// For enums, we'll use the enum name directly
	return prismaType
}

// parseFieldAttributes parses field attributes like @id, @unique, @default
func (r *Reader) parseFieldAttributes(attributes string, column *models.Column, table *models.Table) {
	// @id attribute
	if strings.Contains(attributes, "@id") {
		column.IsPrimaryKey = true
		column.NotNull = true
	}

	// @unique attribute
	if regexp.MustCompile(`@unique\b`).MatchString(attributes) {
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s", column.Name),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = []string{column.Name}
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint
	}

	// @default attribute - extract value with balanced parentheses
	if strings.Contains(attributes, "@default(") {
		defaultValue := r.extractDefaultValue(attributes)
		if defaultValue != "" {
			r.parseDefaultValue(defaultValue, column)
		}
	}

	// @updatedAt attribute - store in comment for now
	if strings.Contains(attributes, "@updatedAt") {
		if column.Comment != "" {
			column.Comment += "; @updatedAt"
		} else {
			column.Comment = "@updatedAt"
		}
	}

	// @relation attribute - we'll handle this in relationship resolution
	// For now, just note that this field is part of a relation
}

// extractDefaultValue extracts the default value from @default(...), handling nested parentheses
func (r *Reader) extractDefaultValue(attributes string) string {
	idx := strings.Index(attributes, "@default(")
	if idx == -1 {
		return ""
	}

	start := idx + len("@default(")
	depth := 1
	i := start

	for i < len(attributes) && depth > 0 {
		switch attributes[i] {
		case '(':
			depth++
		case ')':
			depth--
		}
		i++
	}

	if depth == 0 {
		return attributes[start : i-1]
	}

	return ""
}

// parseDefaultValue parses Prisma default value expressions
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
	defaultExpr = strings.TrimSpace(defaultExpr)

	switch defaultExpr {
	case "autoincrement()":
		column.AutoIncrement = true
	case "now()":
		column.Default = "now()"
	case "uuid()":
		column.Default = "gen_random_uuid()"
	case "cuid()":
		// CUID is Prisma-specific, store in comment
		if column.Comment != "" {
			column.Comment += "; default(cuid())"
		} else {
			column.Comment = "default(cuid())"
		}
	case "true":
		column.Default = true
	case "false":
		column.Default = false
	default:
		// Check if it's a string literal
		if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
			column.Default = defaultExpr[1 : len(defaultExpr)-1]
		} else if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
			column.Default = defaultExpr[1 : len(defaultExpr)-1]
		} else {
			// Try to parse as number or enum value
			column.Default = defaultExpr
		}
	}
}

// parseBlockAttribute parses block-level attributes like @@id, @@unique, @@index
func (r *Reader) parseBlockAttribute(attrName, content string, table *models.Table) {
	// Extract column list from brackets [col1, col2]
	colListRegex := regexp.MustCompile(`\[(.*?)\]`)
	matches := colListRegex.FindStringSubmatch(content)
	if matches == nil {
		return
	}

	columnList := strings.Split(matches[1], ",")
	columns := make([]string, 0)
	for _, col := range columnList {
		columns = append(columns, strings.TrimSpace(col))
	}

	switch attrName {
	case "id":
		// Composite primary key
		for _, colName := range columns {
			if col, exists := table.Columns[colName]; exists {
				col.IsPrimaryKey = true
				col.NotNull = true
			}
		}
		// Also create a PK constraint
		pkConstraint := models.InitConstraint(
			fmt.Sprintf("pk_%s", table.Name),
			models.PrimaryKeyConstraint,
		)
		pkConstraint.Schema = table.Schema
		pkConstraint.Table = table.Name
		pkConstraint.Columns = columns
		table.Constraints[pkConstraint.Name] = pkConstraint

	case "unique":
		// Multi-column unique constraint
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s_%s", table.Name, strings.Join(columns, "_")),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = columns
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint

	case "index":
		// Index
		index := models.InitIndex(
			fmt.Sprintf("idx_%s_%s", table.Name, strings.Join(columns, "_")),
			table.Name,
			table.Schema,
		)
		index.Columns = columns
		table.Indexes[index.Name] = index
	}
}

// relationField stores information about a relation field for second-pass processing
type relationField struct {
	tableName    string
	fieldName    string
	relatedModel string
	isArray      bool
	relationAttr string
}

// resolveRelationships performs a second pass to resolve @relation attributes
func (r *Reader) resolveRelationships(schema *models.Schema) {
	// Build a map of table names for quick lookup
	tableMap := make(map[string]*models.Table)
	for _, table := range schema.Tables {
		tableMap[table.Name] = table
	}

	// First, we need to re-parse to find relation fields
	// We'll re-read the file to extract relation information
	if r.options.FilePath == "" {
		return
	}

	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return
	}

	relations := r.extractRelationFields(string(content))

	// Process explicit @relation attributes to create FK constraints
	for _, rel := range relations {
		if rel.relationAttr != "" {
			r.createConstraintFromRelation(rel, tableMap, schema)
		}
	}

	// Detect implicit many-to-many relationships
	r.detectImplicitManyToMany(relations, tableMap, schema)
}

// extractRelationFields extracts relation field information from the schema
func (r *Reader) extractRelationFields(content string) []relationField {
	relations := make([]relationField, 0)
	scanner := bufio.NewScanner(strings.NewReader(content))

	modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
	fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)

	var currentModel string
	inModel := false

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
			currentModel = matches[1]
			inModel = true
			continue
		}

		if trimmed == "}" {
			inModel = false
			currentModel = ""
			continue
		}

		if inModel && currentModel != "" {
			if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
				fieldName := matches[1]
				fieldType := matches[2]
				modifier := matches[3]
				attributes := matches[4]

				// Check if this is a relation field (references another model or is an array)
				isPotentialRelation := modifier == "[]" || !r.isPrimitiveType(fieldType)

				if isPotentialRelation {
					rel := relationField{
						tableName:    currentModel,
						fieldName:    fieldName,
						relatedModel: fieldType,
						isArray:      modifier == "[]",
						relationAttr: attributes,
					}
					relations = append(relations, rel)
				}
			}
		}
	}

	return relations
}

// isPrimitiveType checks if a type is a Prisma primitive type
func (r *Reader) isPrimitiveType(typeName string) bool {
	primitives := []string{"String", "Boolean", "Int", "BigInt", "Float", "Decimal", "DateTime", "Json", "Bytes"}
	for _, p := range primitives {
		if typeName == p {
			return true
		}
	}
	return false
}

// isEnumType checks if a type name might be an enum
// Note: We can't definitively check against schema.Enums at parse time
// because enums might be defined after the model, so we just check
// if it starts with uppercase (Prisma convention for enums)
func (r *Reader) isEnumType(typeName string, table *models.Table) bool {
	// Simple heuristic: enum types start with an uppercase letter
	// and are not known model names (though we can't check that yet)
	if len(typeName) > 0 && typeName[0] >= 'A' && typeName[0] <= 'Z' {
		// Additional check: primitive types are already handled above
		// So if it's uppercase and not primitive, it's likely an enum or model
		// We'll assume it's an enum if it's a single word
		return !strings.Contains(typeName, "_")
	}
	return false
}

// createConstraintFromRelation creates a FK constraint from a @relation attribute
func (r *Reader) createConstraintFromRelation(rel relationField, tableMap map[string]*models.Table, schema *models.Schema) {
	// Skip array fields (they are the inverse side of the relation)
	if rel.isArray {
		return
	}

	if rel.relationAttr == "" {
		return
	}

	// Parse @relation attribute
	relationRegex := regexp.MustCompile(`@relation\((.*?)\)`)
	matches := relationRegex.FindStringSubmatch(rel.relationAttr)
	if matches == nil {
		return
	}

	relationContent := matches[1]

	// Extract fields and references
	fieldsRegex := regexp.MustCompile(`fields:\s*\[(.*?)\]`)
	referencesRegex := regexp.MustCompile(`references:\s*\[(.*?)\]`)
	nameRegex := regexp.MustCompile(`name:\s*"([^"]+)"`)
	onDeleteRegex := regexp.MustCompile(`onDelete:\s*(\w+)`)
	onUpdateRegex := regexp.MustCompile(`onUpdate:\s*(\w+)`)

	fieldsMatch := fieldsRegex.FindStringSubmatch(relationContent)
	referencesMatch := referencesRegex.FindStringSubmatch(relationContent)

	if fieldsMatch == nil || referencesMatch == nil {
		return
	}

	// Parse field and reference column lists
	fieldCols := r.parseColumnList(fieldsMatch[1])
	refCols := r.parseColumnList(referencesMatch[1])

	if len(fieldCols) == 0 || len(refCols) == 0 {
		return
	}

	// Create FK constraint
	constraintName := fmt.Sprintf("fk_%s_%s", rel.tableName, fieldCols[0])

	// Check for custom name
	if nameMatch := nameRegex.FindStringSubmatch(relationContent); nameMatch != nil {
		constraintName = nameMatch[1]
	}

	constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
	constraint.Schema = "public"
	constraint.Table = rel.tableName
	constraint.Columns = fieldCols
	constraint.ReferencedSchema = "public"
	constraint.ReferencedTable = rel.relatedModel
	constraint.ReferencedColumns = refCols

	// Parse referential actions
	if onDeleteMatch := onDeleteRegex.FindStringSubmatch(relationContent); onDeleteMatch != nil {
		constraint.OnDelete = onDeleteMatch[1]
	}

	if onUpdateMatch := onUpdateRegex.FindStringSubmatch(relationContent); onUpdateMatch != nil {
		constraint.OnUpdate = onUpdateMatch[1]
	}

	// Add constraint to table
	if table, exists := tableMap[rel.tableName]; exists {
		table.Constraints[constraint.Name] = constraint
	}
}

// parseColumnList parses a comma-separated list of column names
func (r *Reader) parseColumnList(list string) []string {
	parts := strings.Split(list, ",")
	result := make([]string, 0)
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		if trimmed != "" {
			result = append(result, trimmed)
		}
	}
	return result
}

// detectImplicitManyToMany detects implicit M2M relationships and creates join tables
func (r *Reader) detectImplicitManyToMany(relations []relationField, tableMap map[string]*models.Table, schema *models.Schema) {
	// Group relations by model pairs
	type modelPair struct {
		model1 string
		model2 string
	}

	pairMap := make(map[modelPair][]relationField)

	for _, rel := range relations {
		if !rel.isArray || rel.relationAttr != "" {
			// Skip non-array fields and explicit relations
			continue
		}

		// Create a normalized pair (alphabetically sorted to avoid duplicates)
		pair := modelPair{}
		if rel.tableName < rel.relatedModel {
			pair.model1 = rel.tableName
			pair.model2 = rel.relatedModel
		} else {
			pair.model1 = rel.relatedModel
			pair.model2 = rel.tableName
		}

		pairMap[pair] = append(pairMap[pair], rel)
	}

	// Check for pairs with arrays on both sides (implicit M2M)
	for pair, rels := range pairMap {
		if len(rels) >= 2 {
			// This is an implicit many-to-many relationship
			r.createImplicitJoinTable(pair.model1, pair.model2, tableMap, schema)
		}
	}
}

// createImplicitJoinTable creates a virtual join table for implicit M2M relations
func (r *Reader) createImplicitJoinTable(model1, model2 string, tableMap map[string]*models.Table, schema *models.Schema) {
	// Prisma naming convention: _Model1ToModel2 (alphabetically sorted)
	joinTableName := fmt.Sprintf("_%sTo%s", model1, model2)

	// Check if join table already exists
	if _, exists := tableMap[joinTableName]; exists {
		return
	}

	// Create join table
	joinTable := models.InitTable(joinTableName, "public")

	// Get primary keys from both tables
	pk1 := r.getPrimaryKeyColumn(tableMap[model1])
	pk2 := r.getPrimaryKeyColumn(tableMap[model2])

	if pk1 == nil || pk2 == nil {
		return // Can't create join table without PKs
	}

	// Create FK columns in join table
	fkCol1Name := fmt.Sprintf("%sId", model1)
	fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
	fkCol1.Type = pk1.Type
	fkCol1.NotNull = true
	joinTable.Columns[fkCol1Name] = fkCol1

	fkCol2Name := fmt.Sprintf("%sId", model2)
	fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
	fkCol2.Type = pk2.Type
	fkCol2.NotNull = true
	joinTable.Columns[fkCol2Name] = fkCol2

	// Create composite primary key
	pkConstraint := models.InitConstraint(
		fmt.Sprintf("pk_%s", joinTableName),
		models.PrimaryKeyConstraint,
	)
	pkConstraint.Schema = "public"
	pkConstraint.Table = joinTableName
	pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
	joinTable.Constraints[pkConstraint.Name] = pkConstraint

	// Mark columns as PK
	fkCol1.IsPrimaryKey = true
	fkCol2.IsPrimaryKey = true

	// Create FK constraints
	fk1 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, model1),
		models.ForeignKeyConstraint,
	)
	fk1.Schema = "public"
	fk1.Table = joinTableName
	fk1.Columns = []string{fkCol1Name}
	fk1.ReferencedSchema = "public"
	fk1.ReferencedTable = model1
	fk1.ReferencedColumns = []string{pk1.Name}
	fk1.OnDelete = "Cascade"
	joinTable.Constraints[fk1.Name] = fk1

	fk2 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, model2),
		models.ForeignKeyConstraint,
	)
	fk2.Schema = "public"
	fk2.Table = joinTableName
	fk2.Columns = []string{fkCol2Name}
	fk2.ReferencedSchema = "public"
	fk2.ReferencedTable = model2
	fk2.ReferencedColumns = []string{pk2.Name}
	fk2.OnDelete = "Cascade"
	joinTable.Constraints[fk2.Name] = fk2

	// Add join table to schema
	schema.Tables = append(schema.Tables, joinTable)
	tableMap[joinTableName] = joinTable
}

// getPrimaryKeyColumn returns the primary key column of a table
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
	if table == nil {
		return nil
	}

	for _, col := range table.Columns {
		if col.IsPrimaryKey {
			return col
		}
	}

	return nil
}
160
pkg/readers/sqldir/README.md
Normal file
@@ -0,0 +1,160 @@
# SQL Directory Reader

The SQL Directory Reader (`sqldir`) reads SQL scripts from a directory structure and populates the `Scripts` field of a `Schema`. It supports recursive directory scanning and extracts priority, sequence, and name information from filenames.

## File Naming Convention

Scripts must follow this naming pattern (supports both underscores and hyphens as separators):

```
{priority}_{sequence}_{name}.{sql|pgsql}
{priority}-{sequence}-{name}.{sql|pgsql}
```

### Components

- **priority**: Integer (0-9999) - Defines execution order (lower executes first)
- **sequence**: Integer (0-9999) - Defines order within the same priority level
- **separator**: Underscore `_` or hyphen `-` (can be mixed)
- **name**: Descriptive name (alphanumeric, underscores, hyphens allowed)
- **extension**: `.sql` or `.pgsql`

### Examples

```
migrations/
├── 1_001_create_schema.sql          # Priority 1, Sequence 1 (underscore format)
├── 1-002-create-users-table.sql     # Priority 1, Sequence 2 (hyphen format)
├── 1_003_create_posts_table.pgsql   # Priority 1, Sequence 3 (underscore format)
├── 2-001-add-indexes.sql            # Priority 2, Sequence 1 (hyphen format)
├── 2_002_add_constraints.sql        # Priority 2, Sequence 2 (underscore format)
├── 10-10-create-newid.pgsql         # Priority 10, Sequence 10 (hyphen format)
└── subdirectory/
    └── 3_001_seed_data.sql          # Priority 3, Sequence 1 (subdirs supported)
```

**Execution Order**: scripts run sorted by Priority ascending, then Sequence ascending. For the tree above that means the three Priority 1 scripts (by Sequence), then the two Priority 2 scripts, then `3_001_seed_data.sql`, and finally `10-10-create-newid.pgsql`.

**Both formats can be mixed** in the same directory - the reader handles both seamlessly.

### Invalid Filenames (Ignored)

The following do not match the pattern and are skipped (a small parsing sketch follows this list):

- `migration.sql` - Missing priority/sequence
- `1_create_users.sql` - Missing sequence
- `create_users.sql` - Missing priority/sequence
- `1_001_test.txt` - Wrong extension
- `readme.md` - Not a SQL file
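As referenced above, a minimal parsing sketch for this convention; the exact pattern the reader compiles lives in `pkg/readers/sqldir/reader.go`, so treat this regex as illustrative rather than the shipped one:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Illustrative pattern: priority, separator (_ or -), sequence, separator, name, .sql/.pgsql extension.
var scriptNameRe = regexp.MustCompile(`^(\d{1,4})[_-](\d{1,4})[_-]([\w-]+)\.(sql|pgsql)$`)

func main() {
	m := scriptNameRe.FindStringSubmatch("1-002-create-users-table.sql")
	if m == nil {
		fmt.Println("not a migration script")
		return
	}
	priority, _ := strconv.Atoi(m[1])
	sequence, _ := strconv.Atoi(m[2])
	fmt.Printf("priority=%d sequence=%d name=%s\n", priority, sequence, m[3])
	// Prints: priority=1 sequence=2 name=create-users-table
}
```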
## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
)

reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "/path/to/migrations",
	Metadata: map[string]any{
		"schema_name":   "public", // Optional, defaults to "public"
		"database_name": "myapp",  // Optional, defaults to "database"
	},
})

// Read all scripts
database, err := reader.ReadDatabase()
if err != nil {
	log.Fatal(err)
}

// Access scripts
for _, schema := range database.Schemas {
	for _, script := range schema.Scripts {
		fmt.Printf("Script: %s (P:%d S:%d)\n",
			script.Name, script.Priority, script.Sequence)
		fmt.Printf("SQL: %s\n", script.SQL)
	}
}
```

### Read Schema Only

```go
schema, err := reader.ReadSchema()
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Found %d scripts\n", len(schema.Scripts))
```

## Features

- **Recursive Directory Scanning**: Automatically scans all subdirectories
- **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
- **Flexible Naming**: Extracts metadata from filename patterns
- **Error Handling**: Validates directory existence and file accessibility
- **Schema Integration**: Scripts are added to the standard RelSpec `Schema` model

## Script Model

Each script is stored as a `models.Script`:

```go
type Script struct {
	Name        string // Extracted from filename (e.g., "create_users")
	Description string // Auto-generated description with file path
	SQL         string // Complete SQL content from file
	Priority    int    // Execution priority from filename
	Sequence    uint   // Execution sequence from filename
	// ... other fields available but not populated by this reader
}
```

## Integration with SQL Executor

The SQL Directory Reader is designed to work seamlessly with the SQL Executor Writer:

```go
// Read scripts
reader := sqldir.NewReader(&readers.ReaderOptions{
	FilePath: "./migrations",
})
db, _ := reader.ReadDatabase()

// Execute scripts
writer := sqlexec.NewWriter(&writers.WriterOptions{
	Metadata: map[string]any{
		"connection_string": "postgres://localhost/mydb",
	},
})
writer.WriteDatabase(db) // Executes in Priority→Sequence order
```

See `pkg/writers/sqlexec/README.md` for more details on script execution.

## Error Handling

The reader will return errors for:

- Non-existent directory paths
- Inaccessible directories or files
- Invalid file permissions
- File read failures

Files that don't match the naming pattern are silently ignored (not treated as errors).

## Testing

Run tests:

```bash
go test ./pkg/readers/sqldir/
```

Tests include:

- Valid file parsing
- Recursive directory scanning
- Invalid filename handling
- Empty directory handling
- Error conditions
127
pkg/readers/sqldir/example_test.go
Normal file
@@ -0,0 +1,127 @@
package sqldir_test

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
)

// Example demonstrates how to read SQL scripts from a directory and execute them
func Example() {
	// Step 1: Read SQL scripts from a directory
	// Directory structure example:
	//   migrations/
	//     1_001_create_schema.sql
	//     1_002_create_users_table.sql
	//     1_003_create_posts_table.pgsql
	//     2_001_add_indexes.sql
	//     2_002_seed_data.sql

	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
		Metadata: map[string]any{
			"schema_name":   "public",
			"database_name": "myapp",
		},
	})

	// Read the database schema with scripts
	database, err := reader.ReadDatabase()
	if err != nil {
		log.Fatalf("Failed to read scripts: %v", err)
	}

	fmt.Printf("Read %d schemas\n", len(database.Schemas))
	fmt.Printf("Found %d scripts in schema '%s'\n",
		len(database.Schemas[0].Scripts),
		database.Schemas[0].Name)

	// Step 2: Execute the scripts against a PostgreSQL database
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://user:password@localhost:5432/myapp?sslmode=disable",
		},
	})

	// Execute all scripts in Priority then Sequence order
	if err := writer.WriteDatabase(database); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("All scripts executed successfully!")
}

// Example_withSingleSchema shows how to read and execute scripts for a single schema
func Example_withSingleSchema() {
	// Read scripts
	reader := sqldir.NewReader(&readers.ReaderOptions{
		FilePath: "/path/to/migrations",
	})

	schema, err := reader.ReadSchema()
	if err != nil {
		log.Fatalf("Failed to read schema: %v", err)
	}

	// Execute scripts
	writer := sqlexec.NewWriter(&writers.WriterOptions{
		Metadata: map[string]any{
			"connection_string": "postgres://localhost/testdb",
		},
	})

	if err := writer.WriteSchema(schema); err != nil {
		log.Fatalf("Failed to execute scripts: %v", err)
	}

	fmt.Println("Schema scripts executed successfully!")
}

// Example_fileNamingConvention shows the expected file naming pattern
func Example_fileNamingConvention() {
	// File naming pattern: {priority}_{sequence}_{name}.sql or .pgsql
	// OR: {priority}-{sequence}-{name}.sql or .pgsql
	//
	// Both underscore (_) and hyphen (-) separators are supported and can be mixed.
	//
	// Components:
	//   - priority: Integer (0-9999) - Scripts with lower priority execute first
	//   - sequence: Integer (0-9999) - Within same priority, lower sequence executes first
	//   - separator: Underscore (_) or hyphen (-)
	//   - name: Descriptive name (alphanumeric, underscores, hyphens)
	//   - extension: .sql or .pgsql
	//
	// Examples (underscore format):
	//   ✓ 1_001_create_users.sql   (Priority=1, Sequence=1)
	//   ✓ 1_002_create_posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2_001_add_indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10_100_migration.sql     (Priority=10, Sequence=100)
	//
	// Examples (hyphen format):
	//   ✓ 1-001-create-users.sql   (Priority=1, Sequence=1)
	//   ✓ 1-002-create-posts.sql   (Priority=1, Sequence=2)
	//   ✓ 2-001-add-indexes.pgsql  (Priority=2, Sequence=1)
	//   ✓ 10-10-create-newid.pgsql (Priority=10, Sequence=10)
	//
	// Mixed format (both in same directory):
	//   ✓ 1_001_create_users.sql (underscore format)
	//   ✓ 1-002-create-posts.sql (hyphen format)
	//   ✓ 2_001_add_indexes.sql  (underscore format)
	//
	// Execution order for mixed examples:
	//   1. 1_001_create_users.sql (Priority 1, Sequence 1)
	//   2. 1-002-create-posts.sql (Priority 1, Sequence 2)
	//   3. 2_001_add_indexes.sql  (Priority 2, Sequence 1)
	//
	// Invalid filenames (will be ignored):
	//   ✗ migration.sql          (missing priority/sequence)
	//   ✗ 1_create_users.sql     (missing sequence)
	//   ✗ create_users.sql       (missing priority/sequence)
	//   ✗ 1_001_create_users.txt (wrong extension)

	fmt.Println("See comments for file naming conventions")
}

169
pkg/readers/sqldir/reader.go
Normal file
@@ -0,0 +1,169 @@
package sqldir

import (
    "fmt"
    "os"
    "path/filepath"
    "regexp"
    "strconv"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for SQL script directories
type Reader struct {
    options *readers.ReaderOptions
}

// NewReader creates a new SQL directory reader
func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

// ReadDatabase reads all SQL scripts from a directory into a Database
func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("directory path is required")
    }

    // Check if directory exists
    info, err := os.Stat(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to access directory: %w", err)
    }
    if !info.IsDir() {
        return nil, fmt.Errorf("path is not a directory: %s", r.options.FilePath)
    }

    // Read scripts from directory
    scripts, err := r.readScripts()
    if err != nil {
        return nil, fmt.Errorf("failed to read scripts: %w", err)
    }

    // Get schema name from metadata or use default
    schemaName := "public"
    if name, ok := r.options.Metadata["schema_name"].(string); ok && name != "" {
        schemaName = name
    }

    // Create schema with scripts
    schema := &models.Schema{
        Name:    schemaName,
        Scripts: scripts,
    }

    // Get database name from metadata or use default
    dbName := "database"
    if name, ok := r.options.Metadata["database_name"].(string); ok && name != "" {
        dbName = name
    }

    // Create database with schema
    database := &models.Database{
        Name:    dbName,
        Schemas: []*models.Schema{schema},
    }

    // Set back-reference
    schema.RefDatabase = database

    return database, nil
}

// ReadSchema reads all SQL scripts from a directory into a Schema
func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }
    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schema found")
    }
    return db.Schemas[0], nil
}

// ReadTable is not applicable for SQL script directories
func (r *Reader) ReadTable() (*models.Table, error) {
    return nil, fmt.Errorf("ReadTable is not supported for SQL script directories")
}

// readScripts recursively scans the directory for SQL files and parses them into Script models
func (r *Reader) readScripts() ([]*models.Script, error) {
    var scripts []*models.Script

    // Regular expression to parse filename: {priority}{sep}{sequence}{sep}{name}.sql or .pgsql
    // Separator can be underscore (_) or hyphen (-)
    // Example: 1_001_create_users.sql -> priority=1, sequence=001, name=create_users
    // Example: 2_005_add_indexes.pgsql -> priority=2, sequence=005, name=add_indexes
    // Example: 10-10-create-newid.pgsql -> priority=10, sequence=10, name=create-newid
    pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)

    err := filepath.WalkDir(r.options.FilePath, func(path string, d os.DirEntry, err error) error {
        if err != nil {
            return err
        }

        // Skip directories
        if d.IsDir() {
            return nil
        }

        // Get filename
        filename := d.Name()

        // Match against pattern
        matches := pattern.FindStringSubmatch(filename)
        if matches == nil {
            // Skip files that don't match the pattern
            return nil
        }

        // Parse priority
        priority, err := strconv.Atoi(matches[1])
        if err != nil {
            return fmt.Errorf("invalid priority in filename %s: %w", filename, err)
        }

        // Parse sequence
        sequence, err := strconv.ParseUint(matches[2], 10, 64)
        if err != nil {
            return fmt.Errorf("invalid sequence in filename %s: %w", filename, err)
        }

        // Extract name
        name := matches[3]

        // Read SQL content
        content, err := os.ReadFile(path)
        if err != nil {
            return fmt.Errorf("failed to read file %s: %w", path, err)
        }

        // Get relative path from base directory
        relPath, err := filepath.Rel(r.options.FilePath, path)
        if err != nil {
            relPath = path
        }

        // Create Script model
        script := models.InitScript(name)
        script.Description = fmt.Sprintf("SQL script from %s", relPath)
        script.SQL = string(content)
        script.Priority = priority
        script.Sequence = uint(sequence)

        scripts = append(scripts, script)

        return nil
    })

    if err != nil {
        return nil, err
    }

    return scripts, nil
}

375
pkg/readers/sqldir/reader_test.go
Normal file
@@ -0,0 +1,375 @@
package sqldir

import (
    "os"
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase(t *testing.T) {
    // Create temporary test directory
    tempDir, err := os.MkdirTemp("", "sqldir-test-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    // Create test SQL files with both underscore and hyphen separators
    testFiles := map[string]string{
        "1_001_create_users.sql":   "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT);",
        "1_002_create_posts.sql":   "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT);",
        "2_001_add_indexes.sql":    "CREATE INDEX idx_posts_user_id ON posts(user_id);",
        "1_003_seed_data.pgsql":    "INSERT INTO users (name) VALUES ('Alice'), ('Bob');",
        "10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL PRIMARY KEY);",
        "2-005-add-column.sql":     "ALTER TABLE users ADD COLUMN email TEXT;",
    }

    for filename, content := range testFiles {
        filePath := filepath.Join(tempDir, filename)
        if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
            t.Fatalf("Failed to create test file %s: %v", filename, err)
        }
    }

    // Create subdirectory with additional script
    subDir := filepath.Join(tempDir, "migrations")
    if err := os.MkdirAll(subDir, 0755); err != nil {
        t.Fatalf("Failed to create subdirectory: %v", err)
    }
    subFile := filepath.Join(subDir, "3_001_add_column.sql")
    if err := os.WriteFile(subFile, []byte("ALTER TABLE users ADD COLUMN email TEXT;"), 0644); err != nil {
        t.Fatalf("Failed to create subdirectory file: %v", err)
    }

    // Create reader
    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
        Metadata: map[string]any{
            "schema_name":   "test_schema",
            "database_name": "test_db",
        },
    })

    // Read database
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase failed: %v", err)
    }

    // Verify database
    if db.Name != "test_db" {
        t.Errorf("Expected database name 'test_db', got '%s'", db.Name)
    }

    if len(db.Schemas) != 1 {
        t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
    }

    schema := db.Schemas[0]
    if schema.Name != "test_schema" {
        t.Errorf("Expected schema name 'test_schema', got '%s'", schema.Name)
    }

    // Verify scripts (should be 7 total: 4 underscore + 2 hyphen + 1 subdirectory)
    if len(schema.Scripts) != 7 {
        t.Fatalf("Expected 7 scripts, got %d", len(schema.Scripts))
    }

    // Verify script details
    expectedScripts := []struct {
        name     string
        priority int
        sequence uint
    }{
        {"create_users", 1, 1},
        {"create_posts", 1, 2},
        {"seed_data", 1, 3},
        {"add_indexes", 2, 1},
        {"add-column", 2, 5},
        {"add_column", 3, 1},
        {"create-newid", 10, 10},
    }

    scriptMap := make(map[string]*struct {
        priority int
        sequence uint
        sql      string
    })
    for _, script := range schema.Scripts {
        scriptMap[script.Name] = &struct {
            priority int
            sequence uint
            sql      string
        }{
            priority: script.Priority,
            sequence: script.Sequence,
            sql:      script.SQL,
        }
    }

    for _, expected := range expectedScripts {
        script, exists := scriptMap[expected.name]
        if !exists {
            t.Errorf("Expected script '%s' not found", expected.name)
            continue
        }
        if script.priority != expected.priority {
            t.Errorf("Script '%s': expected priority %d, got %d",
                expected.name, expected.priority, script.priority)
        }
        if script.sequence != expected.sequence {
            t.Errorf("Script '%s': expected sequence %d, got %d",
                expected.name, expected.sequence, script.sequence)
        }
        if script.sql == "" {
            t.Errorf("Script '%s': SQL content is empty", expected.name)
        }
    }
}

func TestReader_ReadSchema(t *testing.T) {
    // Create temporary test directory
    tempDir, err := os.MkdirTemp("", "sqldir-test-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    // Create test SQL file
    testFile := filepath.Join(tempDir, "1_001_test.sql")
    if err := os.WriteFile(testFile, []byte("SELECT 1;"), 0644); err != nil {
        t.Fatalf("Failed to create test file: %v", err)
    }

    // Create reader
    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
    })

    // Read schema
    schema, err := reader.ReadSchema()
    if err != nil {
        t.Fatalf("ReadSchema failed: %v", err)
    }

    // Verify schema
    if schema.Name != "public" {
        t.Errorf("Expected default schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Scripts) != 1 {
        t.Fatalf("Expected 1 script, got %d", len(schema.Scripts))
    }
}

func TestReader_InvalidDirectory(t *testing.T) {
    reader := NewReader(&readers.ReaderOptions{
        FilePath: "/nonexistent/directory",
    })

    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for nonexistent directory, got nil")
    }
}

func TestReader_EmptyDirectory(t *testing.T) {
    // Create temporary empty directory
    tempDir, err := os.MkdirTemp("", "sqldir-test-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
    })

    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase failed: %v", err)
    }

    if len(db.Schemas[0].Scripts) != 0 {
        t.Errorf("Expected 0 scripts in empty directory, got %d", len(db.Schemas[0].Scripts))
    }
}

func TestReader_InvalidFilename(t *testing.T) {
    // Create temporary test directory
    tempDir, err := os.MkdirTemp("", "sqldir-test-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    // Create files with various invalid patterns
    invalidFiles := []string{
        "invalid.sql",           // No priority/sequence
        "1_test.sql",            // Missing sequence
        "test_1_2.sql",          // Wrong order
        "a_001_test.sql",        // Non-numeric priority
        "1_abc_test.sql",        // Non-numeric sequence
        "1_001_test.txt",        // Wrong extension
        "1_001_test.sql.backup", // Wrong extension
    }

    for _, filename := range invalidFiles {
        filePath := filepath.Join(tempDir, filename)
        if err := os.WriteFile(filePath, []byte("SELECT 1;"), 0644); err != nil {
            t.Fatalf("Failed to create test file %s: %v", filename, err)
        }
    }

    // Create one valid file
    validFile := filepath.Join(tempDir, "1_001_valid.sql")
    if err := os.WriteFile(validFile, []byte("SELECT 1;"), 0644); err != nil {
        t.Fatalf("Failed to create valid file: %v", err)
    }

    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
    })

    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase failed: %v", err)
    }

    // Should only have the valid file
    if len(db.Schemas[0].Scripts) != 1 {
        t.Errorf("Expected 1 script (invalid files should be skipped), got %d", len(db.Schemas[0].Scripts))
    }

    if db.Schemas[0].Scripts[0].Name != "valid" {
        t.Errorf("Expected script name 'valid', got '%s'", db.Schemas[0].Scripts[0].Name)
    }
}

func TestReader_ReadTable(t *testing.T) {
    reader := NewReader(&readers.ReaderOptions{})

    _, err := reader.ReadTable()
    if err == nil {
        t.Error("Expected error for ReadTable (not supported), got nil")
    }
}

func TestReader_HyphenFormat(t *testing.T) {
    // Create temporary test directory
    tempDir, err := os.MkdirTemp("", "sqldir-test-hyphen-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    // Create test files with hyphen separators
    testFiles := map[string]string{
        "1-001-create-table.sql":   "CREATE TABLE test (id INT);",
        "1-002-insert-data.pgsql":  "INSERT INTO test VALUES (1);",
        "10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL);",
        "2-005-add-index.sql":      "CREATE INDEX idx_test ON test(id);",
    }

    for filename, content := range testFiles {
        filePath := filepath.Join(tempDir, filename)
        if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
            t.Fatalf("Failed to create test file %s: %v", filename, err)
        }
    }

    // Create reader
    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
    })

    // Read database
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase failed: %v", err)
    }

    schema := db.Schemas[0]
    if len(schema.Scripts) != 4 {
        t.Fatalf("Expected 4 scripts, got %d", len(schema.Scripts))
    }

    // Verify specific hyphen-formatted scripts
    expectedScripts := map[string]struct {
        priority int
        sequence uint
    }{
        "create-table": {1, 1},
        "insert-data":  {1, 2},
        "add-index":    {2, 5},
        "create-newid": {10, 10},
    }

    for _, script := range schema.Scripts {
        expected, exists := expectedScripts[script.Name]
        if !exists {
            t.Errorf("Unexpected script: %s", script.Name)
            continue
        }
        if script.Priority != expected.priority {
            t.Errorf("Script '%s': expected priority %d, got %d",
                script.Name, expected.priority, script.Priority)
        }
        if script.Sequence != expected.sequence {
            t.Errorf("Script '%s': expected sequence %d, got %d",
                script.Name, expected.sequence, script.Sequence)
        }
    }
}

func TestReader_MixedFormat(t *testing.T) {
    // Test that both underscore and hyphen formats can be mixed
    tempDir, err := os.MkdirTemp("", "sqldir-test-mixed-*")
    if err != nil {
        t.Fatalf("Failed to create temp directory: %v", err)
    }
    defer os.RemoveAll(tempDir)

    testFiles := map[string]string{
        "1_001_underscore.sql": "SELECT 1;",
        "1-002-hyphen.sql":     "SELECT 2;",
        "2_003_underscore.sql": "SELECT 3;",
        "2-004-hyphen.sql":     "SELECT 4;",
    }

    for filename, content := range testFiles {
        filePath := filepath.Join(tempDir, filename)
        if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
            t.Fatalf("Failed to create test file %s: %v", filename, err)
        }
    }

    reader := NewReader(&readers.ReaderOptions{
        FilePath: tempDir,
    })

    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase failed: %v", err)
    }

    schema := db.Schemas[0]
    if len(schema.Scripts) != 4 {
        t.Fatalf("Expected 4 scripts (mixed format), got %d", len(schema.Scripts))
    }

    // Verify both formats are parsed correctly. Duplicate script names
    // collapse in the set, so each unique name is checked once.
    names := make(map[string]bool)
    for _, script := range schema.Scripts {
        names[script.Name] = true
    }

    expectedNames := []string{"underscore", "hyphen"}
    for _, name := range expectedNames {
        if !names[name] {
            t.Errorf("Expected script name '%s' not found", name)
        }
    }
}

122
pkg/readers/typeorm/README.md
Normal file
@@ -0,0 +1,122 @@
# TypeORM Reader

Reads TypeScript files containing TypeORM entity definitions and extracts database schema information.

## Overview

The TypeORM Reader parses TypeScript source files that define TypeORM entities (classes with TypeORM decorators) and converts them into RelSpec's internal database model representation.

## Features

- Parses TypeORM decorators and entity definitions
- Extracts table, column, and relationship information
- Supports various TypeORM column types and options
- Handles constraints, indexes, and relationships

## Usage

### Basic Example

```go
package main

import (
    "fmt"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
)

func main() {
    options := &readers.ReaderOptions{
        FilePath: "/path/to/entities/",
    }

    reader := typeorm.NewReader(options)
    db, err := reader.ReadDatabase()
    if err != nil {
        panic(err)
    }

    fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read TypeORM entities and convert to JSON
relspec --input typeorm --in-file entities/ --output json --out-file schema.json

# Convert TypeORM to GORM models
relspec --input typeorm --in-file User.ts --output gorm --out-file models.go
```

## Example TypeORM Entity

```typescript
import {
  Entity,
  PrimaryGeneratedColumn,
  Column,
  CreateDateColumn,
  OneToMany,
  ManyToOne,
  JoinColumn,
} from 'typeorm';
import { Post } from './Post';

@Entity('users')
export class User {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ type: 'varchar', length: 50, unique: true })
  username: string;

  @Column({ type: 'varchar', length: 100 })
  email: string;

  @CreateDateColumn({ name: 'created_at' })
  createdAt: Date;

  @OneToMany(() => Post, (post) => post.user)
  posts: Post[];
}

@Entity('posts')
export class Post {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ name: 'user_id' })
  userId: number;

  @Column({ type: 'varchar', length: 200 })
  title: string;

  @Column({ type: 'text' })
  content: string;

  @ManyToOne(() => User, (user) => user.posts, { onDelete: 'CASCADE' })
  @JoinColumn({ name: 'user_id' })
  user: User;
}
```

## Supported TypeORM Decorators

- `@Entity()` - Entity/table definition
- `@PrimaryGeneratedColumn()` - Auto-increment primary key
- `@PrimaryColumn()` - Primary key
- `@Column()` - Column definition
- `@CreateDateColumn()` - Auto-set creation timestamp
- `@UpdateDateColumn()` - Auto-update timestamp
- `@OneToMany()` - One-to-many relationship
- `@ManyToOne()` - Many-to-one relationship
- `@JoinColumn()` - Foreign key column
- `@Index()` - Index definition
- `@Unique()` - Unique constraint

## Notes

- Schema name can be specified in the `@Entity()` decorator
- Supports both JavaScript and TypeScript entity files
- Relationship metadata is extracted from decorators (see the sketch below)
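
As a minimal sketch of what relationship resolution yields (assuming the `User`/`Post` entities above are saved to a hypothetical `entities.ts`; the constraint fields used here are the ones `reader.go` below populates for `@ManyToOne`):

```go
package main

import (
    "fmt"
    "log"

    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
)

func main() {
    reader := typeorm.NewReader(&readers.ReaderOptions{FilePath: "entities.ts"})

    schema, err := reader.ReadSchema()
    if err != nil {
        log.Fatalf("failed to read entities: %v", err)
    }

    // Print the foreign keys derived from @ManyToOne decorators.
    for _, table := range schema.Tables {
        for _, c := range table.Constraints {
            if c.ReferencedTable == "" {
                continue // not a foreign key
            }
            fmt.Printf("%s%v -> %s%v (ON DELETE %s)\n",
                table.Name, c.Columns, c.ReferencedTable, c.ReferencedColumns, c.OnDelete)
        }
    }
}
```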

785
pkg/readers/typeorm/reader.go
Normal file
@@ -0,0 +1,785 @@
package typeorm

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for TypeORM entity files
type Reader struct {
    options *readers.ReaderOptions
}

// NewReader creates a new TypeORM reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

// ReadDatabase reads and parses TypeORM entity files, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("file path is required for TypeORM reader")
    }

    content, err := os.ReadFile(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read file: %w", err)
    }

    return r.parseTypeORM(string(content))
}

// ReadSchema reads and parses TypeORM entity files, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }

    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schemas found in TypeORM entities")
    }

    return db.Schemas[0], nil
}

// ReadTable reads and parses TypeORM entity files, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
    schema, err := r.ReadSchema()
    if err != nil {
        return nil, err
    }

    if len(schema.Tables) == 0 {
        return nil, fmt.Errorf("no tables found in TypeORM entities")
    }

    return schema.Tables[0], nil
}

// entityInfo stores information about an entity during parsing
type entityInfo struct {
    name       string
    fields     []fieldInfo
    decorators []string
}

// fieldInfo stores information about a field during parsing
type fieldInfo struct {
    name       string
    typeName   string
    decorators []string
}

// parseTypeORM parses TypeORM entity content and returns a Database model
func (r *Reader) parseTypeORM(content string) (*models.Database, error) {
    db := models.InitDatabase("database")
    schema := models.InitSchema("public")

    // Parse entities
    entities := r.extractEntities(content)

    // Convert entities to tables and views
    tableMap := make(map[string]*models.Table)
    for _, entity := range entities {
        // Check if this is a view
        isView := false
        for _, decorator := range entity.decorators {
            if strings.HasPrefix(decorator, "@ViewEntity") {
                isView = true
                break
            }
        }

        if isView {
            view := r.entityToView(entity)
            schema.Views = append(schema.Views, view)
        } else {
            table := r.entityToTable(entity)
            schema.Tables = append(schema.Tables, table)
            tableMap[table.Name] = table
            // Relation targets reference entity class names, so index the
            // table under its class name as well when it differs from the
            // table name set via @Entity('...').
            if entity.name != table.Name {
                tableMap[entity.name] = table
            }
        }
    }

    // Second pass: resolve relationships
    r.resolveRelationships(entities, tableMap, schema)

    db.Schemas = append(db.Schemas, schema)
    return db, nil
}
// extractEntities extracts entity and view definitions from TypeORM content
func (r *Reader) extractEntities(content string) []entityInfo {
    entities := make([]entityInfo, 0)

    // First, extract decorators properly (handling multi-line)
    content = r.normalizeDecorators(content)

    scanner := bufio.NewScanner(strings.NewReader(content))

    entityRegex := regexp.MustCompile(`^export\s+class\s+(\w+)`)
    decoratorRegex := regexp.MustCompile(`^\s*@(\w+)(\([^)]*\))?`)
    fieldRegex := regexp.MustCompile(`^\s*(\w+):\s*([^;]+);`)

    var currentEntity *entityInfo
    var pendingDecorators []string
    inClass := false

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        // Skip empty lines and comments
        if trimmed == "" || strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "import ") {
            continue
        }

        // Check for decorator
        if matches := decoratorRegex.FindStringSubmatch(trimmed); matches != nil {
            decorator := matches[0]
            pendingDecorators = append(pendingDecorators, decorator)
            continue
        }

        // Check for entity/view class
        if matches := entityRegex.FindStringSubmatch(trimmed); matches != nil {
            // Save previous entity if exists
            if currentEntity != nil {
                entities = append(entities, *currentEntity)
            }

            currentEntity = &entityInfo{
                name:       matches[1],
                fields:     make([]fieldInfo, 0),
                decorators: pendingDecorators,
            }
            pendingDecorators = []string{}
            inClass = true
            continue
        }

        // Check for class end
        if inClass && trimmed == "}" {
            if currentEntity != nil {
                entities = append(entities, *currentEntity)
                currentEntity = nil
            }
            inClass = false
            pendingDecorators = []string{}
            continue
        }

        // Check for field definition
        if inClass && currentEntity != nil {
            if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
                fieldName := matches[1]
                fieldType := strings.TrimSpace(matches[2])

                field := fieldInfo{
                    name:       fieldName,
                    typeName:   fieldType,
                    decorators: pendingDecorators,
                }
                currentEntity.fields = append(currentEntity.fields, field)
                pendingDecorators = []string{}
            }
        }
    }

    // Save last entity
    if currentEntity != nil {
        entities = append(entities, *currentEntity)
    }

    return entities
}

// normalizeDecorators combines multi-line decorators into single lines
func (r *Reader) normalizeDecorators(content string) string {
    // Replace multi-line decorators with single-line versions
    // Match @Decorator({ ... }) across multiple lines
    decoratorRegex := regexp.MustCompile(`@(\w+)\s*\(\s*\{([^}]*)\}\s*\)`)

    return decoratorRegex.ReplaceAllStringFunc(content, func(match string) string {
        // Remove newlines and extra spaces from decorator
        match = strings.ReplaceAll(match, "\n", " ")
        match = strings.ReplaceAll(match, "\r", " ")
        // Normalize multiple spaces
        spaceRegex := regexp.MustCompile(`\s+`)
        match = spaceRegex.ReplaceAllString(match, " ")
        return match
    })
}

// entityToView converts a view entity to a view
func (r *Reader) entityToView(entity entityInfo) *models.View {
    // Parse @ViewEntity decorator options
    viewName := entity.name
    schemaName := "public"
    var expression string

    for _, decorator := range entity.decorators {
        if strings.HasPrefix(decorator, "@ViewEntity") {
            // Extract options from @ViewEntity({ ... })
            options := r.parseViewEntityOptions(decorator)

            // Check for custom view name
            if name, ok := options["name"]; ok {
                viewName = name
            }

            // Check for schema
            if schema, ok := options["schema"]; ok {
                schemaName = schema
            }

            // Check for expression (SQL definition)
            if expr, ok := options["expression"]; ok {
                expression = expr
            }
            break
        }
    }

    view := models.InitView(viewName, schemaName)
    view.Definition = expression

    // Add columns from fields (if any are defined in the view class)
    for _, field := range entity.fields {
        column := models.InitColumn(field.name, viewName, schemaName)
        column.Type = r.typeScriptTypeToSQL(field.typeName)
        view.Columns[column.Name] = column
    }

    return view
}

// parseViewEntityOptions parses @ViewEntity decorator options
func (r *Reader) parseViewEntityOptions(decorator string) map[string]string {
    options := make(map[string]string)

    // Extract content between parentheses
    start := strings.Index(decorator, "(")
    end := strings.LastIndex(decorator, ")")

    if start == -1 || end == -1 || start >= end {
        return options
    }

    content := decorator[start+1 : end]

    // Skip if empty @ViewEntity()
    if strings.TrimSpace(content) == "" {
        return options
    }

    // Parse name: "value"
    nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
    if matches := nameRegex.FindStringSubmatch(content); matches != nil {
        options["name"] = matches[1]
    }

    // Parse schema: "value"
    schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
    if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
        options["schema"] = matches[1]
    }

    // Parse expression: ` ... ` (can be multi-line, captured as a single line after normalization)
    // Look for expression followed by backtick or quote
    expressionRegex := regexp.MustCompile(`expression:\s*` + "`" + `([^` + "`" + `]+)` + "`")
    if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
        options["expression"] = strings.TrimSpace(matches[1])
    } else {
        // Try with regular quotes
        expressionRegex = regexp.MustCompile(`expression:\s*["']([^"']+)["']`)
        if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
            options["expression"] = strings.TrimSpace(matches[1])
        }
    }

    return options
}
// entityToTable converts an entity to a table
func (r *Reader) entityToTable(entity entityInfo) *models.Table {
    // Parse @Entity decorator options
    tableName := entity.name
    schemaName := "public"
    var entityOptions map[string]string

    for _, decorator := range entity.decorators {
        if strings.HasPrefix(decorator, "@Entity") {
            // Extract options from @Entity({ ... })
            entityOptions = r.parseEntityOptions(decorator)

            // Check for custom table name
            if name, ok := entityOptions["name"]; ok {
                tableName = name
            }

            // Check for schema
            if schema, ok := entityOptions["schema"]; ok {
                schemaName = schema
            }
            break
        }
    }

    table := models.InitTable(tableName, schemaName)

    // Store additional metadata from @Entity options
    if entityOptions != nil {
        // Store database name in metadata
        if database, ok := entityOptions["database"]; ok {
            if table.Metadata == nil {
                table.Metadata = make(map[string]any)
            }
            table.Metadata["database"] = database
        }

        // Store engine in metadata
        if engine, ok := entityOptions["engine"]; ok {
            if table.Metadata == nil {
                table.Metadata = make(map[string]any)
            }
            table.Metadata["engine"] = engine
        }

        // Store original class name if different from table name
        if entity.name != tableName {
            if table.Metadata == nil {
                table.Metadata = make(map[string]any)
            }
            table.Metadata["class_name"] = entity.name
        }
    }

    for _, field := range entity.fields {
        // Skip relation fields (they'll be handled in relationship resolution)
        if r.isRelationField(field) {
            continue
        }

        column := r.fieldToColumn(field, table)
        if column != nil {
            table.Columns[column.Name] = column
        }
    }

    return table
}

// parseEntityOptions parses @Entity decorator options
func (r *Reader) parseEntityOptions(decorator string) map[string]string {
    options := make(map[string]string)

    // Extract content between parentheses
    start := strings.Index(decorator, "(")
    end := strings.LastIndex(decorator, ")")

    if start == -1 || end == -1 || start >= end {
        return options
    }

    content := decorator[start+1 : end]

    // Skip if empty @Entity()
    if strings.TrimSpace(content) == "" {
        return options
    }

    // Parse name: "value" or name: 'value'
    nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
    if matches := nameRegex.FindStringSubmatch(content); matches != nil {
        options["name"] = matches[1]
    }

    // Parse schema: "value"
    schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
    if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
        options["schema"] = matches[1]
    }

    // Parse database: "value"
    databaseRegex := regexp.MustCompile(`database:\s*["']([^"']+)["']`)
    if matches := databaseRegex.FindStringSubmatch(content); matches != nil {
        options["database"] = matches[1]
    }

    // Parse engine: "value"
    engineRegex := regexp.MustCompile(`engine:\s*["']([^"']+)["']`)
    if matches := engineRegex.FindStringSubmatch(content); matches != nil {
        options["engine"] = matches[1]
    }

    return options
}

// isRelationField checks if a field is a relation field
func (r *Reader) isRelationField(field fieldInfo) bool {
    for _, decorator := range field.decorators {
        if strings.Contains(decorator, "@ManyToOne") ||
            strings.Contains(decorator, "@OneToMany") ||
            strings.Contains(decorator, "@ManyToMany") ||
            strings.Contains(decorator, "@OneToOne") {
            return true
        }
    }
    return false
}

// fieldToColumn converts a field to a column
func (r *Reader) fieldToColumn(field fieldInfo, table *models.Table) *models.Column {
    column := models.InitColumn(field.name, table.Name, table.Schema)

    // Map TypeScript type to SQL type
    column.Type = r.typeScriptTypeToSQL(field.typeName)

    // Default to NOT NULL
    column.NotNull = true

    // Parse decorators
    for _, decorator := range field.decorators {
        r.parseColumnDecorator(decorator, column, table)
    }

    return column
}

// typeScriptTypeToSQL converts TypeScript types to SQL types
func (r *Reader) typeScriptTypeToSQL(tsType string) string {
    // Remove array brackets and optional markers
    tsType = strings.TrimSuffix(tsType, "[]")
    tsType = strings.TrimSuffix(tsType, " | null")

    typeMap := map[string]string{
        "string":  "text",
        "number":  "integer",
        "boolean": "boolean",
        "Date":    "timestamp",
        "any":     "jsonb",
    }

    for tsPattern, sqlType := range typeMap {
        if strings.Contains(tsType, tsPattern) {
            return sqlType
        }
    }

    // Default to text
    return "text"
}

// parseColumnDecorator parses a column decorator
func (r *Reader) parseColumnDecorator(decorator string, column *models.Column, table *models.Table) {
    // @PrimaryGeneratedColumn
    if strings.HasPrefix(decorator, "@PrimaryGeneratedColumn") {
        column.IsPrimaryKey = true
        column.NotNull = true

        if strings.Contains(decorator, "'uuid'") {
            column.Type = "uuid"
            column.Default = "gen_random_uuid()"
        } else if strings.Contains(decorator, "'increment'") || strings.Contains(decorator, "()") {
            column.AutoIncrement = true
        }
        return
    }

    // @Column
    if strings.HasPrefix(decorator, "@Column") {
        r.parseColumnOptions(decorator, column, table)
        return
    }

    // @CreateDateColumn
    if strings.HasPrefix(decorator, "@CreateDateColumn") {
        column.Type = "timestamp"
        column.Default = "now()"
        column.NotNull = true
        return
    }

    // @UpdateDateColumn
    if strings.HasPrefix(decorator, "@UpdateDateColumn") {
        column.Type = "timestamp"
        column.NotNull = true
        if column.Comment != "" {
            column.Comment += "; auto-update"
        } else {
            column.Comment = "auto-update"
        }
        return
    }
}

// parseColumnOptions parses @Column decorator options
func (r *Reader) parseColumnOptions(decorator string, column *models.Column, table *models.Table) {
    // Extract content between parentheses
    start := strings.Index(decorator, "(")
    end := strings.LastIndex(decorator, ")")

    if start == -1 || end == -1 || start >= end {
        return
    }

    content := decorator[start+1 : end]

    // Check for shorthand type: @Column('text')
    if strings.HasPrefix(content, "'") || strings.HasPrefix(content, "\"") {
        typeStr := strings.Trim(content, "'\"`")
        column.Type = typeStr
        return
    }

    // Parse options object
    if strings.Contains(content, "type:") {
        typeRegex := regexp.MustCompile(`type:\s*['"]([^'"]+)['"]`)
        if matches := typeRegex.FindStringSubmatch(content); matches != nil {
            column.Type = matches[1]
        }
    }

    if strings.Contains(content, "nullable: true") || strings.Contains(content, "nullable:true") {
        column.NotNull = false
    }

    if strings.Contains(content, "unique: true") || strings.Contains(content, "unique:true") {
        uniqueConstraint := models.InitConstraint(
            fmt.Sprintf("uq_%s", column.Name),
            models.UniqueConstraint,
        )
        uniqueConstraint.Schema = table.Schema
        uniqueConstraint.Table = table.Name
        uniqueConstraint.Columns = []string{column.Name}
        table.Constraints[uniqueConstraint.Name] = uniqueConstraint
    }

    if strings.Contains(content, "default:") {
        defaultRegex := regexp.MustCompile(`default:\s*['"]?([^,}'"]+)['"]?`)
        if matches := defaultRegex.FindStringSubmatch(content); matches != nil {
            defaultValue := strings.TrimSpace(matches[1])
            defaultValue = strings.Trim(defaultValue, "'\"")
            column.Default = defaultValue
        }
    }
}
// resolveRelationships resolves TypeORM relationships
func (r *Reader) resolveRelationships(entities []entityInfo, tableMap map[string]*models.Table, schema *models.Schema) {
    // Track M2M relations that need join tables
    type m2mRelation struct {
        ownerEntity  string
        targetEntity string
        ownerField   string
    }
    m2mRelations := make([]m2mRelation, 0)

    for _, entity := range entities {
        table := tableMap[entity.name]
        if table == nil {
            continue
        }

        for _, field := range entity.fields {
            // Handle @ManyToOne relations
            if r.hasDecorator(field, "@ManyToOne") {
                r.createManyToOneConstraint(field, entity.name, table, tableMap)
            }

            // Track @ManyToMany relations with @JoinTable
            if r.hasDecorator(field, "@ManyToMany") && r.hasDecorator(field, "@JoinTable") {
                targetEntity := r.extractRelationTarget(field)
                if targetEntity != "" {
                    m2mRelations = append(m2mRelations, m2mRelation{
                        ownerEntity:  entity.name,
                        targetEntity: targetEntity,
                        ownerField:   field.name,
                    })
                }
            }
        }
    }

    // Create join tables for M2M relations
    for _, rel := range m2mRelations {
        r.createManyToManyJoinTable(rel.ownerEntity, rel.targetEntity, tableMap, schema)
    }
}

// hasDecorator checks if a field has a specific decorator
func (r *Reader) hasDecorator(field fieldInfo, decoratorName string) bool {
    for _, decorator := range field.decorators {
        if strings.HasPrefix(decorator, decoratorName) {
            return true
        }
    }
    return false
}

// extractRelationTarget extracts the target entity from a relation decorator
func (r *Reader) extractRelationTarget(field fieldInfo) string {
    // Remove array brackets from type
    targetType := strings.TrimSuffix(field.typeName, "[]")
    targetType = strings.TrimSpace(targetType)
    return targetType
}

// createManyToOneConstraint creates a foreign key constraint for @ManyToOne
func (r *Reader) createManyToOneConstraint(field fieldInfo, entityName string, table *models.Table, tableMap map[string]*models.Table) {
    targetEntity := r.extractRelationTarget(field)
    if targetEntity == "" {
        return
    }

    // Get target table to find its PK
    targetTable := tableMap[targetEntity]
    if targetTable == nil {
        return
    }

    targetPK := r.getPrimaryKeyColumn(targetTable)
    if targetPK == nil {
        return
    }

    // Create FK column
    fkColumnName := fmt.Sprintf("%sId", field.name)
    fkColumn := models.InitColumn(fkColumnName, table.Name, table.Schema)
    fkColumn.Type = targetPK.Type

    // Check if nullable option is set in @ManyToOne decorator
    isNullable := false
    for _, decorator := range field.decorators {
        if strings.Contains(decorator, "nullable: true") || strings.Contains(decorator, "nullable:true") {
            isNullable = true
            break
        }
    }
    fkColumn.NotNull = !isNullable

    table.Columns[fkColumnName] = fkColumn

    // Create FK constraint
    constraint := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", entityName, field.name),
        models.ForeignKeyConstraint,
    )
    constraint.Schema = table.Schema
    constraint.Table = table.Name
    constraint.Columns = []string{fkColumnName}
    constraint.ReferencedSchema = "public"
    // Reference the resolved table name; the @Entity name may differ
    // from the TypeScript class name.
    constraint.ReferencedTable = targetTable.Name
    constraint.ReferencedColumns = []string{targetPK.Name}
    constraint.OnDelete = "CASCADE"

    table.Constraints[constraint.Name] = constraint
}

// createManyToManyJoinTable creates a join table for M2M relations
func (r *Reader) createManyToManyJoinTable(entity1, entity2 string, tableMap map[string]*models.Table, schema *models.Schema) {
    // TypeORM naming convention: entity1_entity2_entity1field
    // We'll simplify to entity1_entity2
    joinTableName := fmt.Sprintf("%s_%s", strings.ToLower(entity1), strings.ToLower(entity2))

    // Check if join table already exists
    if _, exists := tableMap[joinTableName]; exists {
        return
    }

    // Get PKs from both tables
    table1 := tableMap[entity1]
    table2 := tableMap[entity2]
    if table1 == nil || table2 == nil {
        return
    }

    pk1 := r.getPrimaryKeyColumn(table1)
    pk2 := r.getPrimaryKeyColumn(table2)
    if pk1 == nil || pk2 == nil {
        return
    }

    // Create join table
    joinTable := models.InitTable(joinTableName, "public")

    // Create FK columns
    fkCol1Name := fmt.Sprintf("%sId", strings.ToLower(entity1))
    fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
    fkCol1.Type = pk1.Type
    fkCol1.NotNull = true
    fkCol1.IsPrimaryKey = true
    joinTable.Columns[fkCol1Name] = fkCol1

    fkCol2Name := fmt.Sprintf("%sId", strings.ToLower(entity2))
    fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
    fkCol2.Type = pk2.Type
    fkCol2.NotNull = true
    fkCol2.IsPrimaryKey = true
    joinTable.Columns[fkCol2Name] = fkCol2

    // Create composite PK constraint
    pkConstraint := models.InitConstraint(
        fmt.Sprintf("pk_%s", joinTableName),
        models.PrimaryKeyConstraint,
    )
    pkConstraint.Schema = "public"
    pkConstraint.Table = joinTableName
    pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
    joinTable.Constraints[pkConstraint.Name] = pkConstraint

    // Create FK constraints (referencing the resolved table names)
    fk1 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, entity1),
        models.ForeignKeyConstraint,
    )
    fk1.Schema = "public"
    fk1.Table = joinTableName
    fk1.Columns = []string{fkCol1Name}
    fk1.ReferencedSchema = "public"
    fk1.ReferencedTable = table1.Name
    fk1.ReferencedColumns = []string{pk1.Name}
    fk1.OnDelete = "CASCADE"
    joinTable.Constraints[fk1.Name] = fk1

    fk2 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, entity2),
        models.ForeignKeyConstraint,
    )
    fk2.Schema = "public"
    fk2.Table = joinTableName
    fk2.Columns = []string{fkCol2Name}
    fk2.ReferencedSchema = "public"
    fk2.ReferencedTable = table2.Name
    fk2.ReferencedColumns = []string{pk2.Name}
    fk2.OnDelete = "CASCADE"
    joinTable.Constraints[fk2.Name] = fk2

    // Add join table to schema
    schema.Tables = append(schema.Tables, joinTable)
    tableMap[joinTableName] = joinTable
}

// getPrimaryKeyColumn returns the primary key column of a table
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
    if table == nil {
        return nil
    }

    for _, col := range table.Columns {
        if col.IsPrimaryKey {
            return col
        }
    }

    return nil
}

159
pkg/readers/yaml/README.md
Normal file
@@ -0,0 +1,159 @@
# YAML Reader

Reads database schema definitions from YAML files.

## Overview

The YAML Reader parses YAML files that define database schemas in RelSpec's canonical YAML format and converts them into RelSpec's internal database model representation.

## Features

- Reads RelSpec's standard YAML schema format
- Human-readable alternative to JSON format
- Supports complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences

## Usage

### Basic Example

```go
package main

import (
    "fmt"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

func main() {
    options := &readers.ReaderOptions{
        FilePath: "/path/to/schema.yaml",
    }

    reader := yaml.NewReader(options)
    db, err := reader.ReadDatabase()
    if err != nil {
        panic(err)
    }

    fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read YAML schema and convert to GORM models
relspec --input yaml --in-file schema.yaml --output gorm --out-file models.go

# Convert YAML to PostgreSQL DDL
relspec --input yaml --in-file database.yaml --output pgsql --out-file schema.sql

# Transform YAML to JSON
relspec --input yaml --in-file schema.yaml --output json --out-file schema.json
```

## Example YAML Schema

```yaml
name: myapp
database_type: postgresql
schemas:
  - name: public
    tables:
      - name: users
        schema: public
        columns:
          id:
            name: id
            type: bigint
            not_null: true
            is_primary_key: true
            auto_increment: true
            sequence: 1
          username:
            name: username
            type: varchar
            length: 50
            not_null: true
            sequence: 2
          email:
            name: email
            type: varchar
            length: 100
            not_null: true
            sequence: 3
        constraints:
          pk_users:
            name: pk_users
            type: PRIMARY KEY
            columns:
              - id
          uq_users_username:
            name: uq_users_username
            type: UNIQUE
            columns:
              - username
        indexes:
          idx_users_email:
            name: idx_users_email
            columns:
              - email
            unique: false
            type: btree
      - name: posts
        schema: public
        columns:
          id:
            name: id
            type: bigint
            not_null: true
            is_primary_key: true
            sequence: 1
          user_id:
            name: user_id
            type: bigint
            not_null: true
            sequence: 2
          title:
            name: title
            type: varchar
            length: 200
            not_null: true
            sequence: 3
        constraints:
          fk_posts_user_id:
            name: fk_posts_user_id
            type: FOREIGN KEY
            columns:
              - user_id
            referenced_table: users
            referenced_schema: public
            referenced_columns:
              - id
            on_delete: CASCADE
            on_update: NO ACTION
```

## Schema Structure

The YAML format mirrors RelSpec's internal model structure with human-readable syntax (a traversal sketch follows this list):

- Database level: `name`, `database_type`, `schemas`
- Schema level: `name`, `tables`, `views`, `sequences`
- Table level: `name`, `schema`, `columns`, `constraints`, `indexes`, `relationships`
- Column level: `name`, `type`, `length`, `not_null`, `default`, etc.
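
For instance, a minimal sketch of walking these levels after reading a file (field names follow the readers shown elsewhere in this changeset; `Columns` is assumed to be a map keyed by column name):

```go
package main

import (
    "fmt"
    "log"

    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

func main() {
    reader := yaml.NewReader(&readers.ReaderOptions{FilePath: "schema.yaml"})

    db, err := reader.ReadDatabase()
    if err != nil {
        log.Fatalf("failed to read YAML schema: %v", err)
    }

    // Walk database -> schemas -> tables -> columns.
    fmt.Printf("database: %s\n", db.Name)
    for _, schema := range db.Schemas {
        fmt.Printf("  schema: %s\n", schema.Name)
        for _, table := range schema.Tables {
            fmt.Printf("    table: %s (%d columns)\n", table.Name, len(table.Columns))
            for name, col := range table.Columns {
                fmt.Printf("      column: %s %s\n", name, col.Type)
            }
        }
    }
}
```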

## Notes

- YAML format is more human-readable than JSON
- Ideal for manual editing and version control
- Comments are supported in YAML
- Preserves complete schema information
- Can be used for configuration and documentation

326
pkg/reflectutil/helpers.go
Normal file
@@ -0,0 +1,326 @@

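// Package reflectutil provides small reflection helpers for safely
// inspecting values (structs, maps, slices) behind arbitrary levels of
// pointer indirection. Illustrative usage (hypothetical values):
//
//	type user struct{ Name string }
//	GetFieldValue(&user{Name: "Ada"}, "Name") // "Ada"
//	MapGet(map[string]int{"a": 1}, "a")       // 1
//	SliceIndex([]int{1, 2, 3}, 1)             // 2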
package reflectutil

import (
	"reflect"
	"strings"
)

// Deref dereferences pointers until it reaches a non-pointer value
// Returns the dereferenced value and true if successful, or the original value and false if nil
func Deref(v reflect.Value) (reflect.Value, bool) {
	for v.Kind() == reflect.Ptr {
		if v.IsNil() {
			return v, false
		}
		v = v.Elem()
	}
	return v, true
}

// DerefInterface dereferences an interface{} until it reaches a non-pointer value
func DerefInterface(i interface{}) reflect.Value {
	v := reflect.ValueOf(i)
	v, _ = Deref(v)
	return v
}

// GetFieldValue extracts a field value from a struct, map, or pointer
// Returns nil if the field doesn't exist or can't be accessed
func GetFieldValue(item interface{}, field string) interface{} {
	v := reflect.ValueOf(item)
	v, ok := Deref(v)
	if !ok {
		return nil
	}

	switch v.Kind() {
	case reflect.Struct:
		fieldVal := v.FieldByName(field)
		if fieldVal.IsValid() {
			return fieldVal.Interface()
		}
		return nil

	case reflect.Map:
		keyVal := reflect.ValueOf(field)
		mapVal := v.MapIndex(keyVal)
		if mapVal.IsValid() {
			return mapVal.Interface()
		}
		return nil

	default:
		return nil
	}
}

// IsSliceOrArray checks if an interface{} is a slice or array
func IsSliceOrArray(i interface{}) bool {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return false
	}
	k := v.Kind()
	return k == reflect.Slice || k == reflect.Array
}

// IsMap checks if an interface{} is a map
func IsMap(i interface{}) bool {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return false
	}
	return v.Kind() == reflect.Map
}

// SliceLen returns the length of a slice/array, or 0 if not a slice/array
func SliceLen(i interface{}) int {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return 0
	}
	if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
		return 0
	}
	return v.Len()
}

// MapLen returns the length of a map, or 0 if not a map
func MapLen(i interface{}) int {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return 0
	}
	if v.Kind() != reflect.Map {
		return 0
	}
	return v.Len()
}

// SliceToInterfaces converts a slice/array to []interface{}
// Returns empty slice if not a slice/array
func SliceToInterfaces(i interface{}) []interface{} {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return []interface{}{}
	}

	if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
		return []interface{}{}
	}

	result := make([]interface{}, v.Len())
	for i := 0; i < v.Len(); i++ {
		result[i] = v.Index(i).Interface()
	}
	return result
}

// MapKeys returns all keys from a map as []interface{}
// Returns empty slice if not a map
func MapKeys(i interface{}) []interface{} {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return []interface{}{}
	}

	if v.Kind() != reflect.Map {
		return []interface{}{}
	}

	keys := v.MapKeys()
	result := make([]interface{}, len(keys))
	for i, key := range keys {
		result[i] = key.Interface()
	}
	return result
}

// MapValues returns all values from a map as []interface{}
// Returns empty slice if not a map
func MapValues(i interface{}) []interface{} {
	v := reflect.ValueOf(i)
	v, ok := Deref(v)
	if !ok {
		return []interface{}{}
	}

	if v.Kind() != reflect.Map {
		return []interface{}{}
	}

	result := make([]interface{}, 0, v.Len())
	iter := v.MapRange()
	for iter.Next() {
		result = append(result, iter.Value().Interface())
	}
	return result
}

// MapGet safely gets a value from a map by key
// Returns nil if key doesn't exist or not a map
func MapGet(m interface{}, key interface{}) interface{} {
	v := reflect.ValueOf(m)
	v, ok := Deref(v)
	if !ok {
		return nil
	}

	if v.Kind() != reflect.Map {
		return nil
	}

	keyVal := reflect.ValueOf(key)
	mapVal := v.MapIndex(keyVal)
	if mapVal.IsValid() {
		return mapVal.Interface()
	}

	return nil
}

// SliceIndex safely gets an element from a slice/array by index
// Returns nil if index out of bounds or not a slice/array
func SliceIndex(slice interface{}, index int) interface{} {
	v := reflect.ValueOf(slice)
	v, ok := Deref(v)
	if !ok {
		return nil
	}

	if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
		return nil
	}

	if index < 0 || index >= v.Len() {
		return nil
	}

	return v.Index(index).Interface()
}

// CompareValues compares two values for sorting
// Returns -1 if a < b, 0 if a == b, 1 if a > b
func CompareValues(a, b interface{}) int {
	if a == nil && b == nil {
		return 0
	}
	if a == nil {
		return -1
	}
	if b == nil {
		return 1
	}

	va := reflect.ValueOf(a)
	vb := reflect.ValueOf(b)

	// Handle different types
	switch va.Kind() {
	case reflect.String:
		if vb.Kind() == reflect.String {
			as := va.String()
			bs := vb.String()
			if as < bs {
				return -1
			} else if as > bs {
				return 1
			}
			return 0
		}

	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		if vb.Kind() >= reflect.Int && vb.Kind() <= reflect.Int64 {
			ai := va.Int()
			bi := vb.Int()
			if ai < bi {
				return -1
			} else if ai > bi {
				return 1
			}
			return 0
		}

	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		if vb.Kind() >= reflect.Uint && vb.Kind() <= reflect.Uint64 {
			au := va.Uint()
			bu := vb.Uint()
			if au < bu {
				return -1
			} else if au > bu {
				return 1
			}
			return 0
		}

	case reflect.Float32, reflect.Float64:
		if vb.Kind() == reflect.Float32 || vb.Kind() == reflect.Float64 {
			af := va.Float()
			bf := vb.Float()
			if af < bf {
				return -1
			} else if af > bf {
				return 1
			}
			return 0
		}
	}

	return 0
}

// GetNestedValue gets a nested value using dot notation path
// Example: GetNestedValue(obj, "database.schema.table")
func GetNestedValue(m interface{}, path string) interface{} {
	if path == "" {
		return m
	}

	parts := strings.Split(path, ".")
	current := m

	for _, part := range parts {
		if current == nil {
			return nil
		}

		v := reflect.ValueOf(current)
		v, ok := Deref(v)
		if !ok {
			return nil
		}

		switch v.Kind() {
		case reflect.Map:
			keyVal := reflect.ValueOf(part)
			mapVal := v.MapIndex(keyVal)
			if !mapVal.IsValid() {
				return nil
			}
			current = mapVal.Interface()

		case reflect.Struct:
			fieldVal := v.FieldByName(part)
			if !fieldVal.IsValid() {
				return nil
			}
			current = fieldVal.Interface()

		default:
			return nil
		}
	}

	return current
}

// DeepEqual performs a deep equality check between two values
func DeepEqual(a, b interface{}) bool {
	return reflect.DeepEqual(a, b)
}
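A short usage sketch of the helpers above; the `Config` struct and its contents are hypothetical, and only the `reflectutil` calls come from this file:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/reflectutil"
)

// Config is a hypothetical container used only for this sketch.
type Config struct {
	Database map[string]interface{}
}

func main() {
	cfg := &Config{Database: map[string]interface{}{"schema": "public"}}

	// The Deref-based helpers accept pointers transparently.
	fmt.Println(reflectutil.GetFieldValue(cfg, "Database")) // map[schema:public]

	// Dot-notation lookup walks structs and maps alike.
	fmt.Println(reflectutil.GetNestedValue(cfg, "Database.schema")) // public

	// CompareValues orders nils first, then same-kind values.
	fmt.Println(reflectutil.CompareValues("a", "b")) // -1
}
```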
95
pkg/ui/column_dataops.go
Normal file
@@ -0,0 +1,95 @@
package ui

import "git.warky.dev/wdevs/relspecgo/pkg/models"

// Column data operations - business logic for column management

// CreateColumn creates a new column and adds it to a table
func (se *SchemaEditor) CreateColumn(schemaIndex, tableIndex int, name, dataType string, isPrimaryKey, isNotNull bool) *models.Column {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return nil
	}

	if table.Columns == nil {
		table.Columns = make(map[string]*models.Column)
	}

	newColumn := &models.Column{
		Name:         name,
		Type:         dataType,
		IsPrimaryKey: isPrimaryKey,
		NotNull:      isNotNull,
	}
	table.UpdateDate()
	table.Columns[name] = newColumn
	return newColumn
}

// UpdateColumn updates an existing column's properties
func (se *SchemaEditor) UpdateColumn(schemaIndex, tableIndex int, oldName, newName, dataType string, isPrimaryKey, isNotNull bool, defaultValue interface{}, description string) bool {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return false
	}

	column, exists := table.Columns[oldName]
	if !exists {
		return false
	}

	table.UpdateDate()

	// If name changed, remove old entry and create new one
	if oldName != newName {
		delete(table.Columns, oldName)
		column.Name = newName
		table.Columns[newName] = column
	}

	// Update properties
	column.Type = dataType
	column.IsPrimaryKey = isPrimaryKey
	column.NotNull = isNotNull
	column.Default = defaultValue
	column.Description = description

	return true
}

// DeleteColumn removes a column from a table
func (se *SchemaEditor) DeleteColumn(schemaIndex, tableIndex int, columnName string) bool {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return false
	}

	if _, exists := table.Columns[columnName]; !exists {
		return false
	}

	table.UpdateDate()

	delete(table.Columns, columnName)
	return true
}

// GetColumn returns a column by name
func (se *SchemaEditor) GetColumn(schemaIndex, tableIndex int, columnName string) *models.Column {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return nil
	}

	return table.Columns[columnName]
}

// GetAllColumns returns all columns in a table
func (se *SchemaEditor) GetAllColumns(schemaIndex, tableIndex int) map[string]*models.Column {
	table := se.GetTable(schemaIndex, tableIndex)
	if table == nil {
		return nil
	}

	return table.Columns
}
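A sketch of driving these operations programmatically, assuming a database literal like the one `createNewDatabase` builds and that `GetTable` (defined elsewhere in the package) resolves indexes against `db.Schemas[i].Tables[j]`; the schema, table, and column names are hypothetical:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/ui"
)

func main() {
	// Hypothetical database with one schema holding one table at index 0.
	db := &models.Database{
		Name: "demo",
		Schemas: []*models.Schema{{
			Name:   "public",
			Tables: []*models.Table{{Name: "users"}},
		}},
	}

	se := ui.NewSchemaEditor(db)

	// Create a column, rename and retype it, then delete it.
	se.CreateColumn(0, 0, "email", "VARCHAR(255)", false, true)
	se.UpdateColumn(0, 0, "email", "email_address", "TEXT", false, true, nil, "login email")
	fmt.Println(se.GetColumn(0, 0, "email_address").Type) // TEXT
	se.DeleteColumn(0, 0, "email_address")
}
```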
214
pkg/ui/column_screens.go
Normal file
@@ -0,0 +1,214 @@
package ui

import (
	"fmt"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// showColumnEditor shows editor for a specific column
func (se *SchemaEditor) showColumnEditor(schemaIndex, tableIndex, colIndex int, column *models.Column) {
	form := tview.NewForm()

	// Store original name to handle renames
	originalName := column.Name

	// Local variables to collect changes
	newName := column.Name
	newType := column.Type
	newIsPK := column.IsPrimaryKey
	newIsNotNull := column.NotNull
	newDefault := column.Default
	newDescription := column.Description
	newGUID := column.GUID

	// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
	columnTypes := []string{
		// Numeric Types
		"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
		"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
		// Character Types
		"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
		// Boolean
		"BOOLEAN", "BOOL", "BIT",
		// Date/Time Types
		"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
		"DATETIME", "DATETIME2", "DATEFIRST",
		// UUID and JSON
		"UUID", "GUID", "JSON", "JSONB",
		// Binary Types
		"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
		// PostgreSQL Special Types
		"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
		"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
		// Array Types
		"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
		// MySQL Specific
		"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
		// SQL Server Specific
		"MONEY", "SMALLMONEY", "SQL_VARIANT",
	}
	selectedTypeIndex := 0

	// Add existing type if not already in the list
	typeExists := false
	for i, opt := range columnTypes {
		if opt == column.Type {
			selectedTypeIndex = i
			typeExists = true
			break
		}
	}
	if !typeExists && column.Type != "" {
		columnTypes = append(columnTypes, column.Type)
		selectedTypeIndex = len(columnTypes) - 1
	}

	form.AddInputField("Column Name", column.Name, 40, nil, func(value string) {
		newName = value
	})

	form.AddDropDown("Type", columnTypes, selectedTypeIndex, func(option string, index int) {
		newType = option
	})

	form.AddCheckbox("Primary Key", column.IsPrimaryKey, func(checked bool) {
		newIsPK = checked
	})

	form.AddCheckbox("Not Null", column.NotNull, func(checked bool) {
		newIsNotNull = checked
	})

	defaultStr := ""
	if column.Default != nil {
		defaultStr = fmt.Sprintf("%v", column.Default)
	}
	form.AddInputField("Default Value", defaultStr, 40, nil, func(value string) {
		newDefault = value
	})

	form.AddTextArea("Description", column.Description, 40, 5, 0, func(value string) {
		newDescription = value
	})

	form.AddInputField("GUID", column.GUID, 40, nil, func(value string) {
		newGUID = value
	})

	form.AddButton("Save", func() {
		// Apply changes using dataops
		se.UpdateColumn(schemaIndex, tableIndex, originalName, newName, newType, newIsPK, newIsNotNull, newDefault, newDescription)
		se.db.Schemas[schemaIndex].Tables[tableIndex].Columns[newName].GUID = newGUID

		se.pages.RemovePage("column-editor")
		se.pages.SwitchToPage("table-editor")
	})

	form.AddButton("Delete", func() {
		se.showDeleteColumnConfirm(schemaIndex, tableIndex, originalName)
	})

	form.AddButton("Back", func() {
		// Discard changes - don't apply them
		se.pages.RemovePage("column-editor")
		se.pages.SwitchToPage("table-editor")
	})

	form.SetBorder(true).SetTitle(" Edit Column ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("column-editor", "table-editor")
			return nil
		}
		return event
	})

	se.pages.AddPage("column-editor", form, true, true)
}

// showNewColumnDialog shows dialog to create a new column
func (se *SchemaEditor) showNewColumnDialog(schemaIndex, tableIndex int) {
	form := tview.NewForm()

	columnName := ""
	dataType := "VARCHAR(255)"

	// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
	columnTypes := []string{
		// Numeric Types
		"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
		"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
		// Character Types
		"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
		// Boolean
		"BOOLEAN", "BOOL", "BIT",
		// Date/Time Types
		"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
		"DATETIME", "DATETIME2", "DATEFIRST",
		// UUID and JSON
		"UUID", "GUID", "JSON", "JSONB",
		// Binary Types
		"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
		// PostgreSQL Special Types
		"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
		"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
		// Array Types
		"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
		// MySQL Specific
		"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
		// SQL Server Specific
		"MONEY", "SMALLMONEY", "SQL_VARIANT",
	}
	selectedTypeIndex := 0

	form.AddInputField("Column Name", "", 40, nil, func(value string) {
		columnName = value
	})

	form.AddDropDown("Data Type", columnTypes, selectedTypeIndex, func(option string, index int) {
		dataType = option
	})

	form.AddCheckbox("Primary Key", false, nil)
	form.AddCheckbox("Not Null", false, nil)
	form.AddCheckbox("Unique", false, nil)

	form.AddButton("Save", func() {
		if columnName == "" {
			return
		}

		// Get form values
		isPK := form.GetFormItemByLabel("Primary Key").(*tview.Checkbox).IsChecked()
		isNotNull := form.GetFormItemByLabel("Not Null").(*tview.Checkbox).IsChecked()

		se.CreateColumn(schemaIndex, tableIndex, columnName, dataType, isPK, isNotNull)

		table := se.db.Schemas[schemaIndex].Tables[tableIndex]
		se.pages.RemovePage("new-column")
		se.pages.RemovePage("table-editor")
		se.showTableEditor(schemaIndex, tableIndex, table)
	})

	form.AddButton("Back", func() {
		table := se.db.Schemas[schemaIndex].Tables[tableIndex]
		se.pages.RemovePage("new-column")
		se.pages.RemovePage("table-editor")
		se.showTableEditor(schemaIndex, tableIndex, table)
	})

	form.SetBorder(true).SetTitle(" New Column ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("new-column", "table-editor")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-column", form, true, true)
}
15
pkg/ui/database_dataops.go
Normal file
@@ -0,0 +1,15 @@
package ui

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// updateDatabase updates database properties
func (se *SchemaEditor) updateDatabase(name, description, comment, dbType, dbVersion string) {
	se.db.Name = name
	se.db.Description = description
	se.db.Comment = comment
	se.db.DatabaseType = models.DatabaseType(dbType)
	se.db.DatabaseVersion = dbVersion
	se.db.UpdateDate()
}
78
pkg/ui/database_screens.go
Normal file
@@ -0,0 +1,78 @@
package ui

import (
	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

// showEditDatabaseForm displays a dialog to edit database properties
func (se *SchemaEditor) showEditDatabaseForm() {
	form := tview.NewForm()

	dbName := se.db.Name
	dbDescription := se.db.Description
	dbComment := se.db.Comment
	dbType := string(se.db.DatabaseType)
	dbVersion := se.db.DatabaseVersion
	dbGUID := se.db.GUID

	// Database type options
	dbTypeOptions := []string{"pgsql", "mssql", "sqlite"}
	selectedTypeIndex := 0
	for i, opt := range dbTypeOptions {
		if opt == dbType {
			selectedTypeIndex = i
			break
		}
	}

	form.AddInputField("Database Name", dbName, 40, nil, func(value string) {
		dbName = value
	})

	form.AddInputField("Description", dbDescription, 50, nil, func(value string) {
		dbDescription = value
	})

	form.AddInputField("Comment", dbComment, 50, nil, func(value string) {
		dbComment = value
	})

	form.AddDropDown("Database Type", dbTypeOptions, selectedTypeIndex, func(option string, index int) {
		dbType = option
	})

	form.AddInputField("Database Version", dbVersion, 20, nil, func(value string) {
		dbVersion = value
	})

	form.AddInputField("GUID", dbGUID, 40, nil, func(value string) {
		dbGUID = value
	})

	form.AddButton("Save", func() {
		if dbName == "" {
			return
		}
		se.updateDatabase(dbName, dbDescription, dbComment, dbType, dbVersion)
		se.db.GUID = dbGUID
		se.pages.RemovePage("edit-database")
		se.pages.RemovePage("main")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("edit-database")
	})

	form.SetBorder(true).SetTitle(" Edit Database ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("edit-database", "main")
			return nil
		}
		return event
	})

	se.pages.AddPage("edit-database", form, true, true)
}
139
pkg/ui/dialogs.go
Normal file
@@ -0,0 +1,139 @@
package ui

import (
	"fmt"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

// showExitConfirmation shows a confirmation dialog when trying to exit without saving
func (se *SchemaEditor) showExitConfirmation(pageToRemove, pageToSwitchTo string) {
	modal := tview.NewModal().
		SetText("Exit without saving changes?").
		AddButtons([]string{"Cancel", "No, exit without saving"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "No, exit without saving" {
				se.pages.RemovePage(pageToRemove)
				se.pages.SwitchToPage(pageToSwitchTo)
			}
			se.pages.RemovePage("exit-confirm")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("exit-confirm")
			return nil
		}
		return event
	})

	se.pages.AddPage("exit-confirm", modal, true, true)
}

// showExitEditorConfirm shows confirmation dialog when trying to exit the entire editor
func (se *SchemaEditor) showExitEditorConfirm() {
	modal := tview.NewModal().
		SetText("Exit RelSpec Editor? Press ESC again to confirm.").
		AddButtons([]string{"Cancel", "Exit"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Exit" {
				se.app.Stop()
			}
			se.pages.RemovePage("exit-editor-confirm")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.app.Stop()
			return nil
		}
		return event
	})

	se.pages.AddPage("exit-editor-confirm", modal, true, true)
}

// showDeleteSchemaConfirm shows confirmation dialog for schema deletion
func (se *SchemaEditor) showDeleteSchemaConfirm(schemaIndex int) {
	modal := tview.NewModal().
		SetText(fmt.Sprintf("Delete schema '%s'? This will delete all tables in this schema.",
			se.db.Schemas[schemaIndex].Name)).
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				se.DeleteSchema(schemaIndex)
				se.pages.RemovePage("schema-editor")
				se.pages.RemovePage("schemas")
				se.showSchemaList()
			}
			se.pages.RemovePage("confirm-delete-schema")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("confirm-delete-schema")
			return nil
		}
		return event
	})

	se.pages.AddPage("confirm-delete-schema", modal, true, true)
}

// showDeleteTableConfirm shows confirmation dialog for table deletion
func (se *SchemaEditor) showDeleteTableConfirm(schemaIndex, tableIndex int) {
	table := se.db.Schemas[schemaIndex].Tables[tableIndex]
	modal := tview.NewModal().
		SetText(fmt.Sprintf("Delete table '%s'? This action cannot be undone.",
			table.Name)).
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				se.DeleteTable(schemaIndex, tableIndex)
				schema := se.db.Schemas[schemaIndex]
				se.pages.RemovePage("table-editor")
				se.pages.RemovePage("schema-editor")
				se.showSchemaEditor(schemaIndex, schema)
			}
			se.pages.RemovePage("confirm-delete-table")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("confirm-delete-table")
			return nil
		}
		return event
	})

	se.pages.AddPage("confirm-delete-table", modal, true, true)
}

// showDeleteColumnConfirm shows confirmation dialog for column deletion
func (se *SchemaEditor) showDeleteColumnConfirm(schemaIndex, tableIndex int, columnName string) {
	modal := tview.NewModal().
		SetText(fmt.Sprintf("Delete column '%s'? This action cannot be undone.",
			columnName)).
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				se.DeleteColumn(schemaIndex, tableIndex, columnName)
				se.pages.RemovePage("column-editor")
				se.pages.RemovePage("confirm-delete-column")
				se.pages.SwitchToPage("table-editor")
			} else {
				se.pages.RemovePage("confirm-delete-column")
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("confirm-delete-column")
			return nil
		}
		return event
	})

	se.pages.AddPage("confirm-delete-column", modal, true, true)
}
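All four dialogs share one tview pattern: build a `Modal`, route button presses through `SetDoneFunc`, intercept Escape with `SetInputCapture`, and stack the modal as a page. A standalone sketch of that pattern, independent of RelSpec:

```go
package main

import (
	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

func main() {
	app := tview.NewApplication()
	pages := tview.NewPages()

	modal := tview.NewModal().
		SetText("Delete item? This action cannot be undone.").
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				// ... perform the deletion here ...
			}
			app.Stop()
		})

	// Escape dismisses the dialog, mirroring the handlers above.
	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			app.Stop()
			return nil
		}
		return event
	})

	pages.AddPage("confirm", modal, true, true)
	if err := app.SetRoot(pages, true).Run(); err != nil {
		panic(err)
	}
}
```

Keeping the modal on a named page (rather than swapping the root) is what lets the editor return to the underlying screen just by removing the page.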
35
pkg/ui/domain_dataops.go
Normal file
@@ -0,0 +1,35 @@
package ui

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// createDomain creates a new domain
func (se *SchemaEditor) createDomain(name, description string) {
	domain := &models.Domain{
		Name:        name,
		Description: description,
		Tables:      make([]*models.DomainTable, 0),
		Sequence:    uint(len(se.db.Domains)),
	}

	se.db.Domains = append(se.db.Domains, domain)
	se.showDomainList()
}

// updateDomain updates an existing domain
func (se *SchemaEditor) updateDomain(index int, name, description string) {
	if index >= 0 && index < len(se.db.Domains) {
		se.db.Domains[index].Name = name
		se.db.Domains[index].Description = description
		se.showDomainList()
	}
}

// deleteDomain deletes a domain by index
func (se *SchemaEditor) deleteDomain(index int) {
	if index >= 0 && index < len(se.db.Domains) {
		se.db.Domains = append(se.db.Domains[:index], se.db.Domains[index+1:]...)
		se.showDomainList()
	}
}
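`deleteDomain` relies on the standard Go idiom for removing a slice element by index; in isolation, with hypothetical data:

```go
package main

import "fmt"

func main() {
	domains := []string{"billing", "inventory", "shipping"}
	i := 1 // index to delete

	// append over the same backing array shifts the later elements left.
	domains = append(domains[:i], domains[i+1:]...)
	fmt.Println(domains) // [billing shipping]
}
```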
258
pkg/ui/domain_screens.go
Normal file
@@ -0,0 +1,258 @@
package ui

import (
	"fmt"
	"strings"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// showDomainList displays the domain management screen
func (se *SchemaEditor) showDomainList() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Manage Domains").
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Create domains table
	domainTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)

	// Add header row
	headers := []string{"Name", "Sequence", "Total Tables", "Description"}
	headerWidths := []int{20, 15, 20}
	for i, header := range headers {
		padding := ""
		if i < len(headerWidths) {
			padding = strings.Repeat(" ", headerWidths[i]-len(header))
		}
		cell := tview.NewTableCell(header + padding).
			SetTextColor(tcell.ColorYellow).
			SetSelectable(false).
			SetAlign(tview.AlignLeft)
		domainTable.SetCell(0, i, cell)
	}

	// Add existing domains
	for row, domain := range se.db.Domains {
		domain := domain // capture for closure

		// Name - pad to 20 chars
		nameStr := fmt.Sprintf("%-20s", domain.Name)
		nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
		domainTable.SetCell(row+1, 0, nameCell)

		// Sequence - pad to 15 chars
		seqStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", domain.Sequence))
		seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
		domainTable.SetCell(row+1, 1, seqCell)

		// Total Tables - pad to 20 chars
		tablesStr := fmt.Sprintf("%-20s", fmt.Sprintf("%d", len(domain.Tables)))
		tablesCell := tview.NewTableCell(tablesStr).SetSelectable(true)
		domainTable.SetCell(row+1, 2, tablesCell)

		// Description - no padding, takes remaining space
		descCell := tview.NewTableCell(domain.Description).SetSelectable(true)
		domainTable.SetCell(row+1, 3, descCell)
	}

	domainTable.SetTitle(" Domains ").SetBorder(true).SetTitleAlign(tview.AlignLeft)

	// Action buttons flex
	btnFlex := tview.NewFlex()
	btnNewDomain := tview.NewButton("New Domain [n]").SetSelectedFunc(func() {
		se.showNewDomainDialog()
	})
	btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
		se.pages.SwitchToPage("main")
		se.pages.RemovePage("domains")
	})

	// Set up button input captures for Tab/Shift+Tab navigation
	btnNewDomain.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(domainTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNewDomain)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(domainTable)
			return nil
		}
		return event
	})

	btnFlex.AddItem(btnNewDomain, 0, 1, true).
		AddItem(btnBack, 0, 1, false)

	domainTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("domains")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNewDomain)
			return nil
		}
		if event.Key() == tcell.KeyEnter {
			row, _ := domainTable.GetSelection()
			if row > 0 && row <= len(se.db.Domains) { // Skip header row
				domainIndex := row - 1
				se.showDomainEditor(domainIndex, se.db.Domains[domainIndex])
				return nil
			}
		}
		if event.Rune() == 'n' {
			se.showNewDomainDialog()
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("domains")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(domainTable, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("domains", flex, true, true)
}

// showNewDomainDialog displays a dialog to create a new domain
func (se *SchemaEditor) showNewDomainDialog() {
	form := tview.NewForm()

	domainName := ""
	domainDesc := ""

	form.AddInputField("Name", "", 40, nil, func(value string) {
		domainName = value
	})

	form.AddInputField("Description", "", 50, nil, func(value string) {
		domainDesc = value
	})

	form.AddButton("Save", func() {
		if domainName == "" {
			return
		}
		se.createDomain(domainName, domainDesc)
		se.pages.RemovePage("new-domain")
		se.pages.RemovePage("domains")
		se.showDomainList()
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("new-domain")
		se.pages.RemovePage("domains")
		se.showDomainList()
	})

	form.SetBorder(true).SetTitle(" New Domain ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("new-domain", "domains")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-domain", form, true, true)
}

// showDomainEditor displays a dialog to edit an existing domain
func (se *SchemaEditor) showDomainEditor(index int, domain *models.Domain) {
	form := tview.NewForm()

	domainName := domain.Name
	domainDesc := domain.Description

	form.AddInputField("Name", domainName, 40, nil, func(value string) {
		domainName = value
	})

	form.AddInputField("Description", domainDesc, 50, nil, func(value string) {
		domainDesc = value
	})

	form.AddButton("Save", func() {
		if domainName == "" {
			return
		}
		se.updateDomain(index, domainName, domainDesc)
		se.pages.RemovePage("edit-domain")
		se.pages.RemovePage("domains")
		se.showDomainList()
	})

	form.AddButton("Delete", func() {
		se.showDeleteDomainConfirm(index)
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("edit-domain")
		se.pages.RemovePage("domains")
		se.showDomainList()
	})

	form.SetBorder(true).SetTitle(" Edit Domain ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("edit-domain", "domains")
			return nil
		}
		return event
	})

	se.pages.AddPage("edit-domain", form, true, true)
}

// showDeleteDomainConfirm shows a confirmation dialog before deleting a domain
func (se *SchemaEditor) showDeleteDomainConfirm(index int) {
	modal := tview.NewModal().
		SetText(fmt.Sprintf("Delete domain '%s'? This action cannot be undone.", se.db.Domains[index].Name)).
		AddButtons([]string{"Cancel", "Delete"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Delete" {
				se.deleteDomain(index)
				se.pages.RemovePage("delete-domain-confirm")
				se.pages.RemovePage("edit-domain")
				se.pages.RemovePage("domains")
				se.showDomainList()
			} else {
				se.pages.RemovePage("delete-domain-confirm")
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("delete-domain-confirm")
			return nil
		}
		return event
	})

	se.pages.AddAndSwitchToPage("delete-domain-confirm", modal, true)
}
73
pkg/ui/editor.go
Normal file
@@ -0,0 +1,73 @@
package ui

import (
	"fmt"
	"sort"

	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// SchemaEditor represents the interactive schema editor
type SchemaEditor struct {
	db         *models.Database
	app        *tview.Application
	pages      *tview.Pages
	loadConfig *LoadConfig
	saveConfig *SaveConfig
}

// NewSchemaEditor creates a new schema editor
func NewSchemaEditor(db *models.Database) *SchemaEditor {
	return &SchemaEditor{
		db:         db,
		app:        tview.NewApplication(),
		pages:      tview.NewPages(),
		loadConfig: nil,
		saveConfig: nil,
	}
}

// NewSchemaEditorWithConfigs creates a new schema editor with load/save configurations
func NewSchemaEditorWithConfigs(db *models.Database, loadConfig *LoadConfig, saveConfig *SaveConfig) *SchemaEditor {
	return &SchemaEditor{
		db:         db,
		app:        tview.NewApplication(),
		pages:      tview.NewPages(),
		loadConfig: loadConfig,
		saveConfig: saveConfig,
	}
}

// Run starts the interactive editor
func (se *SchemaEditor) Run() error {
	// If no database is loaded, show load screen
	if se.db == nil {
		se.showLoadScreen()
	} else {
		// Create main menu view
		mainMenu := se.createMainMenu()
		se.pages.AddPage("main", mainMenu, true, true)
	}

	// Run the application
	if err := se.app.SetRoot(se.pages, true).Run(); err != nil {
		return fmt.Errorf("application error: %w", err)
	}

	return nil
}

// GetDatabase returns the current database
func (se *SchemaEditor) GetDatabase() *models.Database {
	return se.db
}

// Helper function to get sorted column names
func getColumnNames(table *models.Table) []string {
	names := make([]string, 0, len(table.Columns))
	for name := range table.Columns {
		names = append(names, name)
	}
	// Map iteration order is random in Go; sort so the result is actually sorted,
	// as the doc comment promises.
	sort.Strings(names)
	return names
}
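The exported surface above is enough to embed the editor; a minimal entry-point sketch (passing `nil` sends `Run` to the interactive load screen first):

```go
package main

import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/ui"
)

func main() {
	// With a nil database, Run shows the load screen before the main menu.
	editor := ui.NewSchemaEditor(nil)
	if err := editor.Run(); err != nil {
		log.Fatal(err)
	}
}
```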
791
pkg/ui/load_save_screens.go
Normal file
@@ -0,0 +1,791 @@
package ui

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/merge"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	rbun "git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
	rdbml "git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	rdctx "git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
	rdrawdb "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
	rdrizzle "git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
	rgorm "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
	rgraphql "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
	rjson "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
	rpgsql "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
	rprisma "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
	rtypeorm "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
	ryaml "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)

// LoadConfig holds the configuration for loading a database
type LoadConfig struct {
	SourceType string
	FilePath   string
	ConnString string
}

// SaveConfig holds the configuration for saving a database
type SaveConfig struct {
	TargetType string
	FilePath   string
	ConnString string
}

// showLoadScreen displays the database load screen
func (se *SchemaEditor) showLoadScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Load Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Load Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// File path input
	filePath := ""
	connString := ""

	form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", "", 50, nil, func(value string) {
		filePath = value
	})

	form.AddInputField("Connection String", "", 50, nil, func(value string) {
		connString = value
	})

	form.AddTextView("Help", getLoadHelpText(), 0, 5, true, false)

	// Buttons
	form.AddButton("Load [l]", func() {
		se.loadDatabase(currentFormat, filePath, connString)
	})

	form.AddButton("Create New [n]", func() {
		se.createNewDatabase()
	})

	form.AddButton("Exit [q]", func() {
		se.app.Stop()
	})

	// Keyboard shortcuts. Note: SetInputCapture replaces any previous handler,
	// so Escape and the rune shortcuts must live in this single capture.
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.app.Stop()
			return nil
		}
		switch event.Rune() {
		case 'l':
			se.loadDatabase(currentFormat, filePath, connString)
			return nil
		case 'n':
			se.createNewDatabase()
			return nil
		case 'q':
			se.app.Stop()
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("load-database", flex, true)
}

// showSaveScreen displays the save database screen
func (se *SchemaEditor) showSaveScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Save Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Save Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// File path input
	filePath := ""
	if se.saveConfig != nil {
		// Pre-populate with existing save config
		for i, format := range formatOptions {
			if format == se.saveConfig.TargetType {
				selectedFormat = i
				currentFormat = format
				break
			}
		}
		filePath = se.saveConfig.FilePath
	}

	form.AddDropDown("Format", formatOptions, selectedFormat, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", filePath, 50, nil, func(value string) {
		filePath = value
	})

	form.AddTextView("Help", getSaveHelpText(), 0, 5, true, false)

	// Buttons
	form.AddButton("Save [s]", func() {
		se.saveDatabase(currentFormat, filePath)
	})

	form.AddButton("Update Existing Database [u]", func() {
		// Either a save config or a load config gives us a source to overwrite
		if se.saveConfig != nil || se.loadConfig != nil {
			se.showUpdateExistingDatabaseConfirm()
		} else {
			se.showErrorDialog("Error", "No database source found. Use Save instead.")
		}
	})

	form.AddButton("Back [b]", func() {
		se.pages.RemovePage("save-database")
		se.pages.SwitchToPage("main")
	})

	// Keyboard shortcuts
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("save-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		switch event.Rune() {
		case 's':
			se.saveDatabase(currentFormat, filePath)
			return nil
		case 'u':
			// Either a save config or a load config gives us a source to overwrite
			if se.saveConfig != nil || se.loadConfig != nil {
				se.showUpdateExistingDatabaseConfirm()
			} else {
				se.showErrorDialog("Error", "No database source found. Use Save instead.")
			}
			return nil
		case 'b':
			se.pages.RemovePage("save-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("save-database", flex, true)
}

// loadDatabase loads a database from the specified configuration
func (se *SchemaEditor) loadDatabase(format, filePath, connString string) {
	// Validate input
	if format == "pgsql" {
		if connString == "" {
			se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
			return
		}
	} else {
		if filePath == "" {
			se.showErrorDialog("Error", "File path is required for "+format)
			return
		}
		// Expand home directory
		if len(filePath) > 0 && filePath[0] == '~' {
			home, err := os.UserHomeDir()
			if err == nil {
				filePath = filepath.Join(home, filePath[1:])
			}
		}
	}

	// Create reader
	var reader readers.Reader
	switch format {
	case "dbml":
		reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
		return
	}

	// Read database
	db, err := reader.ReadDatabase()
	if err != nil {
		se.showErrorDialog("Load Error", fmt.Sprintf("Failed to load database: %v", err))
		return
	}

	// Store load config
	se.loadConfig = &LoadConfig{
		SourceType: format,
		FilePath:   filePath,
		ConnString: connString,
	}

	// Update database
	se.db = db

	// Show success and switch to main menu
	se.showSuccessDialog("Load Complete", fmt.Sprintf("Successfully loaded database '%s'", db.Name), func() {
		se.pages.RemovePage("load-database")
		se.pages.RemovePage("main")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})
}

// saveDatabase saves the database to the specified configuration
func (se *SchemaEditor) saveDatabase(format, filePath string) {
	// Validate input
	if format == "pgsql" {
		se.showErrorDialog("Error", "Direct PostgreSQL save is not supported from the UI. Use --to pgsql --to-path output.sql")
		return
	}

	if filePath == "" {
		se.showErrorDialog("Error", "File path is required")
		return
	}

	// Expand home directory
	if len(filePath) > 0 && filePath[0] == '~' {
		home, err := os.UserHomeDir()
		if err == nil {
			filePath = filepath.Join(home, filePath[1:])
		}
	}

	// Create writer
	var writer writers.Writer
	switch format {
	case "dbml":
		writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "dctx":
		writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drawdb":
		writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "graphql":
		writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "json":
		writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "yaml":
		writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "gorm":
		writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "bun":
		writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "drizzle":
		writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "prisma":
		writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "typeorm":
		writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	case "pgsql":
		writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
		return
	}

	// Write database
	err := writer.WriteDatabase(se.db)
	if err != nil {
		se.showErrorDialog("Save Error", fmt.Sprintf("Failed to save database: %v", err))
		return
	}

	// Store save config
	se.saveConfig = &SaveConfig{
		TargetType: format,
		FilePath:   filePath,
	}

	// Show success
	se.showSuccessDialog("Save Complete", fmt.Sprintf("Successfully saved database to %s", filePath), func() {
		se.pages.RemovePage("save-database")
		se.pages.SwitchToPage("main")
	})
}

// createNewDatabase creates a new empty database
func (se *SchemaEditor) createNewDatabase() {
	// Create a new empty database
	se.db = &models.Database{
		Name:    "New Database",
		Schemas: []*models.Schema{},
	}

	// Clear load config
	se.loadConfig = nil

	// Show success and switch to main menu
	se.showSuccessDialog("New Database", "Created new empty database", func() {
		se.pages.RemovePage("load-database")
		se.pages.AddPage("main", se.createMainMenu(), true, true)
	})
}

// showErrorDialog displays an error dialog
func (se *SchemaEditor) showErrorDialog(_title, message string) {
	modal := tview.NewModal().
		SetText(message).
		AddButtons([]string{"OK"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage("error-dialog")
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("error-dialog")
			return nil
		}
		return event
	})

	se.pages.AddPage("error-dialog", modal, true, true)
}

// showSuccessDialog displays a success dialog
func (se *SchemaEditor) showSuccessDialog(_title, message string, onClose func()) {
	modal := tview.NewModal().
		SetText(message).
		AddButtons([]string{"OK"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage("success-dialog")
			if onClose != nil {
				onClose()
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("success-dialog")
			if onClose != nil {
				onClose()
			}
			return nil
		}
		return event
	})

	se.pages.AddPage("success-dialog", modal, true, true)
}

// getLoadHelpText returns the help text for the load screen
func getLoadHelpText() string {
	return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
Database formats: pgsql (requires connection string)

Examples:
- File path: ~/schemas/mydb.dbml or /path/to/schema.json
- Connection: postgres://user:pass@localhost/dbname`
}

// showUpdateExistingDatabaseConfirm displays a confirmation dialog before updating existing database
func (se *SchemaEditor) showUpdateExistingDatabaseConfirm() {
	// Use saveConfig if available, otherwise use loadConfig
	var targetType, targetPath string
	if se.saveConfig != nil {
		targetType = se.saveConfig.TargetType
		targetPath = se.saveConfig.FilePath
	} else if se.loadConfig != nil {
		targetType = se.loadConfig.SourceType
		targetPath = se.loadConfig.FilePath
	} else {
		return
	}

	confirmText := fmt.Sprintf("Update existing database?\n\nFormat: %s\nPath: %s\n\nThis will overwrite the source.",
		targetType, targetPath)

	modal := tview.NewModal().
		SetText(confirmText).
		AddButtons([]string{"Cancel", "Update"}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			if buttonLabel == "Update" {
				se.pages.RemovePage("update-confirm")
				se.pages.RemovePage("save-database")
				se.saveDatabase(targetType, targetPath)
				se.pages.SwitchToPage("main")
			} else {
				se.pages.RemovePage("update-confirm")
			}
		})

	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("update-confirm")
			return nil
		}
		return event
	})

	se.pages.AddAndSwitchToPage("update-confirm", modal, true)
}

// getSaveHelpText returns the help text for the save screen
func getSaveHelpText() string {
	return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql (SQL export)

Examples:
- File: ~/schemas/mydb.dbml
- Directory (for code formats): ./models/`
}

// showImportScreen displays the import/merge database screen
func (se *SchemaEditor) showImportScreen() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Import & Merge Database Schema").
		SetTextAlign(tview.AlignCenter).
		SetDynamicColors(true)

	// Form
	form := tview.NewForm()
	form.SetBorder(true).SetTitle(" Import Configuration ").SetTitleAlign(tview.AlignLeft)

	// Format selection
	formatOptions := []string{
		"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
		"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
	}
	selectedFormat := 0
	currentFormat := formatOptions[selectedFormat]

	// File path input
	filePath := ""
	connString := ""
	skipDomains := false
	skipRelations := false
	skipEnums := false
	skipViews := false
	skipSequences := false
	skipTables := ""

	form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
		selectedFormat = index
		currentFormat = option
	})

	form.AddInputField("File Path", "", 50, nil, func(value string) {
		filePath = value
	})

	form.AddInputField("Connection String", "", 50, nil, func(value string) {
		connString = value
	})

	form.AddInputField("Skip Tables (comma-separated)", "", 50, nil, func(value string) {
		skipTables = value
	})

	form.AddCheckbox("Skip Domains", false, func(checked bool) {
		skipDomains = checked
	})

	form.AddCheckbox("Skip Relations", false, func(checked bool) {
		skipRelations = checked
	})

	form.AddCheckbox("Skip Enums", false, func(checked bool) {
		skipEnums = checked
	})

	form.AddCheckbox("Skip Views", false, func(checked bool) {
		skipViews = checked
	})

	form.AddCheckbox("Skip Sequences", false, func(checked bool) {
		skipSequences = checked
	})

	form.AddTextView("Help", getImportHelpText(), 0, 7, true, false)

	// Buttons
	form.AddButton("Import & Merge [i]", func() {
		se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
	})

	form.AddButton("Back [b]", func() {
		se.pages.RemovePage("import-database")
		se.pages.SwitchToPage("main")
	})

	form.AddButton("Exit [q]", func() {
		se.app.Stop()
	})

	// Keyboard shortcuts
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("import-database")
			se.pages.SwitchToPage("main")
			return nil
		}
		switch event.Rune() {
		case 'i':
			se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
			return nil
		case 'b':
			se.pages.RemovePage("import-database")
			se.pages.SwitchToPage("main")
			return nil
		case 'q':
			se.app.Stop()
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(form, 0, 1, true)

	se.pages.AddAndSwitchToPage("import-database", flex, true)
}

// importAndMergeDatabase imports and merges a database from the specified configuration
func (se *SchemaEditor) importAndMergeDatabase(format, filePath, connString string, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
	// Validate input
	if format == "pgsql" {
		if connString == "" {
			se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
			return
		}
	} else {
		if filePath == "" {
			se.showErrorDialog("Error", "File path is required for "+format)
			return
		}
		// Expand home directory
		if len(filePath) > 0 && filePath[0] == '~' {
			home, err := os.UserHomeDir()
			if err == nil {
				filePath = filepath.Join(home, filePath[1:])
			}
		}
	}

	// Create reader
	var reader readers.Reader
	switch format {
	case "dbml":
		reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "graphql":
		reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml":
		reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "gorm":
		reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "bun":
		reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drizzle":
		reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "prisma":
		reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "typeorm":
		reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql":
		reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		se.showErrorDialog("Error", "Unsupported format: "+format)
|
||||
return
|
||||
}
|
||||
|
||||
// Read the database to import
|
||||
importDb, err := reader.ReadDatabase()
|
||||
if err != nil {
|
||||
se.showErrorDialog("Import Error", fmt.Sprintf("Failed to read database: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Show confirmation dialog
|
||||
se.showImportConfirmation(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
|
||||
}
|
||||
|
||||
// showImportConfirmation shows a confirmation dialog before merging
|
||||
func (se *SchemaEditor) showImportConfirmation(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
|
||||
confirmText := fmt.Sprintf("Import & Merge Database?\n\nSource: %s\nTarget: %s\n\nThis will add missing schemas, tables, columns, and other objects from the source to your database.\n\nExisting items will NOT be modified.",
|
||||
importDb.Name, se.db.Name)
|
||||
|
||||
modal := tview.NewModal().
|
||||
SetText(confirmText).
|
||||
AddButtons([]string{"Cancel", "Merge"}).
|
||||
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||
se.pages.RemovePage("import-confirm")
|
||||
if buttonLabel == "Merge" {
|
||||
se.performMerge(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
|
||||
}
|
||||
})
|
||||
|
||||
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||
if event.Key() == tcell.KeyEscape {
|
||||
se.pages.RemovePage("import-confirm")
|
||||
se.pages.SwitchToPage("import-database")
|
||||
return nil
|
||||
}
|
||||
return event
|
||||
})
|
||||
|
||||
se.pages.AddAndSwitchToPage("import-confirm", modal, true)
|
||||
}
|
||||
|
||||
// performMerge performs the actual merge operation
|
||||
func (se *SchemaEditor) performMerge(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
|
||||
// Create merge options
|
||||
opts := &merge.MergeOptions{
|
||||
SkipDomains: skipDomains,
|
||||
SkipRelations: skipRelations,
|
||||
SkipEnums: skipEnums,
|
||||
SkipViews: skipViews,
|
||||
SkipSequences: skipSequences,
|
||||
}
|
||||
|
||||
// Parse skip tables
|
||||
if skipTables != "" {
|
||||
opts.SkipTableNames = parseSkipTablesUI(skipTables)
|
||||
}
|
||||
|
||||
// Perform the merge
|
||||
result := merge.MergeDatabases(se.db, importDb, opts)
|
||||
|
||||
// Update the database timestamp
|
||||
se.db.UpdateDate()
|
||||
|
||||
// Show success dialog with summary
|
||||
summary := merge.GetMergeSummary(result)
|
||||
se.showSuccessDialog("Import Complete", summary, func() {
|
||||
se.pages.RemovePage("import-database")
|
||||
se.pages.RemovePage("main")
|
||||
se.pages.AddPage("main", se.createMainMenu(), true, true)
|
||||
})
|
||||
}
|
||||
|
||||
// getImportHelpText returns the help text for the import screen
|
||||
func getImportHelpText() string {
|
||||
return `Import & Merge: Adds missing schemas, tables, columns, and other objects to your existing database.
|
||||
|
||||
File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
|
||||
Database formats: pgsql (requires connection string)
|
||||
|
||||
Skip options: Check to exclude specific object types from the merge.`
|
||||
}
|
||||
|
||||
func parseSkipTablesUI(skipTablesStr string) map[string]bool {
|
||||
skipTables := make(map[string]bool)
|
||||
if skipTablesStr == "" {
|
||||
return skipTables
|
||||
}
|
||||
|
||||
// Split by comma and trim whitespace
|
||||
parts := strings.Split(skipTablesStr, ",")
|
||||
for _, part := range parts {
|
||||
trimmed := strings.TrimSpace(part)
|
||||
if trimmed != "" {
|
||||
// Store in lowercase for case-insensitive matching
|
||||
skipTables[strings.ToLower(trimmed)] = true
|
||||
}
|
||||
}
|
||||
|
||||
return skipTables
|
||||
}
|
||||

65 pkg/ui/main_menu.go Normal file
@@ -0,0 +1,65 @@
package ui

import (
	"fmt"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

// createMainMenu creates the main menu screen
func (se *SchemaEditor) createMainMenu() tview.Primitive {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title with database name
	dbName := se.db.Name
	if dbName == "" {
		dbName = "Untitled"
	}
	updateAtStr := ""
	if se.db.UpdatedAt != "" {
		updateAtStr = fmt.Sprintf("Updated @ %s", se.db.UpdatedAt)
	}
	titleText := fmt.Sprintf("[::b]RelSpec Schema Editor\n[::d]Database: %s %s\n[::d]Press arrow keys to navigate, Enter to select", dbName, updateAtStr)
	title := tview.NewTextView().
		SetText(titleText).
		SetDynamicColors(true)

	// Menu options
	menu := tview.NewList().
		AddItem("Edit Database", "Edit database name, description, and properties", 'e', func() {
			se.showEditDatabaseForm()
		}).
		AddItem("Manage Schemas", "View, create, edit, and delete schemas", 's', func() {
			se.showSchemaList()
		}).
		AddItem("Manage Tables", "View and manage tables in schemas", 't', func() {
			se.showTableList()
		}).
		AddItem("Manage Domains", "View, create, edit, and delete domains", 'd', func() {
			se.showDomainList()
		}).
		AddItem("Import & Merge", "Import and merge schema from another database", 'i', func() {
			se.showImportScreen()
		}).
		AddItem("Save Database", "Save database to file or database", 'w', func() {
			se.showSaveScreen()
		}).
		AddItem("Exit Editor", "Exit the editor", 'q', func() {
			se.app.Stop()
		})

	menu.SetBorder(true).SetTitle(" Menu ").SetTitleAlign(tview.AlignLeft)
	menu.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitEditorConfirm()
			return nil
		}
		return event
	})

	flex.AddItem(title, 5, 0, false).
		AddItem(menu, 0, 1, true)

	return flex
}

55 pkg/ui/schema_dataops.go Normal file
@@ -0,0 +1,55 @@
package ui

import "git.warky.dev/wdevs/relspecgo/pkg/models"

// Schema data operations - business logic for schema management

// CreateSchema creates a new schema and adds it to the database
func (se *SchemaEditor) CreateSchema(name, description string) *models.Schema {
	newSchema := &models.Schema{
		Name:        name,
		Description: description,
		Tables:      make([]*models.Table, 0),
		Sequences:   make([]*models.Sequence, 0),
		Enums:       make([]*models.Enum, 0),
	}
	se.db.UpdateDate()
	se.db.Schemas = append(se.db.Schemas, newSchema)
	return newSchema
}

// UpdateSchema updates an existing schema's properties
func (se *SchemaEditor) UpdateSchema(schemaIndex int, name, owner, description string) {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return
	}
	se.db.UpdateDate()
	schema := se.db.Schemas[schemaIndex]
	schema.Name = name
	schema.Owner = owner
	schema.Description = description
	schema.UpdateDate()
}

// DeleteSchema removes a schema from the database
func (se *SchemaEditor) DeleteSchema(schemaIndex int) bool {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return false
	}
	se.db.UpdateDate()
	se.db.Schemas = append(se.db.Schemas[:schemaIndex], se.db.Schemas[schemaIndex+1:]...)
	return true
}

// GetSchema returns a schema by index
func (se *SchemaEditor) GetSchema(schemaIndex int) *models.Schema {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}
	return se.db.Schemas[schemaIndex]
}

// GetAllSchemas returns all schemas
func (se *SchemaEditor) GetAllSchemas() []*models.Schema {
	return se.db.Schemas
}

362 pkg/ui/schema_screens.go Normal file
@@ -0,0 +1,362 @@
package ui

import (
	"fmt"
	"strings"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// showSchemaList displays the schema management screen
func (se *SchemaEditor) showSchemaList() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]Manage Schemas").
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Create schemas table
	schemaTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)

	// Add header row with padding for full width
	headers := []string{"Name", "Sequence", "Total Tables", "Total Sequences", "Total Views", "GUID", "Description"}
	headerWidths := []int{20, 15, 20, 20, 15, 36} // Last column takes remaining space
	for i, header := range headers {
		padding := ""
		if i < len(headerWidths) {
			padding = strings.Repeat(" ", headerWidths[i]-len(header))
		}
		cell := tview.NewTableCell(header + padding).
			SetTextColor(tcell.ColorYellow).
			SetSelectable(false).
			SetAlign(tview.AlignLeft)
		schemaTable.SetCell(0, i, cell)
	}

	// Add existing schemas
	for row, schema := range se.db.Schemas {
		schema := schema // capture for closure

		// Name - pad to 20 chars
		nameStr := fmt.Sprintf("%-20s", schema.Name)
		nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 0, nameCell)

		// Sequence - pad to 15 chars
		seqStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", schema.Sequence))
		seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 1, seqCell)

		// Total Tables - pad to 20 chars
		tablesStr := fmt.Sprintf("%-20s", fmt.Sprintf("%d", len(schema.Tables)))
		tablesCell := tview.NewTableCell(tablesStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 2, tablesCell)

		// Total Sequences - pad to 20 chars
		sequencesStr := fmt.Sprintf("%-20s", fmt.Sprintf("%d", len(schema.Sequences)))
		sequencesCell := tview.NewTableCell(sequencesStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 3, sequencesCell)

		// Total Views - pad to 15 chars
		viewsStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", len(schema.Views)))
		viewsCell := tview.NewTableCell(viewsStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 4, viewsCell)

		// GUID - pad to 36 chars
		guidStr := fmt.Sprintf("%-36s", schema.GUID)
		guidCell := tview.NewTableCell(guidStr).SetSelectable(true)
		schemaTable.SetCell(row+1, 5, guidCell)

		// Description - no padding, takes remaining space
		descCell := tview.NewTableCell(schema.Description).SetSelectable(true)
		schemaTable.SetCell(row+1, 6, descCell)
	}

	schemaTable.SetTitle(" Schemas ").SetBorder(true).SetTitleAlign(tview.AlignLeft)

	// Action buttons flex (define before input capture)
	btnFlex := tview.NewFlex()
	btnNewSchema := tview.NewButton("New Schema [n]").SetSelectedFunc(func() {
		se.showNewSchemaDialog()
	})
	btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
		se.pages.SwitchToPage("main")
		se.pages.RemovePage("schemas")
	})

	// Set up button input captures for Tab/Shift+Tab navigation
	btnNewSchema.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(schemaTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNewSchema)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(schemaTable)
			return nil
		}
		return event
	})

	btnFlex.AddItem(btnNewSchema, 0, 1, true).
		AddItem(btnBack, 0, 1, false)

	schemaTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("schemas")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNewSchema)
			return nil
		}
		if event.Key() == tcell.KeyEnter {
			row, _ := schemaTable.GetSelection()
			if row > 0 && row <= len(se.db.Schemas) { // Skip header row
				schemaIndex := row - 1
				se.showSchemaEditor(schemaIndex, se.db.Schemas[schemaIndex])
				return nil
			}
		}
		if event.Rune() == 'n' {
			se.showNewSchemaDialog()
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("schemas")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(schemaTable, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("schemas", flex, true, true)
}

// showSchemaEditor shows the editor for a specific schema
func (se *SchemaEditor) showSchemaEditor(index int, schema *models.Schema) {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText(fmt.Sprintf("[::b]Schema: %s", schema.Name)).
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Schema info display
	info := tview.NewTextView().SetDynamicColors(true)
	info.SetText(fmt.Sprintf("Tables: %d | Description: %s",
		len(schema.Tables), schema.Description))

	// Table list
	tableList := tview.NewList().ShowSecondaryText(true)

	for i, table := range schema.Tables {
		tableIndex := i
		table := table
		colCount := len(table.Columns)
		tableList.AddItem(table.Name, fmt.Sprintf("%d columns", colCount), rune('0'+i), func() {
			se.showTableEditor(index, tableIndex, table)
		})
	}

	tableList.AddItem("[New Table]", "Add a new table to this schema", 'n', func() {
		se.showNewTableDialog(index)
	})

	tableList.AddItem("[Edit Schema Info]", "Edit schema properties", 'e', func() {
		se.showEditSchemaDialog(index)
	})

	tableList.AddItem("[Delete Schema]", "Delete this schema", 'd', func() {
		se.showDeleteSchemaConfirm(index)
	})

	tableList.SetBorder(true).SetTitle(" Tables ").SetTitleAlign(tview.AlignLeft)

	// Action buttons (define before input capture)
	btnFlex := tview.NewFlex()
	btnNewTable := tview.NewButton("New Table [n]").SetSelectedFunc(func() {
		se.showNewTableDialog(index)
	})
	btnBack := tview.NewButton("Back to Schemas [b]").SetSelectedFunc(func() {
		se.pages.RemovePage("schema-editor")
		se.pages.SwitchToPage("schemas")
	})

	// Set up button input captures for Tab/Shift+Tab navigation
	btnNewTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(tableList)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNewTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(tableList)
			return nil
		}
		return event
	})

	tableList.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage("schema-editor")
			se.pages.SwitchToPage("schemas")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNewTable)
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.RemovePage("schema-editor")
			se.pages.SwitchToPage("schemas")
		}
		return event
	})

	btnFlex.AddItem(btnNewTable, 0, 1, true).
		AddItem(btnBack, 0, 1, false)

	flex.AddItem(title, 1, 0, false).
		AddItem(info, 2, 0, false).
		AddItem(tableList, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("schema-editor", flex, true, true)
}

// showNewSchemaDialog shows dialog to create a new schema
func (se *SchemaEditor) showNewSchemaDialog() {
	form := tview.NewForm()

	schemaName := ""
	description := ""

	form.AddInputField("Schema Name", "", 40, nil, func(value string) {
		schemaName = value
	})

	form.AddInputField("Description", "", 40, nil, func(value string) {
		description = value
	})

	form.AddButton("Save", func() {
		if schemaName == "" {
			return
		}

		se.CreateSchema(schemaName, description)

		se.pages.RemovePage("new-schema")
		se.pages.RemovePage("schemas")
		se.showSchemaList()
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("new-schema")
		se.pages.RemovePage("schemas")
		se.showSchemaList()
	})

	form.SetBorder(true).SetTitle(" New Schema ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("new-schema", "schemas")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-schema", form, true, true)
}

// showEditSchemaDialog shows dialog to edit schema properties
func (se *SchemaEditor) showEditSchemaDialog(schemaIndex int) {
	schema := se.db.Schemas[schemaIndex]
	form := tview.NewForm()

	// Local variables to collect changes
	newName := schema.Name
	newOwner := schema.Owner
	newDescription := schema.Description
	newGUID := schema.GUID

	form.AddInputField("Schema Name", schema.Name, 40, nil, func(value string) {
		newName = value
	})

	form.AddInputField("Owner", schema.Owner, 40, nil, func(value string) {
		newOwner = value
	})

	form.AddTextArea("Description", schema.Description, 40, 5, 0, func(value string) {
		newDescription = value
	})

	form.AddInputField("GUID", schema.GUID, 40, nil, func(value string) {
		newGUID = value
	})

	form.AddButton("Save", func() {
		// Apply changes using dataops
		se.UpdateSchema(schemaIndex, newName, newOwner, newDescription)
		se.db.Schemas[schemaIndex].GUID = newGUID

		schema := se.db.Schemas[schemaIndex]
		se.pages.RemovePage("edit-schema")
		se.pages.RemovePage("schema-editor")
		se.showSchemaEditor(schemaIndex, schema)
	})

	form.AddButton("Back", func() {
		// Discard changes - don't apply them
		schema := se.db.Schemas[schemaIndex]
		se.pages.RemovePage("edit-schema")
		se.pages.RemovePage("schema-editor")
		se.showSchemaEditor(schemaIndex, schema)
	})

	form.SetBorder(true).SetTitle(" Edit Schema ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("edit-schema", "schema-editor")
			return nil
		}
		return event
	})

	se.pages.AddPage("edit-schema", form, true, true)
}

88 pkg/ui/table_dataops.go Normal file
@@ -0,0 +1,88 @@
package ui

import "git.warky.dev/wdevs/relspecgo/pkg/models"

// Table data operations - business logic for table management

// CreateTable creates a new table and adds it to a schema
func (se *SchemaEditor) CreateTable(schemaIndex int, name, description string) *models.Table {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}

	schema := se.db.Schemas[schemaIndex]
	newTable := &models.Table{
		Name:        name,
		Schema:      schema.Name,
		Description: description,
		Columns:     make(map[string]*models.Column),
		Constraints: make(map[string]*models.Constraint),
		Indexes:     make(map[string]*models.Index),
	}
	schema.UpdateDate()
	schema.Tables = append(schema.Tables, newTable)
	return newTable
}

// UpdateTable updates an existing table's properties
func (se *SchemaEditor) UpdateTable(schemaIndex, tableIndex int, name, description string) {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return
	}
	schema.UpdateDate()
	table := schema.Tables[tableIndex]
	table.Name = name
	table.Description = description
	table.UpdateDate()
}

// DeleteTable removes a table from a schema
func (se *SchemaEditor) DeleteTable(schemaIndex, tableIndex int) bool {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return false
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return false
	}
	schema.UpdateDate()
	schema.Tables = append(schema.Tables[:tableIndex], schema.Tables[tableIndex+1:]...)
	return true
}

// GetTable returns a table by schema and table index
func (se *SchemaEditor) GetTable(schemaIndex, tableIndex int) *models.Table {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}

	schema := se.db.Schemas[schemaIndex]
	if tableIndex < 0 || tableIndex >= len(schema.Tables) {
		return nil
	}

	return schema.Tables[tableIndex]
}

// GetAllTables returns all tables across all schemas
func (se *SchemaEditor) GetAllTables() []*models.Table {
	var tables []*models.Table
	for _, schema := range se.db.Schemas {
		tables = append(tables, schema.Tables...)
	}
	return tables
}

// GetTablesInSchema returns all tables in a specific schema
func (se *SchemaEditor) GetTablesInSchema(schemaIndex int) []*models.Table {
	if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
		return nil
	}
	return se.db.Schemas[schemaIndex].Tables
}

546 pkg/ui/table_screens.go Normal file
@@ -0,0 +1,546 @@
package ui

import (
	"fmt"
	"strings"

	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// showTableList displays all tables across all schemas
func (se *SchemaEditor) showTableList() {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText("[::b]All Tables").
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Create tables table
	tableTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)

	// Add header row with padding for full width
	headers := []string{"Name", "Schema", "Sequence", "Total Columns", "Total Relations", "Total Indexes", "GUID", "Description", "Comment"}
	headerWidths := []int{18, 15, 12, 14, 15, 14, 36, 0, 12} // Description gets remainder
	for i, header := range headers {
		padding := ""
		if i < len(headerWidths) && headerWidths[i] > 0 {
			padding = strings.Repeat(" ", headerWidths[i]-len(header))
		}
		cell := tview.NewTableCell(header + padding).
			SetTextColor(tcell.ColorYellow).
			SetSelectable(false).
			SetAlign(tview.AlignLeft)
		tableTable.SetCell(0, i, cell)
	}

	var tables []*models.Table
	var tableLocations []struct{ schemaIdx, tableIdx int }

	for si, schema := range se.db.Schemas {
		for ti, table := range schema.Tables {
			tables = append(tables, table)
			tableLocations = append(tableLocations, struct{ schemaIdx, tableIdx int }{si, ti})
		}
	}

	for row, table := range tables {
		tableIdx := tableLocations[row]
		schema := se.db.Schemas[tableIdx.schemaIdx]

		// Name - pad to 18 chars
		nameStr := fmt.Sprintf("%-18s", table.Name)
		nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
		tableTable.SetCell(row+1, 0, nameCell)

		// Schema - pad to 15 chars
		schemaStr := fmt.Sprintf("%-15s", schema.Name)
		schemaCell := tview.NewTableCell(schemaStr).SetSelectable(true)
		tableTable.SetCell(row+1, 1, schemaCell)

		// Sequence - pad to 12 chars
		seqStr := fmt.Sprintf("%-12s", fmt.Sprintf("%d", table.Sequence))
		seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
		tableTable.SetCell(row+1, 2, seqCell)

		// Total Columns - pad to 14 chars
		colsStr := fmt.Sprintf("%-14s", fmt.Sprintf("%d", len(table.Columns)))
		colsCell := tview.NewTableCell(colsStr).SetSelectable(true)
		tableTable.SetCell(row+1, 3, colsCell)

		// Total Relations - pad to 15 chars
		relsStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", len(table.Relationships)))
		relsCell := tview.NewTableCell(relsStr).SetSelectable(true)
		tableTable.SetCell(row+1, 4, relsCell)

		// Total Indexes - pad to 14 chars
		idxStr := fmt.Sprintf("%-14s", fmt.Sprintf("%d", len(table.Indexes)))
		idxCell := tview.NewTableCell(idxStr).SetSelectable(true)
		tableTable.SetCell(row+1, 5, idxCell)

		// GUID - pad to 36 chars
		guidStr := fmt.Sprintf("%-36s", table.GUID)
		guidCell := tview.NewTableCell(guidStr).SetSelectable(true)
		tableTable.SetCell(row+1, 6, guidCell)

		// Description - no padding, takes remaining space
		descCell := tview.NewTableCell(table.Description).SetSelectable(true)
		tableTable.SetCell(row+1, 7, descCell)

		// Comment - pad to 12 chars
		commentStr := fmt.Sprintf("%-12s", table.Comment)
		commentCell := tview.NewTableCell(commentStr).SetSelectable(true)
		tableTable.SetCell(row+1, 8, commentCell)
	}

	tableTable.SetTitle(" All Tables ").SetBorder(true).SetTitleAlign(tview.AlignLeft)

	// Action buttons (define before input capture)
	btnFlex := tview.NewFlex()
	btnNewTable := tview.NewButton("New Table [n]").SetSelectedFunc(func() {
		se.showNewTableDialogFromList()
	})
	btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
		se.pages.SwitchToPage("main")
		se.pages.RemovePage("tables")
	})

	// Set up button input captures for Tab/Shift+Tab navigation
	btnNewTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(tableTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNewTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(tableTable)
			return nil
		}
		return event
	})

	btnFlex.AddItem(btnNewTable, 0, 1, true).
		AddItem(btnBack, 0, 1, false)

	tableTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("tables")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNewTable)
			return nil
		}
		if event.Key() == tcell.KeyEnter {
			row, _ := tableTable.GetSelection()
			if row > 0 && row <= len(tables) { // Skip header row
				tableIdx := tableLocations[row-1]
				se.showTableEditor(tableIdx.schemaIdx, tableIdx.tableIdx, tables[row-1])
				return nil
			}
		}
		if event.Rune() == 'n' {
			se.showNewTableDialogFromList()
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.SwitchToPage("main")
			se.pages.RemovePage("tables")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(tableTable, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("tables", flex, true, true)
}

// showTableEditor shows editor for a specific table
func (se *SchemaEditor) showTableEditor(schemaIndex, tableIndex int, table *models.Table) {
	flex := tview.NewFlex().SetDirection(tview.FlexRow)

	// Title
	title := tview.NewTextView().
		SetText(fmt.Sprintf("[::b]Table: %s", table.Name)).
		SetDynamicColors(true).
		SetTextAlign(tview.AlignCenter)

	// Table info
	info := tview.NewTextView().SetDynamicColors(true)
	info.SetText(fmt.Sprintf("Schema: %s | Columns: %d | Description: %s",
		table.Schema, len(table.Columns), table.Description))

	// Create columns table
	colTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)

	// Add header row with padding for full width
	headers := []string{"Name", "Type", "Default", "KeyType", "GUID", "Description"}
	headerWidths := []int{20, 18, 15, 15, 36} // Last column takes remaining space
	for i, header := range headers {
		padding := ""
		if i < len(headerWidths) {
			padding = strings.Repeat(" ", headerWidths[i]-len(header))
		}
		cell := tview.NewTableCell(header + padding).
			SetTextColor(tcell.ColorYellow).
			SetSelectable(false).
			SetAlign(tview.AlignLeft)
		colTable.SetCell(0, i, cell)
	}

	// Get sorted column names
	columnNames := getColumnNames(table)
	for row, colName := range columnNames {
		column := table.Columns[colName]

		// Name - pad to 20 chars
		nameStr := fmt.Sprintf("%-20s", colName)
		nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
		colTable.SetCell(row+1, 0, nameCell)

		// Type - pad to 18 chars
		typeStr := fmt.Sprintf("%-18s", column.Type)
		typeCell := tview.NewTableCell(typeStr).SetSelectable(true)
		colTable.SetCell(row+1, 1, typeCell)

		// Default - pad to 15 chars
		defaultStr := ""
		if column.Default != nil {
			defaultStr = fmt.Sprintf("%v", column.Default)
		}
		defaultStr = fmt.Sprintf("%-15s", defaultStr)
		defaultCell := tview.NewTableCell(defaultStr).SetSelectable(true)
		colTable.SetCell(row+1, 2, defaultCell)

		// KeyType - pad to 15 chars
		keyTypeStr := ""
		if column.IsPrimaryKey {
			keyTypeStr = "PRIMARY"
		} else if column.NotNull {
			keyTypeStr = "NOT NULL"
		}
		keyTypeStr = fmt.Sprintf("%-15s", keyTypeStr)
		keyTypeCell := tview.NewTableCell(keyTypeStr).SetSelectable(true)
		colTable.SetCell(row+1, 3, keyTypeCell)

		// GUID - pad to 36 chars
		guidStr := fmt.Sprintf("%-36s", column.GUID)
		guidCell := tview.NewTableCell(guidStr).SetSelectable(true)
		colTable.SetCell(row+1, 4, guidCell)

		// Description
		descCell := tview.NewTableCell(column.Description).SetSelectable(true)
		colTable.SetCell(row+1, 5, descCell)
	}

	colTable.SetTitle(" Columns ").SetBorder(true).SetTitleAlign(tview.AlignLeft)

	// Action buttons flex (define before input capture)
	btnFlex := tview.NewFlex()
	btnNewCol := tview.NewButton("Add Column [n]").SetSelectedFunc(func() {
		se.showNewColumnDialog(schemaIndex, tableIndex)
	})
	btnEditTable := tview.NewButton("Edit Table [e]").SetSelectedFunc(func() {
		se.showEditTableDialog(schemaIndex, tableIndex)
	})
	btnEditColumn := tview.NewButton("Edit Column [c]").SetSelectedFunc(func() {
		row, _ := colTable.GetSelection()
		if row > 0 && row <= len(columnNames) { // Skip header row
			colName := columnNames[row-1]
			column := table.Columns[colName]
			se.showColumnEditor(schemaIndex, tableIndex, row-1, column)
		}
	})
	btnDelTable := tview.NewButton("Delete Table [d]").SetSelectedFunc(func() {
		se.showDeleteTableConfirm(schemaIndex, tableIndex)
	})
	btnBack := tview.NewButton("Back to Schema [b]").SetSelectedFunc(func() {
		se.pages.RemovePage("table-editor")
		se.pages.SwitchToPage("schema-editor")
	})

	// Set up button input captures for Tab/Shift+Tab navigation
	btnNewCol.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(colTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnEditColumn)
			return nil
		}
		return event
	})

	btnEditColumn.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnNewCol)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnEditTable)
			return nil
		}
		return event
	})

	btnEditTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnEditColumn)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnDelTable)
			return nil
		}
		return event
	})

	btnDelTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnEditTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnBack)
			return nil
		}
		return event
	})

	btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyBacktab {
			se.app.SetFocus(btnDelTable)
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(colTable)
			return nil
		}
		return event
	})

	btnFlex.AddItem(btnNewCol, 0, 1, true).
		AddItem(btnEditColumn, 0, 1, false).
		AddItem(btnEditTable, 0, 1, false).
		AddItem(btnDelTable, 0, 1, false).
		AddItem(btnBack, 0, 1, false)

	colTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.SwitchToPage("schema-editor")
			se.pages.RemovePage("table-editor")
			return nil
		}
		if event.Key() == tcell.KeyTab {
			se.app.SetFocus(btnNewCol)
			return nil
		}
		if event.Key() == tcell.KeyEnter {
			row, _ := colTable.GetSelection()
			if row > 0 { // Skip header row
				colName := columnNames[row-1]
				column := table.Columns[colName]
				se.showColumnEditor(schemaIndex, tableIndex, row-1, column)
				return nil
			}
		}
		if event.Rune() == 'c' {
			row, _ := colTable.GetSelection()
			if row > 0 && row <= len(columnNames) { // Skip header row
				colName := columnNames[row-1]
				column := table.Columns[colName]
				se.showColumnEditor(schemaIndex, tableIndex, row-1, column)
			}
			return nil
		}
		if event.Rune() == 'b' {
			se.pages.RemovePage("table-editor")
			se.pages.SwitchToPage("schema-editor")
			return nil
		}
		return event
	})

	flex.AddItem(title, 1, 0, false).
		AddItem(info, 2, 0, false).
		AddItem(colTable, 0, 1, true).
		AddItem(btnFlex, 1, 0, false)

	se.pages.AddPage("table-editor", flex, true, true)
}

// showNewTableDialog shows dialog to create a new table
func (se *SchemaEditor) showNewTableDialog(schemaIndex int) {
	form := tview.NewForm()

	tableName := ""
	description := ""

	form.AddInputField("Table Name", "", 40, nil, func(value string) {
		tableName = value
	})

	form.AddInputField("Description", "", 40, nil, func(value string) {
		description = value
	})

	form.AddButton("Save", func() {
		if tableName == "" {
			return
		}

		se.CreateTable(schemaIndex, tableName, description)

		schema := se.db.Schemas[schemaIndex]
		se.pages.RemovePage("new-table")
		se.pages.RemovePage("schema-editor")
		se.showSchemaEditor(schemaIndex, schema)
	})

	form.AddButton("Back", func() {
		schema := se.db.Schemas[schemaIndex]
		se.pages.RemovePage("new-table")
		se.pages.RemovePage("schema-editor")
		se.showSchemaEditor(schemaIndex, schema)
	})

	form.SetBorder(true).SetTitle(" New Table ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("new-table", "schema-editor")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-table", form, true, true)
}

// showNewTableDialogFromList shows dialog to create a new table with schema selection
func (se *SchemaEditor) showNewTableDialogFromList() {
	form := tview.NewForm()

	tableName := ""
	description := ""
	selectedSchemaIdx := 0

	// Create schema dropdown options
	schemaOptions := make([]string, len(se.db.Schemas))
	for i, schema := range se.db.Schemas {
		schemaOptions[i] = schema.Name
	}

	form.AddInputField("Table Name", "", 40, nil, func(value string) {
		tableName = value
	})

	form.AddDropDown("Schema", schemaOptions, 0, func(option string, optionIndex int) {
		selectedSchemaIdx = optionIndex
	})

	form.AddInputField("Description", "", 40, nil, func(value string) {
		description = value
	})

	form.AddButton("Save", func() {
		if tableName == "" {
			return
		}

		se.CreateTable(selectedSchemaIdx, tableName, description)

		se.pages.RemovePage("new-table-from-list")
		se.pages.RemovePage("tables")
		se.showTableList()
	})

	form.AddButton("Back", func() {
		se.pages.RemovePage("new-table-from-list")
		se.pages.RemovePage("tables")
		se.showTableList()
	})

	form.SetBorder(true).SetTitle(" New Table ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("new-table-from-list", "tables")
			return nil
		}
		return event
	})

	se.pages.AddPage("new-table-from-list", form, true, true)
}

// showEditTableDialog shows dialog to edit table properties
func (se *SchemaEditor) showEditTableDialog(schemaIndex, tableIndex int) {
	table := se.db.Schemas[schemaIndex].Tables[tableIndex]
	form := tview.NewForm()

	// Local variables to collect changes
	newName := table.Name
	newDescription := table.Description
	newGUID := table.GUID

	form.AddInputField("Table Name", table.Name, 40, nil, func(value string) {
		newName = value
	})

	form.AddTextArea("Description", table.Description, 40, 5, 0, func(value string) {
		newDescription = value
	})

	form.AddInputField("GUID", table.GUID, 40, nil, func(value string) {
		newGUID = value
	})

	form.AddButton("Save", func() {
		// Apply changes using dataops
		se.UpdateTable(schemaIndex, tableIndex, newName, newDescription)
		se.db.Schemas[schemaIndex].Tables[tableIndex].GUID = newGUID

		table := se.db.Schemas[schemaIndex].Tables[tableIndex]
		se.pages.RemovePage("edit-table")
		se.pages.RemovePage("table-editor")
		se.showTableEditor(schemaIndex, tableIndex, table)
	})

	form.AddButton("Back", func() {
		// Discard changes - don't apply them
		table := se.db.Schemas[schemaIndex].Tables[tableIndex]
		se.pages.RemovePage("edit-table")
		se.pages.RemovePage("table-editor")
		se.showTableEditor(schemaIndex, tableIndex, table)
	})

	form.SetBorder(true).SetTitle(" Edit Table ").SetTitleAlign(tview.AlignLeft)
	form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.showExitConfirmation("edit-table", "table-editor")
			return nil
		}
		return event
	})

	se.pages.AddPage("edit-table", form, true, true)
}

299 pkg/ui/ui_rules.md Normal file
@@ -0,0 +1,299 @@
# UI Rules and Guidelines

## Layout Requirements

All layouts and forms must live in separate files according to their domain or entity.

### Screen Layout Structure

All screens must follow this consistent layout (a minimal sketch follows the list):

1. **Title at the Top** (1 row, fixed height)
   - Centered bold text: `[::b]Title Text`
   - Use `tview.NewTextView()` with `SetTextAlign(tview.AlignCenter)`
   - Enable dynamic colors: `SetDynamicColors(true)`

2. **Content in the Middle** (flexible height)
   - Tables, lists, forms, or info displays
   - Uses flex weight of 1 for dynamic sizing

3. **Action Buttons at the Bottom** (1 row, fixed height)
   - Must be in a horizontal flex container
   - Action buttons before Back button
   - Back button is always last
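As a minimal sketch of this structure (widget names such as `content` and `btnFlex` are illustrative, following the conventions in this package):

```go
// Title (1 row, fixed) -> content (flexible) -> buttons (1 row, fixed).
title := tview.NewTextView().
	SetText("[::b]Screen Title").
	SetTextAlign(tview.AlignCenter).
	SetDynamicColors(true)

flex := tview.NewFlex().SetDirection(tview.FlexRow).
	AddItem(title, 1, 0, false).  // fixed height of 1 row
	AddItem(content, 0, 1, true). // flex weight 1, takes remaining space
	AddItem(btnFlex, 1, 0, false) // fixed height of 1 row
```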
### Form Layout Structure

All forms must follow this button order:

1. **Save Button** (always first)
   - Label: "Save"
   - Primary action that commits changes

2. **Delete Button** (optional, only for edit forms)
   - Label: "Delete"
   - Only shown when editing existing items (not for new items)
   - Must show confirmation dialog before deletion

3. **Back Button** (always last)
   - Label: "Back"
   - Returns to previous screen without saving

**Button Order Examples:**
- **New Item Forms:** Save, Back
- **Edit Item Forms:** Save, Delete, Back

## Tab Navigation

All screens must implement circular tab navigation (see the sketch after this list):

1. **Tab Key** - Moves focus to the next focusable element
2. **Shift+Tab (BackTab)** - Moves focus to the previous focusable element
3. **At the End** - Tab cycles back to the first element
4. **At the Start** - Shift+Tab cycles back to the last element

**Navigation Flow Pattern:**
- Each widget must handle both Tab and BackTab
- First widget: BackTab → Last widget, Tab → Second widget
- Middle widgets: BackTab → Previous widget, Tab → Next widget
- Last widget: BackTab → Previous widget, Tab → First widget
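A condensed sketch of this wiring between a table and a single button, matching the input captures used in the screen files (widget names are illustrative):

```go
// With only two focusable widgets, Tab and BackTab both wrap to the other one.
schemaTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
	if event.Key() == tcell.KeyTab || event.Key() == tcell.KeyBacktab {
		se.app.SetFocus(btnBack)
		return nil
	}
	return event
})

btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
	if event.Key() == tcell.KeyTab || event.Key() == tcell.KeyBacktab {
		se.app.SetFocus(schemaTable)
		return nil
	}
	return event
})
```

With three or more widgets, each capture sends Tab to the next widget and BackTab to the previous one, closing the circle at both ends.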
## Keyboard Shortcuts

### Standard Keys

- **ESC** - Cancel current operation or go back to previous screen
- **Tab** - Move focus forward (circular)
- **Shift+Tab** - Move focus backward (circular)
- **Enter** - Activate/select current item in tables and lists

### Letter Key Shortcuts

Handled per screen in the same input capture as the standard keys (see the sketch after this list):

- **'n'** - New (create new item)
- **'b'** - Back (return to previous screen)
- **'e'** - Edit (edit current item)
- **'d'** - Delete (delete current item)
- **'c'** - Edit Column (in table editor)
- **'s'** - Manage Schemas (in main menu)
- **'t'** - Manage Tables (in main menu)
- **'q'** - Quit/Exit (in main menu)
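A short sketch of rune handling inside a screen's input capture (the shortcut set varies per screen):

```go
schemaTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
	switch event.Rune() {
	case 'n': // New
		se.showNewSchemaDialog()
		return nil
	case 'b': // Back
		se.pages.SwitchToPage("main")
		se.pages.RemovePage("schemas")
		return nil
	}
	return event // pass everything else through
})
```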
## Consistency Requirements

1. **Layout Structure** - All screens: Title (top) → Content (middle) → Buttons (bottom)
2. **Title Format** - Bold (`[::b]`), centered, dynamic colors enabled
3. **Tables** - Fixed headers (row 0), borders enabled, selectable rows
4. **Buttons** - Include keyboard shortcuts in labels (e.g., "Back [b]")
5. **Forms** - Button order: Save, Delete (if edit), Back
6. **Destructive Actions** - Always show confirmation dialogs
7. **ESC Key** - All screens support ESC to go back
8. **Action Buttons** - Positioned before Back button, in logical order
9. **Data Refresh** - Always refresh the previous screen when returning from a form or dialog

## Widget Naming Conventions

- **Tables:** `schemaTable`, `tableTable`, `colTable`
- **Buttons:** Prefix with `btn` (e.g., `btnBack`, `btnDelete`, `btnNewSchema`)
- **Flex containers:** `btnFlex` for button containers, `flex` for main layout
- **Forms:** `form`
- **Lists:** `list`, `tableList`
- **Text views:** `title`, `info`
- Use camelCase for all variable names

## Page Naming Conventions

Use descriptive kebab-case names:

- **Main screens:** `main`, `schemas`, `tables`, `schema-editor`, `table-editor`, `column-editor`
- **Load/Save screens:** `load-database`, `save-database`
- **Creation dialogs:** `new-schema`, `new-table`, `new-column`, `new-table-from-list`
- **Edit dialogs:** `edit-schema`, `edit-table`
- **Confirmations:** `confirm-delete-schema`, `confirm-delete-table`, `confirm-delete-column`
- **Exit confirmations:** `exit-confirm`, `exit-editor-confirm`
- **Status dialogs:** `error-dialog`, `success-dialog`

## Dialog and Confirmation Rules

### Confirmation Dialogs

1. **Delete Confirmations** - Required for all destructive actions
   - Show item name in confirmation text
   - Buttons: "Cancel", "Delete"
   - ESC key dismisses dialog

2. **Exit Confirmations** - Required when exiting forms with potential unsaved changes
   - Text: "Exit without saving changes?"
   - Buttons: "Cancel", "No, exit without saving"
   - ESC key confirms exit

3. **Save Confirmations** - Optional, based on context
   - Use for critical data changes
   - Clear description of what will be saved

### Dialog Behavior

- All dialogs must capture ESC key for dismissal
- Modal dialogs overlay current screen
- Confirmation dialogs use `tview.NewModal()` (see the sketch below)
- Remove dialog page after action completes
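A minimal delete-confirmation sketch following these rules; the page name matches the conventions above, and the surrounding call sites are illustrative:

```go
modal := tview.NewModal().
	SetText(fmt.Sprintf("Delete schema %q?", schema.Name)). // show item name
	AddButtons([]string{"Cancel", "Delete"}).
	SetDoneFunc(func(buttonIndex int, buttonLabel string) {
		se.pages.RemovePage("confirm-delete-schema") // remove after action
		if buttonLabel == "Delete" {
			se.DeleteSchema(schemaIndex)
		}
	})

// ESC dismisses the dialog without deleting.
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
	if event.Key() == tcell.KeyEscape {
		se.pages.RemovePage("confirm-delete-schema")
		return nil
	}
	return event
})

se.pages.AddAndSwitchToPage("confirm-delete-schema", modal, true)
```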
## Data Refresh Rules

When returning from any form or dialog, the previous screen must be refreshed to show updated data. If the screen contains tables, their data must be updated:

1. **After Save** - Remove and recreate the previous screen to display updated data
2. **After Delete** - Remove and recreate the previous screen to display remaining data
3. **After Cancel/Back** - Remove and recreate the previous screen (data may have changed)
4. **Implementation Pattern** - Remove the current page, remove the previous page, then recreate the previous page with fresh data (see the sketch after the example flow)

**Why This Matters:**
- Ensures users see their changes immediately
- Prevents stale data from being displayed
- Maintains data consistency across the UI
- Avoids confusion from seeing outdated information

**Example Flow:**
```
User on Schema List → Opens Edit Schema Form → Saves Changes →
Returns to Schema List (refreshed with updated schema data)
```
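Condensed from the save handlers in this package, the pattern looks like this:

```go
form.AddButton("Save", func() {
	se.UpdateSchema(schemaIndex, newName, newOwner, newDescription)

	// Remove the dialog and the stale list page, then rebuild the list
	// so it shows the saved changes.
	se.pages.RemovePage("edit-schema")
	se.pages.RemovePage("schemas")
	se.showSchemaList()
})
```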
## Big Loading/Saving Operations
|
||||
|
||||
When loading big changes, files or data, always give a load completed or load error dialog.
|
||||
Do the same with saving.
|
||||
This informs the user what happens.
|
||||
When data is dirty, always ask the user to save when trying to exit.
|
||||
|
||||
### Load/Save Screens
|
||||
|
||||
- **Load Screen** (`load-database`) - Shown when no source is specified via command line
|
||||
- Format dropdown (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)
|
||||
- File path input (for file-based formats)
|
||||
- Connection string input (for database formats like pgsql)
|
||||
- Load button [l] - Loads the database
|
||||
- Create New button [n] - Creates a new empty database
|
||||
- Exit button [q] - Exits the application
|
||||
- ESC key exits the application
|
||||
|
||||
- **Save Screen** (`save-database`) - Accessible from main menu with 'w' key
|
||||
- Format dropdown (same as load screen)
|
||||
- File path input
|
||||
- Help text explaining format requirements
|
||||
- Save button [s] - Saves the database
|
||||
- Back button [b] - Returns to main menu
|
||||
- ESC key returns to main menu
|
||||
- Pre-populated with existing save configuration if available
|
||||
|
||||
### Status Dialogs
|
||||
|
||||
- **Error Dialog** (`error-dialog`) - Shows error messages with OK button and ESC to dismiss
|
||||
- **Success Dialog** (`success-dialog`) - Shows success messages with OK button, ESC to dismiss, and optional callback on close
|
||||
|
||||
## Screen Organization
|
||||
|
||||
Organize UI code into these files:
|
||||
|
||||
### UI Files (Screens and Dialogs)
|
||||
|
||||
- **editor.go** - Core `SchemaEditor` struct, constructor, `Run()` method, helper functions
|
||||
- **main_menu.go** - Main menu screen
|
||||
- **load_save_screens.go** - Database load and save screens
|
||||
- **database_screens.go** - Database edit form
|
||||
- **schema_screens.go** - Schema list, schema editor, new/edit schema dialogs
|
||||
- **table_screens.go** - Tables list, table editor, new/edit table dialogs
|
||||
- **column_screens.go** - Column editor, new column dialog
|
||||
- **domain_screens.go** - Domain list, domain editor, new/edit domain dialogs
|
||||
- **dialogs.go** - Confirmation dialogs (exit, delete)
|
||||
|
||||
### Data Operations Files (Business Logic)
|
||||
|
||||
- **schema_dataops.go** - Schema CRUD operations (Create, Read, Update, Delete)
|
||||
- **table_dataops.go** - Table CRUD operations
|
||||
- **column_dataops.go** - Column CRUD operations
|
||||
|
||||
## Code Separation Rules
|
||||
|
||||
### UI vs Business Logic
|
||||
|
||||
1. **UI Files** - Handle only presentation and user interaction
|
||||
- Display data in tables, lists, and forms
|
||||
- Capture user input
|
||||
- Navigate between screens
|
||||
- Show/hide dialogs
|
||||
- Call dataops methods for actual data changes
|
||||
|
||||
2. **Dataops Files** - Handle only business logic and data manipulation
|
||||
- Create, read, update, delete operations
|
||||
- Data validation
|
||||
- Data structure manipulation
|
||||
- Return created/updated objects or success/failure status
|
||||
- No UI code or tview references
|
||||
|
||||
### Implementation Pattern
|
||||
|
||||
#### Creating New Items
|
||||
|
||||
**Bad (Direct Data Manipulation in UI):**
|
||||
```go
|
||||
form.AddButton("Save", func() {
|
||||
schema := &models.Schema{Name: name, Description: desc, ...}
|
||||
se.db.Schemas = append(se.db.Schemas, schema)
|
||||
})
|
||||
```
|
||||
|
||||
**Good (Using Dataops Methods):**
|
||||
```go
|
||||
form.AddButton("Save", func() {
|
||||
se.CreateSchema(name, description)
|
||||
})
|
||||
```
|
||||
|
||||
#### Editing Existing Items

**Bad (Modifying Data in onChange Callbacks):**

```go
form.AddInputField("Name", column.Name, 40, nil, func(value string) {
	column.Name = value // Changes immediately as user types!
})
form.AddButton("Save", func() {
	// Data already changed, just refresh screen
})
```

**Good (Local Variables + Dataops on Save):**

```go
// Store original values
originalName := column.Name
newName := column.Name

form.AddInputField("Name", column.Name, 40, nil, func(value string) {
	newName = value // Store in local variable
})

form.AddButton("Save", func() {
	// Apply changes only when Save is clicked
	se.UpdateColumn(schemaIndex, tableIndex, originalName, newName /* ... */)
	// Then refresh screen
})

form.AddButton("Back", func() {
	// Discard changes - don't apply local variables
	// Just refresh screen
})
```

### Why This Matters

**Edit Forms Must Use Local Variables:**

1. **Deferred Changes** - Changes only apply when Save is clicked
2. **Cancellable** - Back button discards changes without saving
3. **Handles Renames** - Original name preserved to update map keys correctly
4. **User Expectations** - Save means "commit changes", Back means "cancel"

This separation ensures:

- Cleaner, more maintainable code
- Reusable business logic
- Easier testing
- Clear separation of concerns
- Proper change management (save vs cancel)

129
pkg/writers/bun/README.md
Normal file
@@ -0,0 +1,129 @@
# Bun Writer

Generates Go source files with Bun model definitions from database schema information.

## Overview

The Bun Writer converts RelSpec's internal database model representation into Go source code with Bun struct definitions, complete with proper tags, relationships, and table configuration.

## Features

- Generates Bun-compatible Go structs
- Creates proper `bun` struct tags
- Adds relationship fields
- Supports both single-file and multi-file output
- Maps SQL types to Go types
- Handles nullable fields with sql.Null* types
- Generates table aliases

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath:  "models.go",
		PackageName: "models",
	}

	// db is a *models.Database, e.g. loaded earlier by one of the readers
	var db *models.Database

	writer := bun.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate Bun models from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output bun \
  --out-file models.go \
  --package models

# Convert GORM models to Bun
relspec --input gorm --in-file gorm_models.go --output bun --out-file bun_models.go

# Multi-file output
relspec --input json --in-file schema.json --output bun --out-file models/
```

## Generated Code Example

```go
package models

import (
	"database/sql"
	"time"

	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64          `bun:"id,pk,autoincrement" json:"id"`
	Username  string         `bun:"username,notnull,unique" json:"username"`
	Email     string         `bun:"email,notnull" json:"email"`
	Bio       sql.NullString `bun:"bio" json:"bio,omitempty"`
	CreatedAt time.Time      `bun:"created_at,notnull,default:now()" json:"created_at"`

	// Relationships
	Posts []*Post `bun:"rel:has-many,join:id=user_id" json:"posts,omitempty"`
}

type Post struct {
	bun.BaseModel `bun:"table:posts,alias:p"`

	ID      int64          `bun:"id,pk" json:"id"`
	UserID  int64          `bun:"user_id,notnull" json:"user_id"`
	Title   string         `bun:"title,notnull" json:"title"`
	Content sql.NullString `bun:"content" json:"content,omitempty"`

	// Belongs to
	User *User `bun:"rel:belongs-to,join:user_id=id" json:"user,omitempty"`
}
```

## Supported Bun Tags

- `table` - Table name and alias
- `column` - Column name (auto-derived if not specified)
- `pk` - Primary key
- `autoincrement` - Auto-increment
- `notnull` - NOT NULL constraint
- `unique` - Unique constraint
- `default` - Default value
- `rel` - Relationship definition
- `type` - Explicit SQL type

## Type Mapping

| SQL Type | Go Type | Nullable Type |
|----------|---------|---------------|
| bigint | int64 | sql.NullInt64 |
| integer | int | sql.NullInt32 |
| varchar, text | string | sql.NullString |
| boolean | bool | sql.NullBool |
| timestamp | time.Time | sql.NullTime |
| numeric | float64 | sql.NullFloat64 |

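The table above could be implemented as a simple lookup keyed on the SQL type; this is an illustrative sketch, not the writer's actual code:

```go
// goType returns the Go type for a SQL type, switching to the sql.Null*
// variant for nullable columns. The fallback for unmapped types is assumed.
func goType(sqlType string, nullable bool) string {
	type pair struct{ plain, null string }
	mapping := map[string]pair{
		"bigint":    {"int64", "sql.NullInt64"},
		"integer":   {"int", "sql.NullInt32"},
		"varchar":   {"string", "sql.NullString"},
		"text":      {"string", "sql.NullString"},
		"boolean":   {"bool", "sql.NullBool"},
		"timestamp": {"time.Time", "sql.NullTime"},
		"numeric":   {"float64", "sql.NullFloat64"},
	}
	p, ok := mapping[sqlType]
	if !ok {
		return "string" // assumed fallback for unmapped types
	}
	if nullable {
		return p.null
	}
	return p.plain
}
```
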
## Notes

- Model names are derived from table names (singularized, PascalCase)
- Table aliases are auto-generated from table names
- Multi-file mode: one file per table named `sql_{schema}_{table}.go`
- Generated code is auto-formatted
- JSON tags are automatically added

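A naive sketch of the naming rules described above ("users" becomes `User` with alias `u`); the writer's actual singularization and alias logic may be more involved:

```go
// modelName converts a snake_case table name into a singular PascalCase
// identifier, e.g. "user_posts" -> "UserPost". Trimming a trailing "s" is a
// crude stand-in for real singularization.
func modelName(table string) string {
	parts := strings.Split(table, "_")
	for i, p := range parts {
		p = strings.TrimSuffix(p, "s")
		if p == "" {
			parts[i] = ""
			continue
		}
		parts[i] = strings.ToUpper(p[:1]) + p[1:]
	}
	return strings.Join(parts, "")
}

// tableAlias builds an alias from the first letter of each word,
// e.g. "user_posts" -> "up".
func tableAlias(table string) string {
	var b strings.Builder
	for _, p := range strings.Split(table, "_") {
		if p != "" {
			b.WriteByte(p[0])
		}
	}
	return b.String()
}
```
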
@@ -41,12 +41,7 @@ func NewWriter(options *writers.WriterOptions) *Writer {
 // WriteDatabase writes a complete database as Bun models
 func (w *Writer) WriteDatabase(db *models.Database) error {
-	// Check if multi-file mode is enabled
-	multiFile := false
-	if w.options.Metadata != nil {
-		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
-			multiFile = mf
-		}
-	}
+	multiFile := w.shouldUseMultiFile()
 
 	if multiFile {
 		return w.writeMultiFile(db)

@@ -346,6 +341,41 @@ func (w *Writer) writeOutput(content string) error {
 	return nil
 }
 
+// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
+func (w *Writer) shouldUseMultiFile() bool {
+	// Check if multi_file is explicitly set in metadata
+	if w.options.Metadata != nil {
+		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
+			return mf
+		}
+	}
+
+	// Auto-detect based on output path
+	if w.options.OutputPath == "" {
+		// No output path means stdout (single file)
+		return false
+	}
+
+	// Check if path ends with .go (explicit file)
+	if strings.HasSuffix(w.options.OutputPath, ".go") {
+		return false
+	}
+
+	// Check if path ends with directory separator
+	if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
+		return true
+	}
+
+	// Check if path exists and is a directory
+	info, err := os.Stat(w.options.OutputPath)
+	if err == nil && info.IsDir() {
+		return true
+	}
+
+	// Default to single file for ambiguous cases
+	return false
+}
+
 // createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
 func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
 	return &models.Database{

@@ -356,6 +386,7 @@ func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
 		DatabaseVersion: db.DatabaseVersion,
 		SourceFormat:    db.SourceFormat,
 		Schemas:         nil, // Don't include schemas to avoid circular reference
+		GUID:            db.GUID,
 	}
 }
 
@@ -372,5 +403,6 @@ func (w *Writer) createSchemaRef(schema *models.Schema, db *models.Database) *mo
 		Sequence:    schema.Sequence,
 		RefDatabase: w.createDatabaseRef(db), // Include database ref
 		Tables:      nil, // Don't include tables to avoid circular reference
+		GUID:        schema.GUID,
 	}
 }

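Read directly from the `shouldUseMultiFile` logic above, a table-driven test sketch makes the path-based auto-detection concrete (same-package test, assuming the usual `testing` import; the existing-directory case is omitted because it depends on the filesystem):

```go
func TestShouldUseMultiFile(t *testing.T) {
	cases := []struct {
		outputPath string
		want       bool
	}{
		{"", false},          // no path -> stdout, single file
		{"models.go", false}, // explicit .go file -> single file
		{"models/", true},    // trailing separator -> directory, multi-file
	}
	for _, c := range cases {
		w := NewWriter(&writers.WriterOptions{OutputPath: c.outputPath})
		if got := w.shouldUseMultiFile(); got != c.want {
			t.Errorf("shouldUseMultiFile(%q) = %v, want %v", c.outputPath, got, c.want)
		}
	}
}
```
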
161
pkg/writers/dbml/README.md
Normal file
@@ -0,0 +1,161 @@
# DBML Writer

Generates Database Markup Language (DBML) files from database schema information.

## Overview

The DBML Writer converts RelSpec's internal database model representation into DBML syntax, suitable for use with dbdiagram.io and other DBML-compatible tools.

## Features

- Generates DBML syntax
- Creates table definitions with columns
- Defines relationships
- Includes indexes
- Adds notes and documentation
- Supports enums

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.dbml",
	}

	// db is a *models.Database, e.g. loaded earlier by one of the readers
	var db *models.Database

	writer := dbml.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate DBML from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output dbml \
  --out-file schema.dbml

# Convert GORM models to DBML
relspec --input gorm --in-file models.go --output dbml --out-file database.dbml

# Convert JSON to DBML for visualization
relspec --input json --in-file schema.json --output dbml --out-file diagram.dbml
```

## Generated DBML Example

```dbml
Project MyDatabase {
  database_type: 'PostgreSQL'
}

Table users {
  id bigserial [pk, increment]
  username varchar(50) [not null, unique]
  email varchar(100) [not null]
  bio text [null]
  created_at timestamp [not null, default: `now()`]

  Note: 'Users table'

  indexes {
    email [name: 'idx_users_email']
  }
}

Table posts {
  id bigserial [pk, increment]
  user_id bigint [not null]
  title varchar(200) [not null]
  content text [null]
  created_at timestamp [default: `now()`]

  indexes {
    user_id [name: 'idx_posts_user_id']
    (user_id, created_at) [name: 'idx_posts_user_created']
  }
}

Ref: posts.user_id > users.id [delete: cascade, update: no action]
```

## DBML Features

### Table Definitions

```dbml
Table table_name {
  column_name type [attributes]
}
```

### Column Attributes

- `pk` - Primary key
- `increment` - Auto-increment
- `not null` - NOT NULL constraint
- `null` - Nullable (explicit)
- `unique` - Unique constraint
- `default: value` - Default value
- `note: 'text'` - Column note

### Relationships

```dbml
Ref: table1.column > table2.column
Ref: table1.column < table2.column
Ref: table1.column - table2.column
```

Relationship types:

- `>` - Many-to-one
- `<` - One-to-many
- `-` - One-to-one

Relationship actions:

```dbml
Ref: posts.user_id > users.id [delete: cascade, update: restrict]
```

### Indexes

```dbml
indexes {
  column_name
  (column1, column2) [name: 'idx_name', unique]
}
```

## Type Mapping

| SQL Type | DBML Type |
|----------|-----------|
| bigint | bigint |
| integer | int |
| varchar(n) | varchar(n) |
| text | text |
| boolean | boolean |
| timestamp | timestamp |
| date | date |
| json | json |
| uuid | uuid |

## Notes

- DBML is designed for database visualization
- Can be imported into dbdiagram.io
- Human-readable format
- Schema names can be included in table names
- Comments and notes are preserved
- Ideal for documentation and sharing designs

Some files were not shown because too many files have changed in this diff.