Compare commits
29 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 64aeac972a | |||
| 97a57f5dc8 | |||
| adfe126758 | |||
| 1d193c84d7 | |||
| 1d627c74b1 | |||
| 7c6a355458 | |||
| c0ef26b660 | |||
| cb38f95b79 | |||
| 196d87bc29 | |||
| beb1100d86 | |||
| 410b1ee743 | |||
| b5d39aeee4 | |||
| 5fb9a8f231 | |||
| 27da24f575 | |||
| 0fb3469dbd | |||
| 9f29bc112e | |||
| b55737ab4c | |||
| 2a271b9859 | |||
| beb5b4fac8 | |||
| e61204cb3c | |||
| d52b9cdc14 | |||
| f98b278d72 | |||
| 666eab7cec | |||
| 35bc9dfb5c | |||
| aad5db5175 | |||
| d9225a7310 | |||
| 79effe6921 | |||
| 289715ba44 | |||
| 8ca2b50f9c |
36
.github/workflows/ci.yml
vendored
36
.github/workflows/ci.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
name: CI
|
name: CI
|
||||||
|
run-name: "Test on master branch"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ master ]
|
branches: [ master ]
|
||||||
@@ -12,7 +12,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
go-version: ['1.23', '1.24', '1.25']
|
go-version: ['1.24', '1.25']
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
@@ -34,8 +34,8 @@ jobs:
|
|||||||
- name: Download dependencies
|
- name: Download dependencies
|
||||||
run: go mod download
|
run: go mod download
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run unit tests
|
||||||
run: go test -v -race -coverprofile=coverage.out -covermode=atomic ./...
|
run: make test
|
||||||
|
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@v4
|
||||||
@@ -57,11 +57,13 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
go-version: '1.25'
|
go-version: '1.25'
|
||||||
|
|
||||||
- name: golangci-lint
|
- name: Install golangci-lint
|
||||||
uses: golangci/golangci-lint-action@v6
|
run: |
|
||||||
with:
|
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin latest
|
||||||
version: latest
|
echo "$(go env GOPATH)/bin" >> $GITHUB_PATH
|
||||||
args: --config=.golangci.json
|
|
||||||
|
- name: Run linter
|
||||||
|
run: make lint
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: Build
|
name: Build
|
||||||
@@ -76,8 +78,20 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
go-version: '1.25'
|
go-version: '1.25'
|
||||||
|
|
||||||
- name: Build
|
- name: Download dependencies
|
||||||
run: go build -v ./cmd/relspec
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Build binary
|
||||||
|
run: make build
|
||||||
|
|
||||||
|
- name: Verify binary exists
|
||||||
|
run: |
|
||||||
|
if [ ! -f build/relspec ]; then
|
||||||
|
echo "Error: Binary not found at build/relspec"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Build successful: build/relspec"
|
||||||
|
ls -lh build/relspec
|
||||||
|
|
||||||
- name: Check mod tidiness
|
- name: Check mod tidiness
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
72
.github/workflows/integration-tests.yml
vendored
Normal file
72
.github/workflows/integration-tests.yml
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
name: Integration Tests
|
||||||
|
run-name: "Integration Tests"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ master ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ master ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
integration-tests:
|
||||||
|
name: Integration Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: relspec
|
||||||
|
POSTGRES_PASSWORD: relspec_test_password
|
||||||
|
POSTGRES_DB: relspec_test
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Go
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: '1.25'
|
||||||
|
|
||||||
|
- name: Cache Go modules
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: ~/go/pkg/mod
|
||||||
|
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-go-
|
||||||
|
|
||||||
|
- name: Download dependencies
|
||||||
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Initialize test database
|
||||||
|
env:
|
||||||
|
PGPASSWORD: relspec_test_password
|
||||||
|
run: |
|
||||||
|
# Services are accessible via hostname matching the service name
|
||||||
|
psql -h postgres -U relspec -d relspec_test -f tests/postgres/init.sql
|
||||||
|
|
||||||
|
- name: Verify database setup
|
||||||
|
env:
|
||||||
|
PGPASSWORD: relspec_test_password
|
||||||
|
run: |
|
||||||
|
echo "Verifying database initialization..."
|
||||||
|
psql -h postgres -U relspec -d relspec_test -c "
|
||||||
|
SELECT
|
||||||
|
(SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
|
||||||
|
(SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
|
||||||
|
(SELECT COUNT(*) FROM pg_views WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as views,
|
||||||
|
(SELECT COUNT(*) FROM pg_sequences WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as sequences;
|
||||||
|
"
|
||||||
|
|
||||||
|
- name: Run integration tests
|
||||||
|
env:
|
||||||
|
RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@postgres:5432/relspec_test
|
||||||
|
run: make test-integration
|
||||||
116
.github/workflows/release.yml
vendored
Normal file
116
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
name: Release
|
||||||
|
run-name: "Making Release"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*.*.*'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-release:
|
||||||
|
name: Build and Release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Go
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: '1.25'
|
||||||
|
|
||||||
|
- name: Get version from tag
|
||||||
|
id: get_version
|
||||||
|
run: |
|
||||||
|
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||||
|
echo "Version: ${GITHUB_REF#refs/tags/}"
|
||||||
|
|
||||||
|
- name: Build binaries for multiple platforms
|
||||||
|
run: |
|
||||||
|
mkdir -p dist
|
||||||
|
|
||||||
|
# Linux AMD64
|
||||||
|
GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
|
||||||
|
|
||||||
|
# Linux ARM64
|
||||||
|
GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
|
||||||
|
|
||||||
|
# macOS AMD64
|
||||||
|
GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
|
||||||
|
|
||||||
|
# macOS ARM64 (Apple Silicon)
|
||||||
|
GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
|
||||||
|
|
||||||
|
# Windows AMD64
|
||||||
|
GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
|
||||||
|
|
||||||
|
# Create checksums
|
||||||
|
cd dist
|
||||||
|
sha256sum * > checksums.txt
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
- name: Generate release notes
|
||||||
|
id: release_notes
|
||||||
|
run: |
|
||||||
|
# Get the previous tag
|
||||||
|
previous_tag=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||||
|
|
||||||
|
if [ -z "$previous_tag" ]; then
|
||||||
|
# No previous tag, get all commits
|
||||||
|
commits=$(git log --pretty=format:"- %s (%h)" --no-merges)
|
||||||
|
else
|
||||||
|
# Get commits since the previous tag
|
||||||
|
commits=$(git log "${previous_tag}..HEAD" --pretty=format:"- %s (%h)" --no-merges)
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create release notes
|
||||||
|
cat > release_notes.md << EOF
|
||||||
|
# Release ${{ steps.get_version.outputs.VERSION }}
|
||||||
|
|
||||||
|
## Changes
|
||||||
|
|
||||||
|
${commits}
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
Download the appropriate binary for your platform:
|
||||||
|
|
||||||
|
- **Linux (AMD64)**: \`relspec-linux-amd64\`
|
||||||
|
- **Linux (ARM64)**: \`relspec-linux-arm64\`
|
||||||
|
- **macOS (Intel)**: \`relspec-darwin-amd64\`
|
||||||
|
- **macOS (Apple Silicon)**: \`relspec-darwin-arm64\`
|
||||||
|
- **Windows (AMD64)**: \`relspec-windows-amd64.exe\`
|
||||||
|
|
||||||
|
Make the binary executable (Linux/macOS):
|
||||||
|
\`\`\`bash
|
||||||
|
chmod +x relspec-*
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
Verify the download with the provided checksums.
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Create Release
|
||||||
|
uses: softprops/action-gh-release@v1
|
||||||
|
with:
|
||||||
|
body_path: release_notes.md
|
||||||
|
files: |
|
||||||
|
dist/relspec-linux-amd64
|
||||||
|
dist/relspec-linux-arm64
|
||||||
|
dist/relspec-darwin-amd64
|
||||||
|
dist/relspec-darwin-arm64
|
||||||
|
dist/relspec-windows-amd64.exe
|
||||||
|
dist/checksums.txt
|
||||||
|
draft: false
|
||||||
|
prerelease: false
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Summary
|
||||||
|
run: |
|
||||||
|
echo "Release ${{ steps.get_version.outputs.VERSION }} created successfully!"
|
||||||
|
echo "Binaries built for:"
|
||||||
|
echo " - Linux (amd64, arm64)"
|
||||||
|
echo " - macOS (amd64, arm64)"
|
||||||
|
echo " - Windows (amd64)"
|
||||||
35
AI_USE.md
Normal file
35
AI_USE.md
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# AI Usage Declaration
|
||||||
|
|
||||||
|
This Go project utilizes AI tools for the following purposes:
|
||||||
|
|
||||||
|
- Generating and improving documentation
|
||||||
|
- Writing and enhancing tests
|
||||||
|
- Refactoring and optimizing existing code
|
||||||
|
|
||||||
|
AI is **not** used for core design or architecture decisions.
|
||||||
|
All design decisions are deferred to human discussion.
|
||||||
|
AI is employed only for enhancements to human-written code.
|
||||||
|
|
||||||
|
We are aware of significant AI hallucinations; all AI-generated content is to be reviewed and verified by humans.
|
||||||
|
|
||||||
|
|
||||||
|
.-""""""-.
|
||||||
|
.' '.
|
||||||
|
/ O O \
|
||||||
|
: ` :
|
||||||
|
| |
|
||||||
|
: .------. :
|
||||||
|
\ ' ' /
|
||||||
|
'. .'
|
||||||
|
'-......-'
|
||||||
|
MEGAMIND AI
|
||||||
|
[============]
|
||||||
|
|
||||||
|
___________
|
||||||
|
/___________\
|
||||||
|
/_____________\
|
||||||
|
| ASSIMILATE |
|
||||||
|
| RESISTANCE |
|
||||||
|
| IS FUTILE |
|
||||||
|
\_____________/
|
||||||
|
\___________/
|
||||||
146
Makefile
146
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: all build test lint coverage clean install help docker-up docker-down docker-test docker-test-integration
|
.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version
|
||||||
|
|
||||||
# Binary name
|
# Binary name
|
||||||
BINARY_NAME=relspec
|
BINARY_NAME=relspec
|
||||||
@@ -14,6 +14,26 @@ GOGET=$(GOCMD) get
|
|||||||
GOMOD=$(GOCMD) mod
|
GOMOD=$(GOCMD) mod
|
||||||
GOCLEAN=$(GOCMD) clean
|
GOCLEAN=$(GOCMD) clean
|
||||||
|
|
||||||
|
# Auto-detect container runtime (Docker or Podman)
|
||||||
|
CONTAINER_RUNTIME := $(shell \
|
||||||
|
if command -v podman > /dev/null 2>&1; then \
|
||||||
|
echo "podman"; \
|
||||||
|
elif command -v docker > /dev/null 2>&1; then \
|
||||||
|
echo "docker"; \
|
||||||
|
else \
|
||||||
|
echo "none"; \
|
||||||
|
fi)
|
||||||
|
|
||||||
|
# Detect compose command
|
||||||
|
COMPOSE_CMD := $(shell \
|
||||||
|
if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
echo "podman-compose"; \
|
||||||
|
elif command -v docker-compose > /dev/null 2>&1; then \
|
||||||
|
echo "docker-compose"; \
|
||||||
|
else \
|
||||||
|
echo "docker compose"; \
|
||||||
|
fi)
|
||||||
|
|
||||||
all: lint test build ## Run linting, tests, and build
|
all: lint test build ## Run linting, tests, and build
|
||||||
|
|
||||||
build: ## Build the binary
|
build: ## Build the binary
|
||||||
@@ -22,9 +42,23 @@ build: ## Build the binary
|
|||||||
$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
|
$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
|
||||||
@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"
|
@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"
|
||||||
|
|
||||||
test: ## Run tests
|
test: test-unit ## Run all unit tests (alias for test-unit)
|
||||||
@echo "Running tests..."
|
|
||||||
$(GOTEST) -v -race -coverprofile=coverage.out ./...
|
test-unit: ## Run unit tests (excludes integration tests)
|
||||||
|
@echo "Running unit tests..."
|
||||||
|
$(GOTEST) -v -race -coverprofile=coverage.out -covermode=atomic $$(go list ./... | grep -v '/tests/integration' | grep -v '/tests/assets' | grep -v '/pkg/readers/pgsql')
|
||||||
|
|
||||||
|
test-integration: ## Run integration tests (requires RELSPEC_TEST_PG_CONN environment variable)
|
||||||
|
@echo "Running integration tests..."
|
||||||
|
@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
|
||||||
|
echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
|
||||||
|
echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5439/relspec_test'"; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
@echo "Running PostgreSQL reader tests..."
|
||||||
|
$(GOTEST) -v -count=1 ./pkg/readers/pgsql/
|
||||||
|
@echo "Running general integration tests..."
|
||||||
|
$(GOTEST) -v -count=1 ./tests/integration/
|
||||||
|
|
||||||
coverage: test ## Run tests with coverage report
|
coverage: test ## Run tests with coverage report
|
||||||
@echo "Generating coverage report..."
|
@echo "Generating coverage report..."
|
||||||
@@ -40,6 +74,15 @@ lint: ## Run linter
|
|||||||
exit 1; \
|
exit 1; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
lintfix: ## Run linter
|
||||||
|
@echo "Running linter..."
|
||||||
|
@if command -v golangci-lint > /dev/null; then \
|
||||||
|
golangci-lint run --config=.golangci.json --fix; \
|
||||||
|
else \
|
||||||
|
echo "golangci-lint not installed. Install with: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
|
||||||
clean: ## Clean build artifacts
|
clean: ## Clean build artifacts
|
||||||
@echo "Cleaning..."
|
@echo "Cleaning..."
|
||||||
$(GOCLEAN)
|
$(GOCLEAN)
|
||||||
@@ -58,36 +101,105 @@ deps: ## Download dependencies
|
|||||||
$(GOMOD) tidy
|
$(GOMOD) tidy
|
||||||
@echo "Dependencies updated"
|
@echo "Dependencies updated"
|
||||||
|
|
||||||
|
start: docker-up ## Alias for docker-up (start PostgreSQL test database)
|
||||||
|
|
||||||
|
stop: docker-down ## Alias for docker-down (stop PostgreSQL test database)
|
||||||
|
|
||||||
docker-up: ## Start PostgreSQL test database
|
docker-up: ## Start PostgreSQL test database
|
||||||
@echo "Starting PostgreSQL test database..."
|
@echo "Starting PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
|
||||||
@if command -v docker-compose > /dev/null 2>&1; then \
|
@if [ "$(CONTAINER_RUNTIME)" = "none" ]; then \
|
||||||
docker-compose up -d postgres; \
|
echo "Error: Neither Docker nor Podman is installed"; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
podman run -d --name relspec-test-postgres \
|
||||||
|
-e POSTGRES_USER=relspec \
|
||||||
|
-e POSTGRES_PASSWORD=relspec_test_password \
|
||||||
|
-e POSTGRES_DB=relspec_test \
|
||||||
|
-p 5439:5432 \
|
||||||
|
-v ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:Z \
|
||||||
|
postgres:16-alpine 2>/dev/null || echo "Container already running"; \
|
||||||
else \
|
else \
|
||||||
docker compose up -d postgres; \
|
$(COMPOSE_CMD) up -d postgres; \
|
||||||
fi
|
fi
|
||||||
@echo "Waiting for PostgreSQL to be ready..."
|
@echo "Waiting for PostgreSQL to be ready..."
|
||||||
@sleep 3
|
@sleep 3
|
||||||
@echo "PostgreSQL is running on port 5433"
|
@echo "PostgreSQL is running on port 5439"
|
||||||
@echo "Connection: postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
|
@echo "Connection: postgres://relspec:relspec_test_password@localhost:5439/relspec_test"
|
||||||
|
|
||||||
docker-down: ## Stop PostgreSQL test database
|
docker-down: ## Stop PostgreSQL test database
|
||||||
@echo "Stopping PostgreSQL test database..."
|
@echo "Stopping PostgreSQL test database (using $(CONTAINER_RUNTIME))..."
|
||||||
@if command -v docker-compose > /dev/null 2>&1; then \
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
docker-compose down; \
|
podman stop relspec-test-postgres 2>/dev/null || true; \
|
||||||
|
podman rm relspec-test-postgres 2>/dev/null || true; \
|
||||||
else \
|
else \
|
||||||
docker compose down; \
|
$(COMPOSE_CMD) down; \
|
||||||
fi
|
fi
|
||||||
@echo "PostgreSQL stopped"
|
@echo "PostgreSQL stopped"
|
||||||
|
|
||||||
docker-test: ## Run PostgreSQL integration tests with Docker
|
docker-test: ## Run PostgreSQL integration tests with Docker/Podman
|
||||||
@./tests/postgres/run_tests.sh
|
@if [ "$(CONTAINER_RUNTIME)" = "podman" ]; then \
|
||||||
|
./tests/postgres/run_tests_podman.sh; \
|
||||||
|
else \
|
||||||
|
./tests/postgres/run_tests.sh; \
|
||||||
|
fi
|
||||||
|
|
||||||
docker-test-integration: docker-up ## Start DB and run integration tests
|
docker-test-integration: docker-up ## Start DB and run integration tests
|
||||||
@echo "Running integration tests..."
|
@echo "Running integration tests..."
|
||||||
@sleep 2
|
@sleep 2
|
||||||
@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test" \
|
@RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test" \
|
||||||
$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
|
$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
|
||||||
@make docker-down
|
@make docker-down
|
||||||
|
|
||||||
|
release: ## Create and push a new release tag (auto-increments patch version)
|
||||||
|
@echo "Creating new release..."
|
||||||
|
@latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
|
||||||
|
if [ -z "$$latest_tag" ]; then \
|
||||||
|
version="v1.0.0"; \
|
||||||
|
echo "No existing tags found. Creating first release: $$version"; \
|
||||||
|
commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
|
||||||
|
else \
|
||||||
|
echo "Latest tag: $$latest_tag"; \
|
||||||
|
version_number=$${latest_tag#v}; \
|
||||||
|
IFS='.' read -r major minor patch <<< "$$version_number"; \
|
||||||
|
patch=$$((patch + 1)); \
|
||||||
|
version="v$$major.$$minor.$$patch"; \
|
||||||
|
echo "Creating new release: $$version"; \
|
||||||
|
commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
|
||||||
|
fi; \
|
||||||
|
if [ -z "$$commit_logs" ]; then \
|
||||||
|
tag_message="Release $$version"; \
|
||||||
|
else \
|
||||||
|
tag_message="Release $$version\n\n$$commit_logs"; \
|
||||||
|
fi; \
|
||||||
|
git tag -a "$$version" -m "$$tag_message"; \
|
||||||
|
git push origin "$$version"; \
|
||||||
|
echo "Tag $$version created and pushed to remote repository."
|
||||||
|
|
||||||
|
release-version: ## Create and push a release with specific version (use: make release-version VERSION=v1.2.3)
|
||||||
|
@if [ -z "$(VERSION)" ]; then \
|
||||||
|
echo "Error: VERSION is required. Usage: make release-version VERSION=v1.2.3"; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
@version="$(VERSION)"; \
|
||||||
|
if ! echo "$$version" | grep -q "^v"; then \
|
||||||
|
version="v$$version"; \
|
||||||
|
fi; \
|
||||||
|
echo "Creating release: $$version"; \
|
||||||
|
latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
|
||||||
|
if [ -z "$$latest_tag" ]; then \
|
||||||
|
commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
|
||||||
|
else \
|
||||||
|
commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
|
||||||
|
fi; \
|
||||||
|
if [ -z "$$commit_logs" ]; then \
|
||||||
|
tag_message="Release $$version"; \
|
||||||
|
else \
|
||||||
|
tag_message="Release $$version\n\n$$commit_logs"; \
|
||||||
|
fi; \
|
||||||
|
git tag -a "$$version" -m "$$tag_message"; \
|
||||||
|
git push origin "$$version"; \
|
||||||
|
echo "Tag $$version created and pushed to remote repository."
|
||||||
|
|
||||||
help: ## Display this help screen
|
help: ## Display this help screen
|
||||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
||||||
|
|||||||
140
README.md
140
README.md
@@ -1,34 +1,89 @@
|
|||||||
# RelSpec
|
# RelSpec
|
||||||
|
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/releases/latest)
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/ci.yml)
|
||||||
|
[](https://git.warky.dev/wdevs/relspecgo/actions/workflows/integration-tests.yml)
|
||||||
|
[](https://go.dev/dl/)
|
||||||
|
[](LICENSE)
|
||||||
|
|
||||||
> Database Relations Specification Tool for Go
|
> Database Relations Specification Tool for Go
|
||||||
|
|
||||||
RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
|
RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
|
RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
|
||||||
- Inspect live databases and extract their structure
|
- Inspect live databases and extract their structure
|
||||||
- Convert between different ORM models (GORM, Bun , etc.)
|
- Validate schemas against configurable rules and naming conventions
|
||||||
|
- Convert between different ORM models (GORM, Bun, etc.)
|
||||||
- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
|
- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
|
||||||
- Generate standardized specification files (JSON, YAML, etc.)
|
- Generate standardized specification files (JSON, YAML, etc.)
|
||||||
|
- Compare database schemas and track changes
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
### Input Formats
|
### Readers (Input Formats)
|
||||||
- **XML** - Generic XML schema definitions
|
|
||||||
- **JSON** - JSON-based schema specifications
|
|
||||||
- **Clarion DCTX** - Clarion database dictionary format
|
|
||||||
- **Database Inspection** - Direct database introspection
|
|
||||||
- **GORM Models** - Read existing GORM Go structs
|
|
||||||
- **Bun Models** - Read existing Bun Go structs
|
|
||||||
|
|
||||||
### Output Formats
|
RelSpec can read database schemas from multiple sources:
|
||||||
- **GORM Models** - Generate GORM-compatible Go structs
|
|
||||||
- **Bun Models** - Generate Bun-compatible Go structs
|
#### ORM Models
|
||||||
- **JSON** - Standard JSON schema output
|
- [GORM](pkg/readers/gorm/README.md) - Go GORM model definitions
|
||||||
- **YAML** - Human-readable YAML format
|
- [Bun](pkg/readers/bun/README.md) - Go Bun model definitions
|
||||||
|
- [Drizzle](pkg/readers/drizzle/README.md) - TypeScript Drizzle ORM schemas
|
||||||
|
- [Prisma](pkg/readers/prisma/README.md) - Prisma schema language
|
||||||
|
- [TypeORM](pkg/readers/typeorm/README.md) - TypeScript TypeORM entities
|
||||||
|
|
||||||
|
#### Database Inspection
|
||||||
|
- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
|
||||||
|
|
||||||
|
#### Schema Formats
|
||||||
|
- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
|
||||||
|
- [DCTX](pkg/readers/dctx/README.md) - Clarion database dictionary format
|
||||||
|
- [DrawDB](pkg/readers/drawdb/README.md) - DrawDB JSON format
|
||||||
|
- [GraphQL](pkg/readers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
|
||||||
|
- [JSON](pkg/readers/json/README.md) - RelSpec canonical JSON format
|
||||||
|
- [YAML](pkg/readers/yaml/README.md) - RelSpec canonical YAML format
|
||||||
|
|
||||||
|
### Writers (Output Formats)
|
||||||
|
|
||||||
|
RelSpec can write database schemas to multiple formats:
|
||||||
|
|
||||||
|
#### ORM Models
|
||||||
|
- [GORM](pkg/writers/gorm/README.md) - Generate GORM-compatible Go structs
|
||||||
|
- [Bun](pkg/writers/bun/README.md) - Generate Bun-compatible Go structs
|
||||||
|
- [Drizzle](pkg/writers/drizzle/README.md) - Generate Drizzle ORM TypeScript schemas
|
||||||
|
- [Prisma](pkg/writers/prisma/README.md) - Generate Prisma schema files
|
||||||
|
- [TypeORM](pkg/writers/typeorm/README.md) - Generate TypeORM TypeScript entities
|
||||||
|
|
||||||
|
#### Database DDL
|
||||||
|
- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
|
||||||
|
|
||||||
|
#### Schema Formats
|
||||||
|
- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
|
||||||
|
- [DCTX](pkg/writers/dctx/README.md) - Clarion database dictionary format
|
||||||
|
- [DrawDB](pkg/writers/drawdb/README.md) - DrawDB JSON format
|
||||||
|
- [GraphQL](pkg/writers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
|
||||||
|
- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
|
||||||
|
- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
|
||||||
|
|
||||||
|
### Inspector (Schema Validation)
|
||||||
|
|
||||||
|
RelSpec includes a powerful schema validation and linting tool:
|
||||||
|
|
||||||
|
- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
|
||||||
|
- Enforce naming conventions (snake_case, camelCase, custom patterns)
|
||||||
|
- Check primary key and foreign key standards
|
||||||
|
- Detect missing indexes on foreign keys
|
||||||
|
- Prevent use of SQL reserved keywords
|
||||||
|
- Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
|
||||||
|
- Support for custom validation rules
|
||||||
|
- Multiple output formats (Markdown with colors, JSON)
|
||||||
|
- CI/CD integration ready
|
||||||
|
|
||||||
|
## Use of AI
|
||||||
|
[Rules and use of AI](./AI_USE.md)
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@@ -40,30 +95,65 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
|
|||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
|
### Schema Conversion
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Inspect database and generate GORM models
|
# Convert PostgreSQL database to GORM models
|
||||||
relspec --input db --conn "postgres://..." --output gorm --out-file models.go
|
relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
|
||||||
|
--to gorm --to-path models/ --package models
|
||||||
|
|
||||||
# Convert GORM models to Bun
|
# Convert GORM models to Bun
|
||||||
relspec --input gorm --in-file existing.go --output bun --out-file bun_models.go
|
relspec convert --from gorm --from-path models.go \
|
||||||
|
--to bun --to-path bun_models.go --package models
|
||||||
|
|
||||||
# Export database schema to JSON
|
# Export database schema to JSON
|
||||||
relspec --input db --conn "mysql://..." --output json --out-file schema.json
|
relspec convert --from pgsql --from-conn "postgres://..." \
|
||||||
|
--to json --to-path schema.json
|
||||||
|
|
||||||
# Convert Clarion DCTX to YAML
|
# Convert DBML to PostgreSQL SQL
|
||||||
relspec --input dctx --in-file legacy.dctx --output yaml --out-file schema.yaml
|
relspec convert --from dbml --from-path schema.dbml \
|
||||||
|
--to pgsql --to-path schema.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Validation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Validate a PostgreSQL database with default rules
|
||||||
|
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Validate DBML file with custom rules
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
|
||||||
|
|
||||||
|
# Generate JSON validation report
|
||||||
|
relspec inspect --from json --from-path db.json \
|
||||||
|
--output-format json --output report.json
|
||||||
|
|
||||||
|
# Validate specific schema only
|
||||||
|
relspec inspect --from pgsql --from-conn "..." --schema public
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Comparison
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Compare two database schemas
|
||||||
|
relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
|
||||||
|
--to pgsql --to-conn "postgres://localhost/db2"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Project Structure
|
## Project Structure
|
||||||
|
|
||||||
```
|
```
|
||||||
relspecgo/
|
relspecgo/
|
||||||
├── cmd/ # CLI application
|
├── cmd/
|
||||||
|
│ └── relspec/ # CLI application (convert, inspect, diff, scripts)
|
||||||
├── pkg/
|
├── pkg/
|
||||||
│ ├── readers/ # Input format readers
|
│ ├── readers/ # Input format readers (DBML, GORM, PostgreSQL, etc.)
|
||||||
│ ├── writers/ # Output format writers
|
│ ├── writers/ # Output format writers (GORM, Bun, SQL, etc.)
|
||||||
|
│ ├── inspector/ # Schema validation and linting
|
||||||
|
│ ├── diff/ # Schema comparison
|
||||||
│ ├── models/ # Internal data models
|
│ ├── models/ # Internal data models
|
||||||
│ └── transform/ # Transformation logic
|
│ ├── transform/ # Transformation logic
|
||||||
|
│ └── pgsql/ # PostgreSQL utilities (keywords, data types)
|
||||||
├── examples/ # Usage examples
|
├── examples/ # Usage examples
|
||||||
└── tests/ # Test files
|
└── tests/ # Test files
|
||||||
```
|
```
|
||||||
@@ -94,7 +184,7 @@ go test ./...
|
|||||||
|
|
||||||
Apache License 2.0 - See [LICENSE](LICENSE) for details.
|
Apache License 2.0 - See [LICENSE](LICENSE) for details.
|
||||||
|
|
||||||
Copyright 2025 wdevs
|
Copyright 2025 Warky Devs
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
|
|||||||
30
TODO.md
30
TODO.md
@@ -2,22 +2,24 @@
|
|||||||
|
|
||||||
|
|
||||||
## Input Readers / Writers
|
## Input Readers / Writers
|
||||||
- [x] **Database Inspector**
|
|
||||||
- [x] PostgreSQL driver
|
- [✔️] **Database Inspector**
|
||||||
|
- [✔️] PostgreSQL driver
|
||||||
- [ ] MySQL driver
|
- [ ] MySQL driver
|
||||||
- [ ] SQLite driver
|
- [ ] SQLite driver
|
||||||
- [ ] MSSQL driver
|
- [ ] MSSQL driver
|
||||||
- [x] Foreign key detection
|
- [✔️] Foreign key detection
|
||||||
- [x] Index extraction
|
- [✔️] Index extraction
|
||||||
- [ ] .sql file generation with sequence and priority
|
- [*] .sql file generation with sequence and priority
|
||||||
- [*] .dbml: Database Markup Language (DBML) for textual schema representation.
|
- [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
|
||||||
- [ ] Prisma schema support (PSL format) .prisma
|
- [✔️] Prisma schema support (PSL format) .prisma
|
||||||
- [ ] Entity Framework (.NET) model .edmx
|
- [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you are bugs, you are welcome to do pull requests or issues)
|
||||||
- [ ] TypeORM support
|
- [☠️] Entity Framework (.NET) model .edmx (Fuck no, EDMX files were bloated, verbose XML nightmares—hard to merge, error-prone, and a pain in teams. Microsoft wisely ditched them in EF Core for code-first. Classic overkill from old MS era.)
|
||||||
- [ ] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP)
|
- [✔️] TypeORM support
|
||||||
- [ ] Django models.py (Python classes), Sequelize migrations (JS)
|
- [] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP) (💲 Someone can do this, not me)
|
||||||
- [ ] .avsc: Avro schema (JSON format for data serialization)
|
- [ ] Django models.py (Python classes), Sequelize migrations (JS) (💲 Someone can do this, not me)
|
||||||
|
- [] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
|
||||||
|
- [✔️] GraphQL schema generation
|
||||||
|
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
@@ -36,7 +38,7 @@
|
|||||||
- [ ] Web UI for visual editing
|
- [ ] Web UI for visual editing
|
||||||
- [ ] REST API server mode
|
- [ ] REST API server mode
|
||||||
- [ ] Support for NoSQL databases
|
- [ ] Support for NoSQL databases
|
||||||
- [ ] GraphQL schema generation
|
|
||||||
|
|
||||||
## Performance
|
## Performance
|
||||||
- [ ] Concurrent processing for multiple tables
|
- [ ] Concurrent processing for multiple tables
|
||||||
|
|||||||
@@ -6,26 +6,35 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
||||||
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
||||||
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
||||||
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
||||||
|
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
|
||||||
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||||
|
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
|
||||||
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
||||||
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||||
|
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
||||||
|
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
||||||
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -51,20 +60,28 @@ Input formats:
|
|||||||
- dbml: DBML schema files
|
- dbml: DBML schema files
|
||||||
- dctx: DCTX schema files
|
- dctx: DCTX schema files
|
||||||
- drawdb: DrawDB JSON files
|
- drawdb: DrawDB JSON files
|
||||||
|
- graphql: GraphQL schema files (.graphql, SDL)
|
||||||
- json: JSON database schema
|
- json: JSON database schema
|
||||||
- yaml: YAML database schema
|
- yaml: YAML database schema
|
||||||
- gorm: GORM model files (Go, file or directory)
|
- gorm: GORM model files (Go, file or directory)
|
||||||
- bun: Bun model files (Go, file or directory)
|
- bun: Bun model files (Go, file or directory)
|
||||||
|
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
|
||||||
|
- prisma: Prisma schema files (.prisma)
|
||||||
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
- pgsql: PostgreSQL database (live connection)
|
- pgsql: PostgreSQL database (live connection)
|
||||||
|
|
||||||
Output formats:
|
Output formats:
|
||||||
- dbml: DBML schema files
|
- dbml: DBML schema files
|
||||||
- dctx: DCTX schema files
|
- dctx: DCTX schema files
|
||||||
- drawdb: DrawDB JSON files
|
- drawdb: DrawDB JSON files
|
||||||
|
- graphql: GraphQL schema files (.graphql, SDL)
|
||||||
- json: JSON database schema
|
- json: JSON database schema
|
||||||
- yaml: YAML database schema
|
- yaml: YAML database schema
|
||||||
- gorm: GORM model files (Go)
|
- gorm: GORM model files (Go)
|
||||||
- bun: Bun model files (Go)
|
- bun: Bun model files (Go)
|
||||||
|
- drizzle: Drizzle ORM schema files (TypeScript)
|
||||||
|
- prisma: Prisma schema files (.prisma)
|
||||||
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
- pgsql: PostgreSQL SQL schema
|
- pgsql: PostgreSQL SQL schema
|
||||||
|
|
||||||
PostgreSQL Connection String Examples:
|
PostgreSQL Connection String Examples:
|
||||||
@@ -123,18 +140,27 @@ Examples:
|
|||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
|
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
||||||
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
||||||
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
|
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
|
||||||
|
|
||||||
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
|
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
||||||
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
|
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
|
||||||
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
|
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
|
||||||
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
|
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
|
||||||
|
|
||||||
convertCmd.MarkFlagRequired("from")
|
err := convertCmd.MarkFlagRequired("from")
|
||||||
convertCmd.MarkFlagRequired("to")
|
if err != nil {
|
||||||
convertCmd.MarkFlagRequired("to-path")
|
fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
err = convertCmd.MarkFlagRequired("to")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
err = convertCmd.MarkFlagRequired("to-path")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func runConvert(cmd *cobra.Command, args []string) error {
|
func runConvert(cmd *cobra.Command, args []string) error {
|
||||||
@@ -239,6 +265,30 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
|
|||||||
}
|
}
|
||||||
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "drizzle":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Drizzle format")
|
||||||
|
}
|
||||||
|
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "prisma":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Prisma format")
|
||||||
|
}
|
||||||
|
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "typeorm":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for TypeORM format")
|
||||||
|
}
|
||||||
|
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "graphql", "gql":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for GraphQL format")
|
||||||
|
}
|
||||||
|
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("unsupported source format: %s", dbType)
|
return nil, fmt.Errorf("unsupported source format: %s", dbType)
|
||||||
}
|
}
|
||||||
@@ -287,9 +337,21 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
|
|||||||
}
|
}
|
||||||
writer = wbun.NewWriter(writerOpts)
|
writer = wbun.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
case "drizzle":
|
||||||
|
writer = wdrizzle.NewWriter(writerOpts)
|
||||||
|
|
||||||
case "pgsql", "postgres", "postgresql", "sql":
|
case "pgsql", "postgres", "postgresql", "sql":
|
||||||
writer = wpgsql.NewWriter(writerOpts)
|
writer = wpgsql.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
case "prisma":
|
||||||
|
writer = wprisma.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
case "typeorm":
|
||||||
|
writer = wtypeorm.NewWriter(writerOpts)
|
||||||
|
|
||||||
|
case "graphql", "gql":
|
||||||
|
writer = wgraphql.NewWriter(writerOpts)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("unsupported target format: %s", dbType)
|
return fmt.Errorf("unsupported target format: %s", dbType)
|
||||||
}
|
}
|
||||||
@@ -318,7 +380,7 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
|
|||||||
}
|
}
|
||||||
|
|
||||||
// For formats like DCTX that don't support full database writes, require schema filter
|
// For formats like DCTX that don't support full database writes, require schema filter
|
||||||
if strings.ToLower(dbType) == "dctx" {
|
if strings.EqualFold(dbType, "dctx") {
|
||||||
if len(db.Schemas) == 0 {
|
if len(db.Schemas) == 0 {
|
||||||
return fmt.Errorf("no schemas found in database")
|
return fmt.Errorf("no schemas found in database")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/diff"
|
"git.warky.dev/wdevs/relspecgo/pkg/diff"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
@@ -15,7 +17,6 @@ import (
|
|||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -96,8 +97,14 @@ func init() {
|
|||||||
diffCmd.Flags().StringVar(&outputFormat, "format", "summary", "Output format (summary, json, html)")
|
diffCmd.Flags().StringVar(&outputFormat, "format", "summary", "Output format (summary, json, html)")
|
||||||
diffCmd.Flags().StringVar(&outputPath, "output", "", "Output file path (default: stdout for summary, required for json/html)")
|
diffCmd.Flags().StringVar(&outputPath, "output", "", "Output file path (default: stdout for summary, required for json/html)")
|
||||||
|
|
||||||
diffCmd.MarkFlagRequired("from")
|
err := diffCmd.MarkFlagRequired("from")
|
||||||
diffCmd.MarkFlagRequired("to")
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
err = diffCmd.MarkFlagRequired("to")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func runDiff(cmd *cobra.Command, args []string) error {
|
func runDiff(cmd *cobra.Command, args []string) error {
|
||||||
|
|||||||
321
cmd/relspec/inspect.go
Normal file
321
cmd/relspec/inspect.go
Normal file
@@ -0,0 +1,321 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
inspectSourceType string
|
||||||
|
inspectSourcePath string
|
||||||
|
inspectSourceConn string
|
||||||
|
inspectRulesPath string
|
||||||
|
inspectOutputFormat string
|
||||||
|
inspectOutputPath string
|
||||||
|
inspectSchemaFilter string
|
||||||
|
)
|
||||||
|
|
||||||
|
var inspectCmd = &cobra.Command{
|
||||||
|
Use: "inspect",
|
||||||
|
Short: "Inspect and validate database schemas against rules",
|
||||||
|
Long: `Inspect database schemas from various formats and validate against configurable rules.
|
||||||
|
|
||||||
|
Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
|
||||||
|
JSON, YAML, etc.) and generates validation reports.
|
||||||
|
|
||||||
|
Input formats:
|
||||||
|
- dbml: DBML schema files
|
||||||
|
- dctx: DCTX schema files
|
||||||
|
- drawdb: DrawDB JSON files
|
||||||
|
- graphql: GraphQL schema files (.graphql, SDL)
|
||||||
|
- json: JSON database schema
|
||||||
|
- yaml: YAML database schema
|
||||||
|
- gorm: GORM model files (Go, file or directory)
|
||||||
|
- bun: Bun model files (Go, file or directory)
|
||||||
|
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
|
||||||
|
- prisma: Prisma schema files (.prisma)
|
||||||
|
- typeorm: TypeORM entity files (TypeScript)
|
||||||
|
- pgsql: PostgreSQL database (live connection)
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- markdown: Human-readable markdown report (default, with ANSI colors for terminal)
|
||||||
|
- json: JSON report for tooling integration
|
||||||
|
|
||||||
|
PostgreSQL Connection String Examples:
|
||||||
|
postgres://username:password@localhost:5432/database_name
|
||||||
|
postgres://username:password@localhost/database_name
|
||||||
|
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||||
|
postgresql://user:pass@host/dbname?sslmode=require
|
||||||
|
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
# Inspect a PostgreSQL database with default rules
|
||||||
|
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Inspect a DBML file with custom rules
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml
|
||||||
|
|
||||||
|
# Inspect and output JSON report to file
|
||||||
|
relspec inspect --from json --from-path db.json \
|
||||||
|
--output-format json --output report.json
|
||||||
|
|
||||||
|
# Inspect specific schema only
|
||||||
|
relspec inspect --from pgsql --from-conn "..." --schema public`,
|
||||||
|
RunE: runInspect,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
inspectCmd.Flags().StringVar(&inspectSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectSourcePath, "from-path", "", "Source file path (for file-based formats)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectSourceConn, "from-conn", "", "Source connection string (for database formats)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectRulesPath, "rules", ".relspec-rules.yaml", "Path to rules configuration file (uses defaults if not found)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectOutputFormat, "output-format", "markdown", "Output format (markdown, json)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectOutputPath, "output", "", "Output file path (default: stdout)")
|
||||||
|
inspectCmd.Flags().StringVar(&inspectSchemaFilter, "schema", "", "Filter to a specific schema by name")
|
||||||
|
|
||||||
|
err := inspectCmd.MarkFlagRequired("from")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func runInspect(cmd *cobra.Command, args []string) error {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Inspector ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
|
||||||
|
|
||||||
|
// Read source database
|
||||||
|
fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
|
||||||
|
fmt.Fprintf(os.Stderr, " Format: %s\n", inspectSourceType)
|
||||||
|
if inspectSourcePath != "" {
|
||||||
|
fmt.Fprintf(os.Stderr, " Path: %s\n", inspectSourcePath)
|
||||||
|
}
|
||||||
|
if inspectSourceConn != "" {
|
||||||
|
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(inspectSourceConn))
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := readDatabaseForInspect(inspectSourceType, inspectSourcePath, inspectSourceConn)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read source: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply schema filter if specified
|
||||||
|
if inspectSchemaFilter != "" {
|
||||||
|
db = filterDatabaseBySchema(db, inspectSchemaFilter)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
|
||||||
|
fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
|
||||||
|
|
||||||
|
totalTables := 0
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
totalTables += len(schema.Tables)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)
|
||||||
|
|
||||||
|
// Load rules configuration
|
||||||
|
fmt.Fprintf(os.Stderr, "[2/3] Loading validation rules...\n")
|
||||||
|
fmt.Fprintf(os.Stderr, " Rules: %s\n", inspectRulesPath)
|
||||||
|
|
||||||
|
config, err := inspector.LoadConfig(inspectRulesPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to load rules config: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
enabledCount := 0
|
||||||
|
for _, rule := range config.Rules {
|
||||||
|
if rule.IsEnabled() {
|
||||||
|
enabledCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, " ✓ Loaded %d rule(s) (%d enabled)\n\n", len(config.Rules), enabledCount)
|
||||||
|
|
||||||
|
// Run inspection
|
||||||
|
fmt.Fprintf(os.Stderr, "[3/3] Running validation...\n")
|
||||||
|
|
||||||
|
insp := inspector.NewInspector(db, config)
|
||||||
|
report, err := insp.Inspect()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("inspection failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, " ✓ Inspection complete\n")
|
||||||
|
fmt.Fprintf(os.Stderr, " Errors: %d\n", report.Summary.ErrorCount)
|
||||||
|
fmt.Fprintf(os.Stderr, " Warnings: %d\n\n", report.Summary.WarningCount)
|
||||||
|
|
||||||
|
// Format and output report
|
||||||
|
var formattedReport string
|
||||||
|
switch strings.ToLower(inspectOutputFormat) {
|
||||||
|
case "json":
|
||||||
|
formatter := inspector.NewJSONFormatter()
|
||||||
|
formattedReport, err = formatter.Format(report)
|
||||||
|
case "markdown", "md":
|
||||||
|
// Determine output writer for terminal detection
|
||||||
|
var output *os.File
|
||||||
|
if inspectOutputPath != "" {
|
||||||
|
output, err = os.Create(inspectOutputPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create output file: %w", err)
|
||||||
|
}
|
||||||
|
defer output.Close()
|
||||||
|
} else {
|
||||||
|
output = os.Stdout
|
||||||
|
}
|
||||||
|
|
||||||
|
formatter := inspector.NewMarkdownFormatter(output)
|
||||||
|
formattedReport, err = formatter.Format(report)
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unsupported output format: %s", inspectOutputFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to format report: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write output
|
||||||
|
if inspectOutputPath != "" {
|
||||||
|
err = os.WriteFile(inspectOutputPath, []byte(formattedReport), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to write output file: %w", err)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, "Report written to: %s\n", inspectOutputPath)
|
||||||
|
} else {
|
||||||
|
fmt.Println(formattedReport)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== Inspection Complete ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())
|
||||||
|
|
||||||
|
// Exit with appropriate code
|
||||||
|
if report.HasErrors() {
|
||||||
|
return fmt.Errorf("inspection found %d error(s)", report.Summary.ErrorCount)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func readDatabaseForInspect(dbType, filePath, connString string) (*models.Database, error) {
|
||||||
|
var reader readers.Reader
|
||||||
|
|
||||||
|
switch strings.ToLower(dbType) {
|
||||||
|
case "dbml":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for DBML format")
|
||||||
|
}
|
||||||
|
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "dctx":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for DCTX format")
|
||||||
|
}
|
||||||
|
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "drawdb":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for DrawDB format")
|
||||||
|
}
|
||||||
|
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "graphql":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for GraphQL format")
|
||||||
|
}
|
||||||
|
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "json":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for JSON format")
|
||||||
|
}
|
||||||
|
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "yaml", "yml":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for YAML format")
|
||||||
|
}
|
||||||
|
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "gorm":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for GORM format")
|
||||||
|
}
|
||||||
|
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "bun":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Bun format")
|
||||||
|
}
|
||||||
|
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "drizzle":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Drizzle format")
|
||||||
|
}
|
||||||
|
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "prisma":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Prisma format")
|
||||||
|
}
|
||||||
|
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "typeorm":
|
||||||
|
if filePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for TypeORM format")
|
||||||
|
}
|
||||||
|
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
|
||||||
|
|
||||||
|
case "pgsql", "postgres", "postgresql":
|
||||||
|
if connString == "" {
|
||||||
|
return nil, fmt.Errorf("connection string is required for PostgreSQL format")
|
||||||
|
}
|
||||||
|
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
|
||||||
|
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func filterDatabaseBySchema(db *models.Database, schemaName string) *models.Database {
|
||||||
|
filtered := &models.Database{
|
||||||
|
Name: db.Name,
|
||||||
|
Description: db.Description,
|
||||||
|
DatabaseType: db.DatabaseType,
|
||||||
|
DatabaseVersion: db.DatabaseVersion,
|
||||||
|
SourceFormat: db.SourceFormat,
|
||||||
|
Schemas: []*models.Schema{},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
if schema.Name == schemaName {
|
||||||
|
filtered.Schemas = append(filtered.Schemas, schema)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered
|
||||||
|
}
|
||||||
@@ -18,4 +18,6 @@ JSON, YAML, SQL, etc.).`,
|
|||||||
func init() {
|
func init() {
|
||||||
rootCmd.AddCommand(convertCmd)
|
rootCmd.AddCommand(convertCmd)
|
||||||
rootCmd.AddCommand(diffCmd)
|
rootCmd.AddCommand(diffCmd)
|
||||||
|
rootCmd.AddCommand(inspectCmd)
|
||||||
|
rootCmd.AddCommand(scriptsCmd)
|
||||||
}
|
}
|
||||||
|
|||||||
263
cmd/relspec/scripts.go
Normal file
263
cmd/relspec/scripts.go
Normal file
@@ -0,0 +1,263 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
scriptsDir string
|
||||||
|
scriptsConn string
|
||||||
|
scriptsSchemaName string
|
||||||
|
scriptsDBName string
|
||||||
|
)
|
||||||
|
|
||||||
|
var scriptsCmd = &cobra.Command{
|
||||||
|
Use: "scripts",
|
||||||
|
Short: "Manage and execute SQL migration scripts",
|
||||||
|
Long: `Manage and execute SQL migration scripts from a directory.
|
||||||
|
|
||||||
|
Scripts must follow the naming pattern (both separators supported):
|
||||||
|
{priority}_{sequence}_{name}.sql or .pgsql
|
||||||
|
{priority}-{sequence}-{name}.sql or .pgsql
|
||||||
|
|
||||||
|
Example filenames (underscore format):
|
||||||
|
1_001_create_users.sql # Priority 1, Sequence 1
|
||||||
|
1_002_create_posts.sql # Priority 1, Sequence 2
|
||||||
|
2_001_add_indexes.pgsql # Priority 2, Sequence 1
|
||||||
|
|
||||||
|
Example filenames (hyphen format):
|
||||||
|
1-001-create-users.sql # Priority 1, Sequence 1
|
||||||
|
1-002-create-posts.sql # Priority 1, Sequence 2
|
||||||
|
10-10-create-newid.pgsql # Priority 10, Sequence 10
|
||||||
|
|
||||||
|
Both formats can be mixed in the same directory.
|
||||||
|
Scripts are executed in order: Priority (ascending), then Sequence (ascending).`,
|
||||||
|
}
|
||||||
|
|
||||||
|
var scriptsListCmd = &cobra.Command{
|
||||||
|
Use: "list",
|
||||||
|
Short: "List SQL scripts from a directory",
|
||||||
|
Long: `List SQL scripts from a directory and show their execution order.
|
||||||
|
|
||||||
|
The scripts are read from the specified directory and displayed in the order
|
||||||
|
they would be executed (Priority ascending, then Sequence ascending).
|
||||||
|
|
||||||
|
Example:
|
||||||
|
relspec scripts list --dir ./migrations`,
|
||||||
|
RunE: runScriptsList,
|
||||||
|
}
|
||||||
|
|
||||||
|
var scriptsExecuteCmd = &cobra.Command{
|
||||||
|
Use: "execute",
|
||||||
|
Short: "Execute SQL scripts against a database",
|
||||||
|
Long: `Execute SQL scripts from a directory against a PostgreSQL database.
|
||||||
|
|
||||||
|
Scripts are executed in order: Priority (ascending), then Sequence (ascending).
|
||||||
|
Execution stops immediately on the first error.
|
||||||
|
|
||||||
|
The directory is scanned recursively for files matching the patterns:
|
||||||
|
{priority}_{sequence}_{name}.sql or .pgsql (underscore format)
|
||||||
|
{priority}-{sequence}-{name}.sql or .pgsql (hyphen format)
|
||||||
|
|
||||||
|
PostgreSQL Connection String Examples:
|
||||||
|
postgres://username:password@localhost:5432/database_name
|
||||||
|
postgres://username:password@localhost/database_name
|
||||||
|
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||||
|
postgresql://user:pass@host/dbname?sslmode=require
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
# Execute migration scripts
|
||||||
|
relspec scripts execute --dir ./migrations \
|
||||||
|
--conn "postgres://user:pass@localhost:5432/mydb"
|
||||||
|
|
||||||
|
# Execute with custom schema name
|
||||||
|
relspec scripts execute --dir ./migrations \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--schema public
|
||||||
|
|
||||||
|
# Execute with SSL disabled
|
||||||
|
relspec scripts execute --dir ./sql \
|
||||||
|
--conn "postgres://user:pass@localhost/db?sslmode=disable"`,
|
||||||
|
RunE: runScriptsExecute,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
// List command flags
|
||||||
|
scriptsListCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
|
||||||
|
scriptsListCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
|
||||||
|
scriptsListCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
|
||||||
|
err := scriptsListCmd.MarkFlagRequired("dir")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute command flags
|
||||||
|
scriptsExecuteCmd.Flags().StringVar(&scriptsDir, "dir", "", "Directory containing SQL scripts (required)")
|
||||||
|
scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
|
||||||
|
scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
|
||||||
|
scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
|
||||||
|
|
||||||
|
err = scriptsExecuteCmd.MarkFlagRequired("dir")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking dir flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
err = scriptsExecuteCmd.MarkFlagRequired("conn")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error marking conn flag as required: %v\n", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add subcommands to scripts command
|
||||||
|
scriptsCmd.AddCommand(scriptsListCmd)
|
||||||
|
scriptsCmd.AddCommand(scriptsExecuteCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runScriptsList(cmd *cobra.Command, args []string) error {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== SQL Scripts List ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Directory: %s\n\n", scriptsDir)
|
||||||
|
|
||||||
|
// Read scripts from directory
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: scriptsDir,
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": scriptsSchemaName,
|
||||||
|
"database_name": scriptsDBName,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read scripts: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "No schemas found\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "No SQL scripts found matching pattern {priority}_{sequence}_{name}.sql\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort scripts by Priority then Sequence
|
||||||
|
sortedScripts := make([]*struct {
|
||||||
|
name string
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
sqlLines int
|
||||||
|
}, len(schema.Scripts))
|
||||||
|
|
||||||
|
for i, script := range schema.Scripts {
|
||||||
|
// Count non-empty lines in SQL
|
||||||
|
sqlLines := 0
|
||||||
|
for _, line := range []byte(script.SQL) {
|
||||||
|
if line == '\n' {
|
||||||
|
sqlLines++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(script.SQL) > 0 {
|
||||||
|
sqlLines++ // Count last line if no trailing newline
|
||||||
|
}
|
||||||
|
|
||||||
|
sortedScripts[i] = &struct {
|
||||||
|
name string
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
sqlLines int
|
||||||
|
}{
|
||||||
|
name: script.Name,
|
||||||
|
priority: script.Priority,
|
||||||
|
sequence: script.Sequence,
|
||||||
|
sqlLines: sqlLines,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(sortedScripts, func(i, j int) bool {
|
||||||
|
if sortedScripts[i].priority != sortedScripts[j].priority {
|
||||||
|
return sortedScripts[i].priority < sortedScripts[j].priority
|
||||||
|
}
|
||||||
|
return sortedScripts[i].sequence < sortedScripts[j].sequence
|
||||||
|
})
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
|
||||||
|
fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "No.", "Priority", "Sequence", "Name", "Lines")
|
||||||
|
fmt.Fprintf(os.Stderr, "%-4s %-10s %-8s %-30s %s\n", "----", "--------", "--------", "------------------------------", "-----")
|
||||||
|
|
||||||
|
for i, script := range sortedScripts {
|
||||||
|
fmt.Fprintf(os.Stderr, "%-4d %-10d %-8d %-30s %d\n",
|
||||||
|
i+1,
|
||||||
|
script.priority,
|
||||||
|
script.sequence,
|
||||||
|
script.name,
|
||||||
|
script.sqlLines,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, "\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func runScriptsExecute(cmd *cobra.Command, args []string) error {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== SQL Scripts Execution ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Started at: %s\n", getCurrentTimestamp())
|
||||||
|
fmt.Fprintf(os.Stderr, "Directory: %s\n", scriptsDir)
|
||||||
|
fmt.Fprintf(os.Stderr, "Database: %s\n\n", maskPassword(scriptsConn))
|
||||||
|
|
||||||
|
// Step 1: Read scripts from directory
|
||||||
|
fmt.Fprintf(os.Stderr, "[1/2] Reading SQL scripts...\n")
|
||||||
|
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: scriptsDir,
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": scriptsSchemaName,
|
||||||
|
"database_name": scriptsDBName,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read scripts: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return fmt.Errorf("no schemas found")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, " No scripts found. Nothing to execute.\n\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))
|
||||||
|
|
||||||
|
// Step 2: Execute scripts
|
||||||
|
fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence)...\n\n")
|
||||||
|
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": scriptsConn,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := writer.WriteSchema(schema); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n")
|
||||||
|
return fmt.Errorf("execution failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
|
||||||
|
fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
|
||||||
|
fmt.Fprintf(os.Stderr, "Successfully executed %d script(s)\n\n", len(schema.Scripts))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -9,7 +9,7 @@ services:
|
|||||||
POSTGRES_PASSWORD: relspec_test_password
|
POSTGRES_PASSWORD: relspec_test_password
|
||||||
POSTGRES_DB: relspec_test
|
POSTGRES_DB: relspec_test
|
||||||
ports:
|
ports:
|
||||||
- "5433:5432" # Using 5433 to avoid conflicts with local PostgreSQL
|
- "5439:5432" # Using 5439 to avoid conflicts with local PostgreSQL
|
||||||
volumes:
|
volumes:
|
||||||
- ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql
|
- ./tests/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql
|
||||||
- postgres_data:/var/lib/postgresql/data
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
|||||||
360
docs/SCRIPTS_COMMAND.md
Normal file
360
docs/SCRIPTS_COMMAND.md
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
# RelSpec Scripts Command
|
||||||
|
|
||||||
|
The `relspec scripts` command provides tools for managing and executing SQL migration scripts from a directory structure.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The scripts command supports two main operations:
|
||||||
|
- **list**: List SQL scripts from a directory in execution order
|
||||||
|
- **execute**: Execute SQL scripts against a PostgreSQL database
|
||||||
|
|
||||||
|
Scripts are read from a directory (recursively) and executed in a deterministic order based on **Priority** (ascending) and **Sequence** (ascending).
|
||||||
|
|
||||||
|
## File Naming Convention
|
||||||
|
|
||||||
|
SQL scripts must follow this naming pattern (both separators are supported):
|
||||||
|
|
||||||
|
```
|
||||||
|
{priority}_{sequence}_{name}.{sql|pgsql} (underscore format)
|
||||||
|
{priority}-{sequence}-{name}.{sql|pgsql} (hyphen format)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Components
|
||||||
|
|
||||||
|
- **priority**: Integer (0-9999) - Execution priority level (lower executes first)
|
||||||
|
- **sequence**: Integer (0-9999) - Order within priority level (lower executes first)
|
||||||
|
- **separator**: Underscore `_` or hyphen `-` (both formats can be mixed)
|
||||||
|
- **name**: Descriptive name (alphanumeric, underscores, hyphens)
|
||||||
|
- **extension**: `.sql` or `.pgsql`
|
||||||
|
|
||||||
|
### Valid Examples
|
||||||
|
|
||||||
|
**Underscore format:**
|
||||||
|
```
|
||||||
|
1_001_create_users.sql # Priority 1, Sequence 1
|
||||||
|
1_002_create_posts.sql # Priority 1, Sequence 2
|
||||||
|
1_003_create_comments.pgsql # Priority 1, Sequence 3
|
||||||
|
2_001_add_indexes.sql # Priority 2, Sequence 1
|
||||||
|
2_002_add_constraints.sql # Priority 2, Sequence 2
|
||||||
|
3_001_seed_users.sql # Priority 3, Sequence 1
|
||||||
|
```
|
||||||
|
|
||||||
|
**Hyphen format:**
|
||||||
|
```
|
||||||
|
1-001-create-users.sql # Priority 1, Sequence 1
|
||||||
|
1-002-create-posts.sql # Priority 1, Sequence 2
|
||||||
|
1-003-create-comments.pgsql # Priority 1, Sequence 3
|
||||||
|
10-10-create-newid.pgsql # Priority 10, Sequence 10
|
||||||
|
```
|
||||||
|
|
||||||
|
**Mixed format (both in same directory):**
|
||||||
|
```
|
||||||
|
1_001_create_users.sql # Priority 1, Sequence 1 (underscore)
|
||||||
|
1-002-create-posts.sql # Priority 1, Sequence 2 (hyphen)
|
||||||
|
2_001_add_indexes.sql # Priority 2, Sequence 1 (underscore)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Execution Order**: 1→2→3→4→5→6 (sorted by Priority, then Sequence)
|
||||||
|
|
||||||
|
### Invalid Examples (Will be ignored)
|
||||||
|
|
||||||
|
```
|
||||||
|
migration.sql # Missing priority/sequence
|
||||||
|
create_users.sql # Missing priority/sequence
|
||||||
|
1_create_users.sql # Missing sequence
|
||||||
|
1_001_test.txt # Wrong extension
|
||||||
|
README.md # Not a SQL file
|
||||||
|
```
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
Scripts can be organized in subdirectories. The scanner recursively finds all matching SQL files:
|
||||||
|
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_001_create_schema.sql
|
||||||
|
├── 1_002_create_users.sql
|
||||||
|
├── tables/
|
||||||
|
│ ├── 1_003_create_posts.sql
|
||||||
|
│ └── 1_004_create_comments.pgsql
|
||||||
|
├── indexes/
|
||||||
|
│ └── 2_001_add_indexes.sql
|
||||||
|
└── data/
|
||||||
|
└── 3_001_seed_data.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
All files will be found and executed in Priority→Sequence order regardless of directory structure.
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
### relspec scripts list
|
||||||
|
|
||||||
|
List all SQL scripts in a directory and show their execution order.
|
||||||
|
|
||||||
|
**Usage:**
|
||||||
|
```bash
|
||||||
|
relspec scripts list --dir <directory> [flags]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Flags:**
|
||||||
|
- `--dir <path>` (required): Directory containing SQL scripts
|
||||||
|
- `--schema <name>`: Schema name (default: "public")
|
||||||
|
- `--database <name>`: Database name (default: "database")
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
```bash
|
||||||
|
relspec scripts list --dir ./migrations
|
||||||
|
```
|
||||||
|
|
||||||
|
**Output:**
|
||||||
|
```
|
||||||
|
=== SQL Scripts List ===
|
||||||
|
Directory: ./migrations
|
||||||
|
|
||||||
|
Found 5 script(s) in execution order:
|
||||||
|
|
||||||
|
No. Priority Sequence Name Lines
|
||||||
|
---- -------- -------- ------------------------------ -----
|
||||||
|
1 1 1 create_users 7
|
||||||
|
2 1 2 create_posts 8
|
||||||
|
3 2 1 add_indexes 4
|
||||||
|
4 2 2 add_constraints 6
|
||||||
|
5 3 1 seed_data 4
|
||||||
|
```
|
||||||
|
|
||||||
|
### relspec scripts execute
|
||||||
|
|
||||||
|
Execute SQL scripts from a directory against a PostgreSQL database.
|
||||||
|
|
||||||
|
**Usage:**
|
||||||
|
```bash
|
||||||
|
relspec scripts execute --dir <directory> --conn <connection-string> [flags]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Flags:**
|
||||||
|
- `--dir <path>` (required): Directory containing SQL scripts
|
||||||
|
- `--conn <string>` (required): PostgreSQL connection string
|
||||||
|
- `--schema <name>`: Schema name (default: "public")
|
||||||
|
- `--database <name>`: Database name (default: "database")
|
||||||
|
|
||||||
|
**Connection String Formats:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Standard PostgreSQL URLs
|
||||||
|
postgres://username:password@localhost:5432/database_name
|
||||||
|
postgres://username:password@localhost/database_name
|
||||||
|
postgresql://user:pass@host:5432/dbname?sslmode=disable
|
||||||
|
postgresql://user:pass@host/dbname?sslmode=require
|
||||||
|
|
||||||
|
# Key-value format
|
||||||
|
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
|
||||||
|
```
|
||||||
|
|
||||||
|
**Examples:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Execute migration scripts
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir ./migrations \
|
||||||
|
--conn "postgres://user:pass@localhost:5432/mydb"
|
||||||
|
|
||||||
|
# Execute with custom schema
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir ./migrations \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--schema public
|
||||||
|
|
||||||
|
# Execute with SSL disabled
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir ./sql \
|
||||||
|
--conn "postgres://user:pass@localhost/db?sslmode=disable"
|
||||||
|
|
||||||
|
# Execute using key-value connection string
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir ./migrations \
|
||||||
|
--conn "host=localhost port=5432 user=admin password=secret dbname=prod"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Output:**
|
||||||
|
```
|
||||||
|
=== SQL Scripts Execution ===
|
||||||
|
Started at: 2025-12-30 22:30:15
|
||||||
|
Directory: ./migrations
|
||||||
|
Database: postgres://user:***@localhost:5432/mydb
|
||||||
|
|
||||||
|
[1/2] Reading SQL scripts...
|
||||||
|
✓ Found 4 script(s)
|
||||||
|
|
||||||
|
[2/2] Executing scripts in order (Priority → Sequence)...
|
||||||
|
|
||||||
|
Executing script: create_users (Priority=1, Sequence=1)
|
||||||
|
✓ Successfully executed: create_users
|
||||||
|
Executing script: create_posts (Priority=1, Sequence=2)
|
||||||
|
✓ Successfully executed: create_posts
|
||||||
|
Executing script: add_indexes (Priority=2, Sequence=1)
|
||||||
|
✓ Successfully executed: add_indexes
|
||||||
|
Executing script: seed_data (Priority=2, Sequence=2)
|
||||||
|
✓ Successfully executed: seed_data
|
||||||
|
|
||||||
|
=== Execution Complete ===
|
||||||
|
Completed at: 2025-12-30 22:30:16
|
||||||
|
Successfully executed 4 script(s)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Execution Behavior
|
||||||
|
|
||||||
|
### Execution Order
|
||||||
|
|
||||||
|
Scripts are **always** executed in this order:
|
||||||
|
1. Sort by **Priority** (ascending)
|
||||||
|
2. Within same priority, sort by **Sequence** (ascending)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
Priority 1, Sequence 1 → Executes 1st
|
||||||
|
Priority 1, Sequence 2 → Executes 2nd
|
||||||
|
Priority 1, Sequence 10 → Executes 3rd
|
||||||
|
Priority 2, Sequence 1 → Executes 4th
|
||||||
|
Priority 2, Sequence 5 → Executes 5th
|
||||||
|
Priority 10, Sequence 1 → Executes 6th
|
||||||
|
```
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
- **Stop on First Error**: Execution stops immediately when any script fails
|
||||||
|
- **No Automatic Rollback**: Scripts executed before the failure remain committed
|
||||||
|
- **Error Details**: Full error message with script name, priority, and sequence
|
||||||
|
|
||||||
|
Example error output:
|
||||||
|
```
|
||||||
|
Executing script: add_indexes (Priority=2, Sequence=1)
|
||||||
|
Error: execution failed: failed to execute script add_indexes (Priority=2, Sequence=1):
|
||||||
|
ERROR: syntax error at or near "IDNEX" (SQLSTATE 42601)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Transaction Behavior
|
||||||
|
|
||||||
|
- Each script executes in its own implicit transaction (PostgreSQL default)
|
||||||
|
- No automatic transaction wrapping across multiple scripts
|
||||||
|
- For atomic migrations, manually wrap SQL in `BEGIN/COMMIT` blocks
|
||||||
|
|
||||||
|
### Empty Scripts
|
||||||
|
|
||||||
|
Scripts with empty SQL content are silently skipped.
|
||||||
|
|
||||||
|
## Use Cases
|
||||||
|
|
||||||
|
### Development Migrations
|
||||||
|
|
||||||
|
Organize database changes by priority levels:
|
||||||
|
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_xxx_schema.sql # Priority 1: Core schema
|
||||||
|
├── 1_xxx_tables.sql
|
||||||
|
├── 2_xxx_indexes.sql # Priority 2: Performance
|
||||||
|
├── 2_xxx_constraints.sql
|
||||||
|
└── 3_xxx_seed.sql # Priority 3: Data
|
||||||
|
```
|
||||||
|
|
||||||
|
### Multi-Environment Deployments
|
||||||
|
|
||||||
|
Use priority levels for environment-specific scripts:
|
||||||
|
|
||||||
|
```
|
||||||
|
deploy/
|
||||||
|
├── 1_xxx_core_schema.sql # Priority 1: All environments
|
||||||
|
├── 2_xxx_dev_data.sql # Priority 2: Dev only
|
||||||
|
├── 2_xxx_staging_data.sql # Priority 2: Staging only
|
||||||
|
└── 3_xxx_prod_data.sql # Priority 3: Production only
|
||||||
|
```
|
||||||
|
|
||||||
|
### Incremental Rollouts
|
||||||
|
|
||||||
|
Use sequence for ordered feature rollouts:
|
||||||
|
|
||||||
|
```
|
||||||
|
features/
|
||||||
|
├── 1_001_feature_a_schema.sql
|
||||||
|
├── 1_002_feature_a_data.sql
|
||||||
|
├── 1_003_feature_b_schema.sql
|
||||||
|
├── 1_004_feature_b_data.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integration with RelSpec
|
||||||
|
|
||||||
|
The scripts command uses:
|
||||||
|
- **Reader**: `pkg/readers/sqldir/` - Reads SQL files into `models.Schema.Scripts`
|
||||||
|
- **Writer**: `pkg/writers/sqlexec/` - Executes scripts from `models.Schema.Scripts`
|
||||||
|
|
||||||
|
You can use these packages programmatically:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Read scripts
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "./migrations",
|
||||||
|
})
|
||||||
|
db, _ := reader.ReadDatabase()
|
||||||
|
|
||||||
|
// Execute scripts
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/mydb",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### Naming
|
||||||
|
|
||||||
|
- Use zero-padded sequences: `001`, `002`, `010` (not `1`, `2`, `10`)
|
||||||
|
- Use descriptive names: `create_users_table`, not `table1`
|
||||||
|
- Group related changes: same priority for related DDL
|
||||||
|
|
||||||
|
### Organization
|
||||||
|
|
||||||
|
- Keep scripts small and focused (one logical change per file)
|
||||||
|
- Use priority levels to organize phases (schema → indexes → data)
|
||||||
|
- Document complex migrations with SQL comments
|
||||||
|
|
||||||
|
### Safety
|
||||||
|
|
||||||
|
- Always test migrations in development first
|
||||||
|
- Use `scripts list` to verify execution order before running
|
||||||
|
- Back up production databases before executing
|
||||||
|
- Consider using transactions for critical changes
|
||||||
|
- Review generated SQL before execution
|
||||||
|
|
||||||
|
### Version Control
|
||||||
|
|
||||||
|
- Commit scripts to version control
|
||||||
|
- Never modify executed scripts (create new ones instead)
|
||||||
|
- Use meaningful commit messages
|
||||||
|
- Tag releases with migration checkpoints
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- PostgreSQL only (currently)
|
||||||
|
- No built-in rollback support
|
||||||
|
- No migration state tracking (no "already executed" detection)
|
||||||
|
- No dry-run mode
|
||||||
|
- Stops on first error (no partial execution tracking)
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
Potential future features:
|
||||||
|
- Migration state tracking (executed scripts table)
|
||||||
|
- Rollback script support (using `models.Script.Rollback` field)
|
||||||
|
- Dry-run mode (validate without executing)
|
||||||
|
- Transaction wrapping (all-or-nothing execution)
|
||||||
|
- Multi-database support (MySQL, SQLite, etc.)
|
||||||
|
- Parallel execution for independent scripts
|
||||||
393
docs/SCRIPTS_EXAMPLES.md
Normal file
393
docs/SCRIPTS_EXAMPLES.md
Normal file
@@ -0,0 +1,393 @@
|
|||||||
|
# RelSpec Scripts Command - Quick Examples
|
||||||
|
|
||||||
|
## Basic Workflow
|
||||||
|
|
||||||
|
### 1. Create migration directory structure
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p migrations
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Create migration scripts
|
||||||
|
|
||||||
|
Both underscore and hyphen formats are supported. Examples below use underscore format,
|
||||||
|
but you can also use: `1-001-create-users-table.sql`
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Priority 1: Core schema
|
||||||
|
cat > migrations/1_001_create_users_table.sql << 'EOF'
|
||||||
|
CREATE TABLE users (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
username VARCHAR(100) NOT NULL UNIQUE,
|
||||||
|
email VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
password_hash VARCHAR(255) NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_users_username ON users(username);
|
||||||
|
CREATE INDEX idx_users_email ON users(email);
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > migrations/1_002_create_posts_table.sql << 'EOF'
|
||||||
|
CREATE TABLE posts (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
title VARCHAR(200) NOT NULL,
|
||||||
|
content TEXT,
|
||||||
|
published BOOLEAN DEFAULT FALSE,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Priority 2: Additional indexes
|
||||||
|
cat > migrations/2_001_add_post_indexes.sql << 'EOF'
|
||||||
|
CREATE INDEX idx_posts_user_id ON posts(user_id);
|
||||||
|
CREATE INDEX idx_posts_published ON posts(published);
|
||||||
|
CREATE INDEX idx_posts_created_at ON posts(created_at);
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Priority 3: Seed data
|
||||||
|
cat > migrations/3_001_seed_admin_user.sql << 'EOF'
|
||||||
|
INSERT INTO users (username, email, password_hash)
|
||||||
|
VALUES ('admin', 'admin@example.com', 'hashed_password_here')
|
||||||
|
ON CONFLICT (username) DO NOTHING;
|
||||||
|
EOF
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. List scripts to verify order
|
||||||
|
|
||||||
|
```bash
|
||||||
|
relspec scripts list --dir migrations
|
||||||
|
```
|
||||||
|
|
||||||
|
Output:
|
||||||
|
```
|
||||||
|
=== SQL Scripts List ===
|
||||||
|
Directory: migrations
|
||||||
|
|
||||||
|
Found 4 script(s) in execution order:
|
||||||
|
|
||||||
|
No. Priority Sequence Name Lines
|
||||||
|
---- -------- -------- ------------------------------ -----
|
||||||
|
1 1 1 create_users_table 13
|
||||||
|
2 1 2 create_posts_table 11
|
||||||
|
3 2 1 add_post_indexes 4
|
||||||
|
4 3 1 seed_admin_user 4
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Execute against database
|
||||||
|
|
||||||
|
```bash
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://myuser:mypass@localhost:5432/myapp"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Real-World Examples
|
||||||
|
|
||||||
|
### Example 1: E-commerce Database Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Directory structure
|
||||||
|
migrations/
|
||||||
|
├── 1_001_create_users.sql
|
||||||
|
├── 1_002_create_products.sql
|
||||||
|
├── 1_003_create_orders.sql
|
||||||
|
├── 1_004_create_order_items.sql
|
||||||
|
├── 2_001_add_indexes.sql
|
||||||
|
├── 2_002_add_constraints.sql
|
||||||
|
├── 3_001_seed_categories.sql
|
||||||
|
└── 3_002_seed_sample_products.sql
|
||||||
|
|
||||||
|
# Execute
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://ecommerce_user:pass@db.example.com:5432/ecommerce_prod?sslmode=require"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 2: Multi-Schema Database
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Organize by schema using subdirectories
|
||||||
|
migrations/
|
||||||
|
├── public/
|
||||||
|
│ ├── 1_001_create_users.sql
|
||||||
|
│ └── 1_002_create_sessions.sql
|
||||||
|
├── analytics/
|
||||||
|
│ ├── 1_001_create_events.sql
|
||||||
|
│ └── 2_001_create_views.sql
|
||||||
|
└── reporting/
|
||||||
|
└── 1_001_create_reports.sql
|
||||||
|
|
||||||
|
# Execute (all schemas processed together)
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://localhost/multi_schema_db" \
|
||||||
|
--schema public
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 3: Development Environment Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create local development database
|
||||||
|
createdb myapp_dev
|
||||||
|
|
||||||
|
# Run migrations
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir ./db/migrations \
|
||||||
|
--conn "postgres://localhost/myapp_dev?sslmode=disable"
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
psql myapp_dev -c "\dt"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 4: CI/CD Pipeline
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# .github/workflows/deploy.yml
|
||||||
|
- name: Run database migrations
|
||||||
|
run: |
|
||||||
|
relspec scripts list --dir migrations
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "${{ secrets.DATABASE_URL }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 5: Docker Compose Integration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# docker-compose.yml
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: myapp
|
||||||
|
POSTGRES_USER: myuser
|
||||||
|
POSTGRES_PASSWORD: mypass
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
|
||||||
|
migrate:
|
||||||
|
image: relspec:latest
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
volumes:
|
||||||
|
- ./migrations:/migrations
|
||||||
|
command: >
|
||||||
|
scripts execute
|
||||||
|
--dir /migrations
|
||||||
|
--conn "postgres://myuser:mypass@postgres:5432/myapp"
|
||||||
|
```
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run migrations with docker-compose
|
||||||
|
docker-compose up -d postgres
|
||||||
|
sleep 5 # Wait for postgres to be ready
|
||||||
|
docker-compose run --rm migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 6: Incremental Feature Rollout
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Feature branch structure
|
||||||
|
migrations/
|
||||||
|
├── 1_100_user_profiles_schema.sql # Feature: User profiles
|
||||||
|
├── 1_101_user_profiles_constraints.sql
|
||||||
|
├── 1_102_user_profiles_indexes.sql
|
||||||
|
├── 2_100_notifications_schema.sql # Feature: Notifications
|
||||||
|
├── 2_101_notifications_constraints.sql
|
||||||
|
└── 2_102_notifications_indexes.sql
|
||||||
|
|
||||||
|
# Deploy just user profiles (Priority 1)
|
||||||
|
# Then later deploy notifications (Priority 2)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 7: Rollback Strategy (Manual)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Forward migration
|
||||||
|
cat > migrations/1_001_add_column.sql << 'EOF'
|
||||||
|
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Create manual rollback script (not auto-executed)
|
||||||
|
cat > rollbacks/1_001_remove_column.sql << 'EOF'
|
||||||
|
ALTER TABLE users DROP COLUMN phone;
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# If needed, manually execute rollback
|
||||||
|
psql myapp -f rollbacks/1_001_remove_column.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 8: Complex Schema Changes
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# migrations/1_001_alter_users_table.sql
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- Add new column
|
||||||
|
ALTER TABLE users ADD COLUMN full_name VARCHAR(200);
|
||||||
|
|
||||||
|
-- Populate from existing data
|
||||||
|
UPDATE users SET full_name = username WHERE full_name IS NULL;
|
||||||
|
|
||||||
|
-- Make it required
|
||||||
|
ALTER TABLE users ALTER COLUMN full_name SET NOT NULL;
|
||||||
|
|
||||||
|
-- Add index
|
||||||
|
CREATE INDEX idx_users_full_name ON users(full_name);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
|
```
|
||||||
|
|
||||||
|
Execute:
|
||||||
|
```bash
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://localhost/myapp"
|
||||||
|
```
|
||||||
|
|
||||||
|
## File Naming Format Examples
|
||||||
|
|
||||||
|
### Underscore Format (Traditional)
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_001_create_users.sql
|
||||||
|
├── 1_002_create_posts.sql
|
||||||
|
├── 2_001_add_indexes.sql
|
||||||
|
└── 3_001_seed_data.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Hyphen Format (Alternative)
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1-001-create-users.sql
|
||||||
|
├── 1-002-create-posts.sql
|
||||||
|
├── 10-10-create-newid.pgsql
|
||||||
|
└── 2-001-add-indexes.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mixed Format (Both in Same Directory)
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_001_create_users.sql # Underscore format
|
||||||
|
├── 1-002-create-posts.sql # Hyphen format
|
||||||
|
├── 2_001_add_indexes.sql # Underscore format
|
||||||
|
└── 10-10-special-migration.pgsql # Hyphen format
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** All three approaches work identically - use whichever naming style you prefer!
|
||||||
|
|
||||||
|
## Common Patterns
|
||||||
|
|
||||||
|
### Pattern 1: Schema → Indexes → Constraints → Data
|
||||||
|
|
||||||
|
```
|
||||||
|
1_xxx_*.sql # Tables and basic structure
|
||||||
|
2_xxx_*.sql # Indexes for performance
|
||||||
|
3_xxx_*.sql # Foreign keys and constraints
|
||||||
|
4_xxx_*.sql # Seed/reference data
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 2: Feature-Based Organization
|
||||||
|
|
||||||
|
```
|
||||||
|
1_001_feature_auth_users.sql
|
||||||
|
1_002_feature_auth_sessions.sql
|
||||||
|
1_003_feature_auth_permissions.sql
|
||||||
|
2_001_feature_blog_posts.sql
|
||||||
|
2_002_feature_blog_comments.sql
|
||||||
|
3_001_feature_payments_transactions.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 3: Date-Based Versioning
|
||||||
|
|
||||||
|
```
|
||||||
|
1_20250130_create_users.sql
|
||||||
|
2_20250131_add_user_indexes.sql
|
||||||
|
3_20250201_create_posts.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 4: Environment-Specific Scripts
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Base migrations (all environments)
|
||||||
|
migrations/base/
|
||||||
|
├── 1_001_create_users.sql
|
||||||
|
├── 1_002_create_products.sql
|
||||||
|
|
||||||
|
# Development-specific
|
||||||
|
migrations/dev/
|
||||||
|
└── 9_001_seed_test_data.sql
|
||||||
|
|
||||||
|
# Production-specific
|
||||||
|
migrations/prod/
|
||||||
|
└── 9_001_seed_production_config.sql
|
||||||
|
|
||||||
|
# Execute different paths based on environment
|
||||||
|
ENV=dev
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations/base \
|
||||||
|
--conn "postgres://localhost/myapp_${ENV}"
|
||||||
|
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations/${ENV} \
|
||||||
|
--conn "postgres://localhost/myapp_${ENV}"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Check script order before execution
|
||||||
|
```bash
|
||||||
|
relspec scripts list --dir migrations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test against local database first
|
||||||
|
```bash
|
||||||
|
# Create test database
|
||||||
|
createdb myapp_test
|
||||||
|
|
||||||
|
# Test migrations
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://localhost/myapp_test"
|
||||||
|
|
||||||
|
# Inspect results
|
||||||
|
psql myapp_test
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
dropdb myapp_test
|
||||||
|
```
|
||||||
|
|
||||||
|
### Validate SQL syntax
|
||||||
|
```bash
|
||||||
|
# Use PostgreSQL to check syntax without executing
|
||||||
|
for f in migrations/*.sql; do
|
||||||
|
echo "Checking $f..."
|
||||||
|
psql myapp -c "BEGIN; \i $f; ROLLBACK;" --single-transaction
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
### Debug connection issues
|
||||||
|
```bash
|
||||||
|
# Test connection string
|
||||||
|
psql "postgres://user:pass@localhost:5432/myapp"
|
||||||
|
|
||||||
|
# If that works, use the same string for relspec
|
||||||
|
relspec scripts execute \
|
||||||
|
--dir migrations \
|
||||||
|
--conn "postgres://user:pass@localhost:5432/myapp"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tips
|
||||||
|
|
||||||
|
1. **Always review execution order** with `list` before running `execute`
|
||||||
|
2. **Test in development** before running against production
|
||||||
|
3. **Use zero-padded sequences** (001, 002, not 1, 2) for consistent sorting
|
||||||
|
4. **Keep scripts idempotent** when possible (use IF NOT EXISTS, ON CONFLICT, etc.)
|
||||||
|
5. **Back up production** before running migrations
|
||||||
|
6. **Use transactions** for complex multi-statement migrations
|
||||||
|
7. **Document breaking changes** with SQL comments in the migration files
|
||||||
|
8. **Version control everything** - commit migrations with code changes
|
||||||
9
go.mod
9
go.mod
@@ -1,12 +1,13 @@
|
|||||||
module git.warky.dev/wdevs/relspecgo
|
module git.warky.dev/wdevs/relspecgo
|
||||||
|
|
||||||
go 1.24
|
go 1.24.0
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/jackc/pgx/v5 v5.7.6
|
github.com/jackc/pgx/v5 v5.7.6
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
github.com/stretchr/testify v1.11.1
|
github.com/stretchr/testify v1.11.1
|
||||||
|
github.com/uptrace/bun v1.2.16
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -15,10 +16,16 @@ require (
|
|||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
|
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||||
github.com/kr/pretty v0.3.1 // indirect
|
github.com/kr/pretty v0.3.1 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||||
github.com/spf13/pflag v1.0.10 // indirect
|
github.com/spf13/pflag v1.0.10 // indirect
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
golang.org/x/crypto v0.41.0 // indirect
|
golang.org/x/crypto v0.41.0 // indirect
|
||||||
|
golang.org/x/sys v0.38.0 // indirect
|
||||||
golang.org/x/text v0.28.0 // indirect
|
golang.org/x/text v0.28.0 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
14
go.sum
14
go.sum
@@ -15,6 +15,8 @@ github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
|
|||||||
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
||||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
|
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||||
|
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
@@ -22,6 +24,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
|||||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
||||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||||
@@ -36,11 +40,21 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
|
|||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
|
||||||
|
github.com/uptrace/bun v1.2.16 h1:QlObi6ZIK5Ao7kAALnh91HWYNZUBbVwye52fmlQM9kc=
|
||||||
|
github.com/uptrace/bun v1.2.16/go.mod h1:jMoNg2n56ckaawi/O/J92BHaECmrz6IRjuMWqlMaMTM=
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||||
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
|
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
|
||||||
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
|
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
|
||||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||||
|
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||||
|
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
|
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
|
||||||
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
|
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
|||||||
@@ -1,71 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Ask if the user wants to make a release version
|
|
||||||
read -p "Do you want to make a release version? (y/n): " make_release
|
|
||||||
|
|
||||||
if [[ $make_release =~ ^[Yy]$ ]]; then
|
|
||||||
# Get the latest tag from git
|
|
||||||
latest_tag=$(git describe --tags --abbrev=0 2>/dev/null)
|
|
||||||
|
|
||||||
if [ -z "$latest_tag" ]; then
|
|
||||||
# No tags exist yet, start with v1.0.0
|
|
||||||
suggested_version="v1.0.0"
|
|
||||||
echo "No existing tags found. Starting with $suggested_version"
|
|
||||||
else
|
|
||||||
echo "Latest tag: $latest_tag"
|
|
||||||
|
|
||||||
# Remove 'v' prefix if present
|
|
||||||
version_number="${latest_tag#v}"
|
|
||||||
|
|
||||||
# Split version into major.minor.patch
|
|
||||||
IFS='.' read -r major minor patch <<< "$version_number"
|
|
||||||
|
|
||||||
# Increment patch version
|
|
||||||
patch=$((patch + 1))
|
|
||||||
|
|
||||||
# Construct new version
|
|
||||||
suggested_version="v${major}.${minor}.${patch}"
|
|
||||||
echo "Suggested next version: $suggested_version"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ask the user for the version number with the suggested version as default
|
|
||||||
read -p "Enter the version number (press Enter for $suggested_version): " version
|
|
||||||
|
|
||||||
# Use suggested version if user pressed Enter without input
|
|
||||||
if [ -z "$version" ]; then
|
|
||||||
version="$suggested_version"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Prepend 'v' to the version if it doesn't start with it
|
|
||||||
if ! [[ $version =~ ^v ]]; then
|
|
||||||
version="v$version"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Get commit logs since the last tag
|
|
||||||
if [ -z "$latest_tag" ]; then
|
|
||||||
# No previous tag, get all commits
|
|
||||||
commit_logs=$(git log --pretty=format:"- %s" --no-merges)
|
|
||||||
else
|
|
||||||
# Get commits since the last tag
|
|
||||||
commit_logs=$(git log "${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges)
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create the tag message
|
|
||||||
if [ -z "$commit_logs" ]; then
|
|
||||||
tag_message="Release $version"
|
|
||||||
else
|
|
||||||
tag_message="Release $version
|
|
||||||
|
|
||||||
${commit_logs}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create an annotated tag with the commit logs
|
|
||||||
git tag -a "$version" -m "$tag_message"
|
|
||||||
|
|
||||||
# Push the tag to the remote repository
|
|
||||||
git push origin "$version"
|
|
||||||
|
|
||||||
echo "Tag $version created and pushed to the remote repository."
|
|
||||||
else
|
|
||||||
echo "No release version created."
|
|
||||||
fi
|
|
||||||
@@ -2,14 +2,15 @@ package diff
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"reflect"
|
"reflect"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
)
|
)
|
||||||
|
|
||||||
// CompareDatabases compares two database models and returns the differences
|
// CompareDatabases compares two database models and returns the differences
|
||||||
func CompareDatabases(source, target *models.Database) *DiffResult {
|
func CompareDatabases(source, target *models.Database) *DiffResult {
|
||||||
result := &DiffResult{
|
result := &DiffResult{
|
||||||
Source: source.Name,
|
Source: source.Name,
|
||||||
Target: target.Name,
|
Target: target.Name,
|
||||||
Schemas: compareSchemas(source.Schemas, target.Schemas),
|
Schemas: compareSchemas(source.Schemas, target.Schemas),
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import "git.warky.dev/wdevs/relspecgo/pkg/models"
|
|||||||
|
|
||||||
// DiffResult represents the complete difference analysis between two databases
|
// DiffResult represents the complete difference analysis between two databases
|
||||||
type DiffResult struct {
|
type DiffResult struct {
|
||||||
Source string `json:"source"`
|
Source string `json:"source"`
|
||||||
Target string `json:"target"`
|
Target string `json:"target"`
|
||||||
Schemas *SchemaDiff `json:"schemas"`
|
Schemas *SchemaDiff `json:"schemas"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -18,17 +18,17 @@ type SchemaDiff struct {
|
|||||||
|
|
||||||
// SchemaChange represents changes within a schema
|
// SchemaChange represents changes within a schema
|
||||||
type SchemaChange struct {
|
type SchemaChange struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Tables *TableDiff `json:"tables,omitempty"`
|
Tables *TableDiff `json:"tables,omitempty"`
|
||||||
Views *ViewDiff `json:"views,omitempty"`
|
Views *ViewDiff `json:"views,omitempty"`
|
||||||
Sequences *SequenceDiff `json:"sequences,omitempty"`
|
Sequences *SequenceDiff `json:"sequences,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TableDiff represents differences in tables
|
// TableDiff represents differences in tables
|
||||||
type TableDiff struct {
|
type TableDiff struct {
|
||||||
Missing []*models.Table `json:"missing"` // Tables in source but not in target
|
Missing []*models.Table `json:"missing"` // Tables in source but not in target
|
||||||
Extra []*models.Table `json:"extra"` // Tables in target but not in source
|
Extra []*models.Table `json:"extra"` // Tables in target but not in source
|
||||||
Modified []*TableChange `json:"modified"` // Tables that exist in both but differ
|
Modified []*TableChange `json:"modified"` // Tables that exist in both but differ
|
||||||
}
|
}
|
||||||
|
|
||||||
// TableChange represents changes within a table
|
// TableChange represents changes within a table
|
||||||
@@ -50,16 +50,16 @@ type ColumnDiff struct {
|
|||||||
|
|
||||||
// ColumnChange represents a modified column
|
// ColumnChange represents a modified column
|
||||||
type ColumnChange struct {
|
type ColumnChange struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Source *models.Column `json:"source"`
|
Source *models.Column `json:"source"`
|
||||||
Target *models.Column `json:"target"`
|
Target *models.Column `json:"target"`
|
||||||
Changes map[string]any `json:"changes"` // Map of field name to what changed
|
Changes map[string]any `json:"changes"` // Map of field name to what changed
|
||||||
}
|
}
|
||||||
|
|
||||||
// IndexDiff represents differences in indexes
|
// IndexDiff represents differences in indexes
|
||||||
type IndexDiff struct {
|
type IndexDiff struct {
|
||||||
Missing []*models.Index `json:"missing"` // Indexes in source but not in target
|
Missing []*models.Index `json:"missing"` // Indexes in source but not in target
|
||||||
Extra []*models.Index `json:"extra"` // Indexes in target but not in source
|
Extra []*models.Index `json:"extra"` // Indexes in target but not in source
|
||||||
Modified []*IndexChange `json:"modified"` // Indexes that exist in both but differ
|
Modified []*IndexChange `json:"modified"` // Indexes that exist in both but differ
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -103,8 +103,8 @@ type RelationshipChange struct {
|
|||||||
|
|
||||||
// ViewDiff represents differences in views
|
// ViewDiff represents differences in views
|
||||||
type ViewDiff struct {
|
type ViewDiff struct {
|
||||||
Missing []*models.View `json:"missing"` // Views in source but not in target
|
Missing []*models.View `json:"missing"` // Views in source but not in target
|
||||||
Extra []*models.View `json:"extra"` // Views in target but not in source
|
Extra []*models.View `json:"extra"` // Views in target but not in source
|
||||||
Modified []*ViewChange `json:"modified"` // Views that exist in both but differ
|
Modified []*ViewChange `json:"modified"` // Views that exist in both but differ
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -133,14 +133,14 @@ type SequenceChange struct {
|
|||||||
|
|
||||||
// Summary provides counts for quick overview
|
// Summary provides counts for quick overview
|
||||||
type Summary struct {
|
type Summary struct {
|
||||||
Schemas SchemaSummary `json:"schemas"`
|
Schemas SchemaSummary `json:"schemas"`
|
||||||
Tables TableSummary `json:"tables"`
|
Tables TableSummary `json:"tables"`
|
||||||
Columns ColumnSummary `json:"columns"`
|
Columns ColumnSummary `json:"columns"`
|
||||||
Indexes IndexSummary `json:"indexes"`
|
Indexes IndexSummary `json:"indexes"`
|
||||||
Constraints ConstraintSummary `json:"constraints"`
|
Constraints ConstraintSummary `json:"constraints"`
|
||||||
Relationships RelationshipSummary `json:"relationships"`
|
Relationships RelationshipSummary `json:"relationships"`
|
||||||
Views ViewSummary `json:"views"`
|
Views ViewSummary `json:"views"`
|
||||||
Sequences SequenceSummary `json:"sequences"`
|
Sequences SequenceSummary `json:"sequences"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type SchemaSummary struct {
|
type SchemaSummary struct {
|
||||||
|
|||||||
177
pkg/inspector/.relspec-rules.yaml.example
Normal file
177
pkg/inspector/.relspec-rules.yaml.example
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
# RelSpec Inspector Rules Configuration Example
|
||||||
|
# Copy this file to .relspec-rules.yaml and customize as needed
|
||||||
|
|
||||||
|
version: "1.0"
|
||||||
|
|
||||||
|
rules:
|
||||||
|
# ============================================================================
|
||||||
|
# PRIMARY KEY RULES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Validate primary key column naming convention
|
||||||
|
primary_key_naming:
|
||||||
|
enabled: warn # enforce|warn|off
|
||||||
|
function: primary_key_naming
|
||||||
|
pattern: "^id_" # Regex pattern - PK columns must start with "id_"
|
||||||
|
message: "Primary key columns should start with 'id_'"
|
||||||
|
|
||||||
|
# Validate primary key data types
|
||||||
|
primary_key_datatype:
|
||||||
|
enabled: warn
|
||||||
|
function: primary_key_datatype
|
||||||
|
allowed_types:
|
||||||
|
- bigserial
|
||||||
|
- bigint
|
||||||
|
- int
|
||||||
|
- serial
|
||||||
|
- integer
|
||||||
|
- int4
|
||||||
|
- int8
|
||||||
|
message: "Primary keys should use integer types (bigserial, bigint, int, serial)"
|
||||||
|
|
||||||
|
# Check if primary keys have auto-increment enabled
|
||||||
|
primary_key_auto_increment:
|
||||||
|
enabled: off # Often disabled as not all PKs need auto-increment
|
||||||
|
function: primary_key_auto_increment
|
||||||
|
require_auto_increment: true
|
||||||
|
message: "Primary key without auto-increment detected"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# FOREIGN KEY RULES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Validate foreign key column naming convention
|
||||||
|
foreign_key_column_naming:
|
||||||
|
enabled: warn
|
||||||
|
function: foreign_key_column_naming
|
||||||
|
pattern: "^rid_" # FK columns must start with "rid_" (referenced id)
|
||||||
|
message: "Foreign key columns should start with 'rid_'"
|
||||||
|
|
||||||
|
# Validate foreign key constraint naming convention
|
||||||
|
foreign_key_constraint_naming:
|
||||||
|
enabled: warn
|
||||||
|
function: foreign_key_constraint_naming
|
||||||
|
pattern: "^fk_" # FK constraints must start with "fk_"
|
||||||
|
message: "Foreign key constraint names should start with 'fk_'"
|
||||||
|
|
||||||
|
# Ensure foreign key columns have indexes for performance
|
||||||
|
foreign_key_index:
|
||||||
|
enabled: warn
|
||||||
|
function: foreign_key_index
|
||||||
|
require_index: true
|
||||||
|
message: "Foreign key columns should have indexes for optimal performance"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# NAMING CONVENTION RULES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Validate table naming follows snake_case convention
|
||||||
|
table_naming_case:
|
||||||
|
enabled: warn
|
||||||
|
function: table_regexpr # Generic regex validator for table names
|
||||||
|
case: lowercase
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
|
||||||
|
message: "Table names should be lowercase with underscores (snake_case)"
|
||||||
|
|
||||||
|
# Validate column naming follows snake_case convention
|
||||||
|
column_naming_case:
|
||||||
|
enabled: warn
|
||||||
|
function: column_regexpr # Generic regex validator for column names
|
||||||
|
case: lowercase
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$" # Lowercase letters, numbers, underscores only
|
||||||
|
message: "Column names should be lowercase with underscores (snake_case)"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# LENGTH RULES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Limit table name length (PostgreSQL max is 63, but 64 is common practice)
|
||||||
|
table_name_length:
|
||||||
|
enabled: warn
|
||||||
|
function: table_name_length
|
||||||
|
max_length: 64
|
||||||
|
message: "Table name exceeds recommended maximum length of 64 characters"
|
||||||
|
|
||||||
|
# Limit column name length
|
||||||
|
column_name_length:
|
||||||
|
enabled: warn
|
||||||
|
function: column_name_length
|
||||||
|
max_length: 64
|
||||||
|
message: "Column name exceeds recommended maximum length of 64 characters"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# RESERVED KEYWORDS
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Warn about using SQL reserved keywords as identifiers
|
||||||
|
reserved_keywords:
|
||||||
|
enabled: warn
|
||||||
|
function: reserved_words
|
||||||
|
check_tables: true
|
||||||
|
check_columns: true
|
||||||
|
message: "Using SQL reserved keywords as identifiers can cause issues"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# SCHEMA INTEGRITY RULES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# Ensure all tables have primary keys
|
||||||
|
missing_primary_key:
|
||||||
|
enabled: warn
|
||||||
|
function: have_primary_key
|
||||||
|
message: "Table is missing a primary key"
|
||||||
|
|
||||||
|
# Detect orphaned foreign keys (referencing non-existent tables)
|
||||||
|
orphaned_foreign_key:
|
||||||
|
enabled: warn
|
||||||
|
function: orphaned_foreign_key
|
||||||
|
message: "Foreign key references a non-existent table"
|
||||||
|
|
||||||
|
# Detect circular foreign key dependencies
|
||||||
|
circular_dependency:
|
||||||
|
enabled: warn
|
||||||
|
function: circular_dependency
|
||||||
|
message: "Circular foreign key dependency detected"
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# RULE CONFIGURATION NOTES
|
||||||
|
# ============================================================================
|
||||||
|
#
|
||||||
|
# enabled: Controls rule enforcement level
|
||||||
|
# - enforce: Violations are errors (exit code 1)
|
||||||
|
# - warn: Violations are warnings (exit code 0)
|
||||||
|
# - off: Rule is disabled
|
||||||
|
#
|
||||||
|
# function: The validation function to execute
|
||||||
|
# - Must match a registered validator function
|
||||||
|
# - Generic functions like table_regexpr and column_regexpr can be reused
|
||||||
|
#
|
||||||
|
# pattern: Regular expression for pattern matching
|
||||||
|
# - Used by naming validators
|
||||||
|
# - Must be valid Go regex syntax
|
||||||
|
#
|
||||||
|
# message: Custom message shown when rule is violated
|
||||||
|
# - Should be clear and actionable
|
||||||
|
# - Explains what the violation is and how to fix it
|
||||||
|
#
|
||||||
|
# ============================================================================
|
||||||
|
# CUSTOM RULES EXAMPLES
|
||||||
|
# ============================================================================
|
||||||
|
#
|
||||||
|
# You can add custom rules using the generic validator functions:
|
||||||
|
#
|
||||||
|
# # Example: Ensure table names don't contain numbers
|
||||||
|
# table_no_numbers:
|
||||||
|
# enabled: warn
|
||||||
|
# function: table_regexpr
|
||||||
|
# pattern: "^[a-z_]+$"
|
||||||
|
# message: "Table names should not contain numbers"
|
||||||
|
#
|
||||||
|
# # Example: Audit columns must end with _audit
|
||||||
|
# audit_column_suffix:
|
||||||
|
# enabled: enforce
|
||||||
|
# function: column_regexpr
|
||||||
|
# pattern: ".*_audit$"
|
||||||
|
# message: "Audit columns must end with '_audit'"
|
||||||
|
#
|
||||||
|
# ============================================================================
|
||||||
472
pkg/inspector/PLAN.md
Normal file
472
pkg/inspector/PLAN.md
Normal file
@@ -0,0 +1,472 @@
|
|||||||
|
# Inspector Feature Implementation Plan
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Add a model inspection feature that validates database schemas against configurable rules. The inspector will read any supported format, apply validation rules from a YAML config, and output a report in markdown or JSON format.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Core Components
|
||||||
|
|
||||||
|
1. **CLI Command** (`cmd/relspec/inspect.go`)
|
||||||
|
- New subcommand: `relspec inspect`
|
||||||
|
- Flags:
|
||||||
|
- `--from` (required): Input format (dbml, pgsql, json, etc.)
|
||||||
|
- `--from-path`: File path for file-based formats
|
||||||
|
- `--from-conn`: Connection string for database formats
|
||||||
|
- `--rules` (optional): Path to rules YAML file (default: `.relspec-rules.yaml`)
|
||||||
|
- `--output-format`: Report format (markdown, json) (default: markdown)
|
||||||
|
- `--output`: Output file path (default: stdout)
|
||||||
|
- `--schema`: Schema name filter (optional)
|
||||||
|
|
||||||
|
2. **Inspector Package** (`pkg/inspector/`)
|
||||||
|
- `inspector.go`: Main inspector logic
|
||||||
|
- `rules.go`: Rule definitions and configuration
|
||||||
|
- `validators.go`: Individual validation rule implementations
|
||||||
|
- `report.go`: Report generation (markdown, JSON)
|
||||||
|
- `config.go`: YAML config loading and parsing
|
||||||
|
|
||||||
|
### Data Flow
|
||||||
|
```
|
||||||
|
Input Format → Reader → Database Model → Inspector → Validation Results → Report Formatter → Output
|
||||||
|
```
|
||||||
|
|
||||||
|
## Rules Configuration Structure
|
||||||
|
|
||||||
|
### YAML Schema (`rules.yaml`)
|
||||||
|
```yaml
|
||||||
|
version: "1.0"
|
||||||
|
rules:
|
||||||
|
# Primary Key Rules
|
||||||
|
primary_key_naming:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
pattern: "^id_" # regex pattern
|
||||||
|
message: "Primary key columns must start with 'id_'"
|
||||||
|
|
||||||
|
primary_key_datatype:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
allowed_types: ["bigserial", "bigint", "int", "serial", "integer"]
|
||||||
|
message: "Primary keys must use approved integer types"
|
||||||
|
|
||||||
|
primary_key_auto_increment:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
require_auto_increment: true|false
|
||||||
|
message: "Primary keys without auto-increment detected"
|
||||||
|
|
||||||
|
# Foreign Key Rules
|
||||||
|
foreign_key_column_naming:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
pattern: "^rid_"
|
||||||
|
message: "Foreign key columns must start with 'rid_'"
|
||||||
|
|
||||||
|
foreign_key_constraint_naming:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
pattern: "^fk_"
|
||||||
|
message: "Foreign key constraint names must start with 'fk_'"
|
||||||
|
|
||||||
|
foreign_key_index:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
require_index: true
|
||||||
|
message: "Foreign keys should have indexes"
|
||||||
|
|
||||||
|
# Naming Convention Rules
|
||||||
|
table_naming_case:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
case: "lowercase" # lowercase, uppercase, snake_case, camelCase
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$"
|
||||||
|
message: "Table names must be lowercase with underscores"
|
||||||
|
|
||||||
|
column_naming_case:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
case: "lowercase"
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$"
|
||||||
|
message: "Column names must be lowercase with underscores"
|
||||||
|
|
||||||
|
# Length Rules
|
||||||
|
table_name_length:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
max_length: 64
|
||||||
|
message: "Table name exceeds maximum length"
|
||||||
|
|
||||||
|
column_name_length:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
max_length: 64
|
||||||
|
message: "Column name exceeds maximum length"
|
||||||
|
|
||||||
|
# Reserved Keywords
|
||||||
|
reserved_keywords:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
check_tables: true
|
||||||
|
check_columns: true
|
||||||
|
message: "Using reserved SQL keywords"
|
||||||
|
|
||||||
|
# Schema Integrity Rules
|
||||||
|
missing_primary_key:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
message: "Table missing primary key"
|
||||||
|
|
||||||
|
orphaned_foreign_key:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
message: "Foreign key references non-existent table"
|
||||||
|
|
||||||
|
circular_dependency:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
message: "Circular foreign key dependency detected"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rule Levels
|
||||||
|
- **enforce**: Violations are errors (exit code 1)
|
||||||
|
- **warn**: Violations are warnings (exit code 0)
|
||||||
|
- **off**: Rule disabled
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### 1. Inspector Core (`pkg/inspector/inspector.go`)
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Inspector struct {
|
||||||
|
config *Config
|
||||||
|
db *models.Database
|
||||||
|
}
|
||||||
|
|
||||||
|
type ValidationResult struct {
|
||||||
|
RuleName string
|
||||||
|
Level string // "error" or "warning"
|
||||||
|
Message string
|
||||||
|
Location string // e.g., "schema.table.column"
|
||||||
|
Context map[string]interface{}
|
||||||
|
Passed bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type InspectorReport struct {
|
||||||
|
Summary ReportSummary
|
||||||
|
Violations []ValidationResult
|
||||||
|
GeneratedAt time.Time
|
||||||
|
Database string
|
||||||
|
SourceFormat string
|
||||||
|
}
|
||||||
|
|
||||||
|
type ReportSummary struct {
|
||||||
|
TotalRules int
|
||||||
|
RulesChecked int
|
||||||
|
RulesSkipped int
|
||||||
|
ErrorCount int
|
||||||
|
WarningCount int
|
||||||
|
PassedCount int
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewInspector(db *models.Database, config *Config) *Inspector
|
||||||
|
func (i *Inspector) Inspect() (*InspectorReport, error)
|
||||||
|
func (i *Inspector) validateDatabase() []ValidationResult
|
||||||
|
func (i *Inspector) validateSchema(schema *models.Schema) []ValidationResult
|
||||||
|
func (i *Inspector) validateTable(table *models.Table) []ValidationResult
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Rule Definitions (`pkg/inspector/rules.go`)
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Config struct {
|
||||||
|
Version string
|
||||||
|
Rules map[string]Rule
|
||||||
|
}
|
||||||
|
|
||||||
|
type Rule struct {
|
||||||
|
Enabled string // "enforce", "warn", "off"
|
||||||
|
Message string
|
||||||
|
Pattern string
|
||||||
|
AllowedTypes []string
|
||||||
|
MaxLength int
|
||||||
|
Case string
|
||||||
|
RequireIndex bool
|
||||||
|
CheckTables bool
|
||||||
|
CheckColumns bool
|
||||||
|
// ... rule-specific fields
|
||||||
|
}
|
||||||
|
|
||||||
|
type RuleValidator interface {
|
||||||
|
Name() string
|
||||||
|
Validate(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func LoadConfig(path string) (*Config, error)
|
||||||
|
func GetDefaultConfig() *Config
|
||||||
|
```
|
||||||
|
|
||||||
|
**Configuration Loading Behavior:**
|
||||||
|
- If `--rules` flag is provided but file not found: Use default configuration (don't error)
|
||||||
|
- If file exists but is invalid YAML: Return error
|
||||||
|
- Default configuration has sensible rules enabled at "warn" level
|
||||||
|
- Users can override by creating their own `.relspec-rules.yaml` file
|
||||||
|
|
||||||
|
### 3. Validators (`pkg/inspector/validators.go`)
|
||||||
|
|
||||||
|
Each validator implements rule logic:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Primary Key Validators
|
||||||
|
func validatePrimaryKeyNaming(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validatePrimaryKeyDatatype(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Foreign Key Validators
|
||||||
|
func validateForeignKeyColumnNaming(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateForeignKeyIndex(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Naming Convention Validators
|
||||||
|
func validateTableNamingCase(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateColumnNamingCase(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Length Validators
|
||||||
|
func validateTableNameLength(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateColumnNameLength(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Reserved Keywords Validator
|
||||||
|
func validateReservedKeywords(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Integrity Validators
|
||||||
|
func validateMissingPrimaryKey(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateOrphanedForeignKey(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
func validateCircularDependency(db *models.Database, rule Rule) []ValidationResult
|
||||||
|
|
||||||
|
// Registry of all validators
|
||||||
|
var validators = map[string]RuleValidator{
|
||||||
|
"primary_key_naming": primaryKeyNamingValidator{},
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Report Formatting (`pkg/inspector/report.go`)
|
||||||
|
|
||||||
|
```go
|
||||||
|
type ReportFormatter interface {
|
||||||
|
Format(report *InspectorReport) (string, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type MarkdownFormatter struct {
|
||||||
|
UseColors bool // ANSI colors for terminal output
|
||||||
|
}
|
||||||
|
type JSONFormatter struct{}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error)
|
||||||
|
func (f *JSONFormatter) Format(report *InspectorReport) (string, error)
|
||||||
|
|
||||||
|
// Helper to detect if output is a TTY (terminal)
|
||||||
|
func isTerminal(w io.Writer) bool
|
||||||
|
```
|
||||||
|
|
||||||
|
**Output Behavior:**
|
||||||
|
- Markdown format will use ANSI color codes when outputting to a terminal (TTY)
|
||||||
|
- When piped or redirected to a file, plain markdown without colors
|
||||||
|
- Colors: Red for errors, Yellow for warnings, Green for passed checks
|
||||||
|
|
||||||
|
**Markdown Format Example:**
|
||||||
|
```markdown
|
||||||
|
# RelSpec Inspector Report
|
||||||
|
|
||||||
|
**Database:** my_database
|
||||||
|
**Source Format:** pgsql
|
||||||
|
**Generated:** 2025-12-31 10:30:45
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
- Rules Checked: 12
|
||||||
|
- Errors: 3
|
||||||
|
- Warnings: 5
|
||||||
|
- Passed: 4
|
||||||
|
|
||||||
|
## Violations
|
||||||
|
|
||||||
|
### Errors (3)
|
||||||
|
|
||||||
|
#### primary_key_naming
|
||||||
|
**Table:** users, **Column:** user_id
|
||||||
|
Primary key columns must start with 'id_'
|
||||||
|
|
||||||
|
#### table_name_length
|
||||||
|
**Table:** user_authentication_sessions_with_metadata
|
||||||
|
Table name exceeds maximum length (64 characters)
|
||||||
|
|
||||||
|
### Warnings (5)
|
||||||
|
|
||||||
|
#### foreign_key_index
|
||||||
|
**Table:** orders, **Column:** customer_id
|
||||||
|
Foreign keys should have indexes
|
||||||
|
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
**JSON Format Example:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"summary": {
|
||||||
|
"total_rules": 12,
|
||||||
|
"rules_checked": 12,
|
||||||
|
"error_count": 3,
|
||||||
|
"warning_count": 5,
|
||||||
|
"passed_count": 4
|
||||||
|
},
|
||||||
|
"violations": [
|
||||||
|
{
|
||||||
|
"rule_name": "primary_key_naming",
|
||||||
|
"level": "error",
|
||||||
|
"message": "Primary key columns must start with 'id_'",
|
||||||
|
"location": "public.users.user_id",
|
||||||
|
"context": {
|
||||||
|
"schema": "public",
|
||||||
|
"table": "users",
|
||||||
|
"column": "user_id",
|
||||||
|
"current_name": "user_id",
|
||||||
|
"expected_pattern": "^id_"
|
||||||
|
},
|
||||||
|
"passed": false
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"generated_at": "2025-12-31T10:30:45Z",
|
||||||
|
"database": "my_database",
|
||||||
|
"source_format": "pgsql"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. CLI Command (`cmd/relspec/inspect.go`)
|
||||||
|
|
||||||
|
```go
|
||||||
|
var inspectCmd = &cobra.Command{
|
||||||
|
Use: "inspect",
|
||||||
|
Short: "Inspect and validate database schemas against rules",
|
||||||
|
Long: `Read database schemas from various formats and validate against configurable rules.`,
|
||||||
|
RunE: runInspect,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
inspectCmd.Flags().String("from", "", "Input format (dbml, pgsql, json, etc.)")
|
||||||
|
inspectCmd.Flags().String("from-path", "", "Input file path")
|
||||||
|
inspectCmd.Flags().String("from-conn", "", "Database connection string")
|
||||||
|
inspectCmd.Flags().String("rules", ".relspec-rules.yaml", "Rules configuration file")
|
||||||
|
inspectCmd.Flags().String("output-format", "markdown", "Output format (markdown, json)")
|
||||||
|
inspectCmd.Flags().String("output", "", "Output file (default: stdout)")
|
||||||
|
inspectCmd.Flags().String("schema", "", "Filter by schema name")
|
||||||
|
inspectCmd.MarkFlagRequired("from")
|
||||||
|
}
|
||||||
|
|
||||||
|
func runInspect(cmd *cobra.Command, args []string) error {
|
||||||
|
// 1. Parse flags
|
||||||
|
// 2. Create reader (reuse pattern from convert.go)
|
||||||
|
// 3. Read database
|
||||||
|
// 4. Load rules config (use defaults if file not found)
|
||||||
|
// 5. Create inspector
|
||||||
|
// 6. Run inspection
|
||||||
|
// 7. Detect if output is terminal (for color support)
|
||||||
|
// 8. Format report (with/without ANSI colors)
|
||||||
|
// 9. Write output
|
||||||
|
// 10. Exit with appropriate code (0 if no errors, 1 if errors)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Implementation Phases
|
||||||
|
|
||||||
|
### Phase 1: Core Infrastructure
|
||||||
|
1. Create `pkg/inspector/` package structure
|
||||||
|
2. Implement `Config` and YAML loading
|
||||||
|
3. Implement `Inspector` core with basic validation framework
|
||||||
|
4. Create CLI command skeleton
|
||||||
|
|
||||||
|
### Phase 2: Basic Validators
|
||||||
|
1. Implement naming convention validators
|
||||||
|
- Primary key naming
|
||||||
|
- Foreign key column naming
|
||||||
|
- Foreign key constraint naming
|
||||||
|
- Table/column case validation
|
||||||
|
2. Implement length validators
|
||||||
|
3. Implement reserved keywords validator (leverage `pkg/pgsql/keywords.go`)
|
||||||
|
|
||||||
|
### Phase 3: Advanced Validators
|
||||||
|
1. Implement datatype validators
|
||||||
|
2. Implement integrity validators (missing PK, orphaned FK, circular deps)
|
||||||
|
3. Implement foreign key index validator
|
||||||
|
|
||||||
|
### Phase 4: Reporting
|
||||||
|
1. Implement `InspectorReport` structure
|
||||||
|
2. Implement markdown formatter
|
||||||
|
3. Implement JSON formatter
|
||||||
|
4. Add summary statistics
|
||||||
|
|
||||||
|
### Phase 5: CLI Integration
|
||||||
|
1. Wire up CLI command with flags
|
||||||
|
2. Integrate reader factory (from convert.go pattern)
|
||||||
|
3. Add output file handling
|
||||||
|
4. Add exit code logic
|
||||||
|
5. Add progress reporting
|
||||||
|
|
||||||
|
### Phase 6: Testing & Documentation
|
||||||
|
1. Unit tests for validators
|
||||||
|
2. Integration tests with sample schemas
|
||||||
|
3. Test with all reader formats
|
||||||
|
4. Update README with inspector documentation
|
||||||
|
5. Create example rules configuration file
|
||||||
|
|
||||||
|
## Files to Create
|
||||||
|
|
||||||
|
1. `pkg/inspector/inspector.go` - Core inspector logic
|
||||||
|
2. `pkg/inspector/rules.go` - Rule definitions and config loading
|
||||||
|
3. `pkg/inspector/validators.go` - Validation implementations
|
||||||
|
4. `pkg/inspector/report.go` - Report formatting
|
||||||
|
5. `pkg/inspector/config.go` - Config utilities
|
||||||
|
6. `cmd/relspec/inspect.go` - CLI command
|
||||||
|
7. `.relspec-rules.yaml.example` - Example configuration
|
||||||
|
8. `pkg/inspector/inspector_test.go` - Tests
|
||||||
|
|
||||||
|
## Files to Modify
|
||||||
|
|
||||||
|
1. `cmd/relspec/root.go` - Register inspect command
|
||||||
|
2. `README.md` - Add inspector documentation (if requested)
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Inspect a PostgreSQL database with default rules
|
||||||
|
relspec inspect --from pgsql --from-conn "postgresql://localhost/mydb"
|
||||||
|
|
||||||
|
# Inspect a DBML file with custom rules
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml --rules my-rules.yaml
|
||||||
|
|
||||||
|
# Output JSON report to file
|
||||||
|
relspec inspect --from json --from-path db.json --output-format json --output report.json
|
||||||
|
|
||||||
|
# Inspect specific schema only
|
||||||
|
relspec inspect --from pgsql --from-conn "..." --schema public
|
||||||
|
|
||||||
|
# Use custom rules location
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml --rules /path/to/rules.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Exit Codes
|
||||||
|
- 0: Success (no errors, only warnings or all passed)
|
||||||
|
- 1: Validation errors found (rules with level="enforce" failed)
|
||||||
|
- 2: Runtime error (invalid config, reader error, etc.)
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
- Existing: `pkg/models`, `pkg/readers`, `pkg/pgsql/keywords.go`
|
||||||
|
- New: `gopkg.in/yaml.v3` for YAML parsing (may already be in go.mod)
|
||||||
|
|
||||||
|
## Design Decisions
|
||||||
|
|
||||||
|
### Confirmed Choices (from user)
|
||||||
|
1. **Example config file**: Create `.relspec-rules.yaml.example` in repository root with documented examples
|
||||||
|
2. **Missing rules file**: Use sensible built-in defaults (don't error), all rules at "warn" level by default
|
||||||
|
3. **Terminal output**: ANSI colors (red/yellow/green) when outputting to terminal, plain markdown when piped/redirected
|
||||||
|
4. **Foreign key naming**: Separate configurable rules for both FK column names and FK constraint names
|
||||||
|
|
||||||
|
### Architecture Rationale
|
||||||
|
1. **Why YAML for config?**: Human-readable, supports comments, standard for config files
|
||||||
|
2. **Why three levels (enforce/warn/off)?**: Flexibility for gradual adoption, different contexts
|
||||||
|
3. **Why markdown + JSON?**: Markdown for human review, JSON for tooling integration
|
||||||
|
4. **Why pkg/inspector?**: Follows existing package structure, separates concerns
|
||||||
|
5. **Reuse readers**: Leverage existing reader infrastructure, supports all formats automatically
|
||||||
|
6. **Exit codes**: Follow standard conventions (0=success, 1=validation fail, 2=error)
|
||||||
|
|
||||||
|
## Future Enhancements (Not in Scope)
|
||||||
|
- Auto-fix mode (automatically rename columns, etc.)
|
||||||
|
- Custom rule plugins
|
||||||
|
- HTML report format
|
||||||
|
- Rule templates for different databases
|
||||||
|
- CI/CD integration examples
|
||||||
|
- Performance metrics in report
|
||||||
485
pkg/inspector/README.md
Normal file
485
pkg/inspector/README.md
Normal file
@@ -0,0 +1,485 @@
|
|||||||
|
# RelSpec Inspector
|
||||||
|
|
||||||
|
> Database Schema Validation and Linting Tool
|
||||||
|
|
||||||
|
The RelSpec Inspector validates database schemas against configurable rules, helping you maintain consistency, enforce naming conventions, and catch common schema design issues across your database models.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Inspector reads database schemas from any supported RelSpec format and validates them against a set of configurable rules. It generates detailed reports highlighting violations, warnings, and passed checks.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Flexible Rule Configuration**: YAML-based rules with three severity levels (enforce, warn, off)
|
||||||
|
- **Generic Validators**: Reusable regex-based validators for custom naming conventions
|
||||||
|
- **Multiple Input Formats**: Works with all RelSpec readers (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
|
||||||
|
- **Multiple Output Formats**: Markdown with ANSI colors for terminals, JSON for tooling integration
|
||||||
|
- **Smart Defaults**: Works out-of-the-box with sensible default rules
|
||||||
|
- **Terminal-Aware**: Automatic color support detection for improved readability
|
||||||
|
- **Exit Codes**: Proper exit codes for CI/CD integration
|
||||||
|
|
||||||
|
[Todo List of Features](./TODO.md)
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Inspect a PostgreSQL database with default rules
|
||||||
|
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
|
||||||
|
|
||||||
|
# Inspect a DBML file
|
||||||
|
relspec inspect --from dbml --from-path schema.dbml
|
||||||
|
|
||||||
|
# Inspect with custom rules
|
||||||
|
relspec inspect --from json --from-path db.json --rules my-rules.yaml
|
||||||
|
|
||||||
|
# Output JSON report to file
|
||||||
|
relspec inspect --from pgsql --from-conn "..." \
|
||||||
|
--output-format json --output report.json
|
||||||
|
|
||||||
|
# Inspect specific schema only
|
||||||
|
relspec inspect --from pgsql --from-conn "..." --schema public
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
Create a `.relspec-rules.yaml` file to customize validation rules. If the file doesn't exist, the inspector uses sensible defaults.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
version: "1.0"
|
||||||
|
|
||||||
|
rules:
|
||||||
|
# Primary key columns must start with "id_"
|
||||||
|
primary_key_naming:
|
||||||
|
enabled: enforce # enforce|warn|off
|
||||||
|
function: primary_key_naming
|
||||||
|
pattern: "^id_"
|
||||||
|
message: "Primary key columns must start with 'id_'"
|
||||||
|
|
||||||
|
# Foreign key columns must start with "rid_"
|
||||||
|
foreign_key_column_naming:
|
||||||
|
enabled: warn
|
||||||
|
function: foreign_key_column_naming
|
||||||
|
pattern: "^rid_"
|
||||||
|
message: "Foreign key columns should start with 'rid_'"
|
||||||
|
|
||||||
|
# Table names must be lowercase snake_case
|
||||||
|
table_naming_case:
|
||||||
|
enabled: warn
|
||||||
|
function: table_regexpr # Generic regex validator
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$"
|
||||||
|
message: "Table names should be lowercase with underscores"
|
||||||
|
|
||||||
|
# Ensure all tables have primary keys
|
||||||
|
missing_primary_key:
|
||||||
|
enabled: warn
|
||||||
|
function: have_primary_key
|
||||||
|
message: "Table is missing a primary key"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Built-in Validation Rules
|
||||||
|
|
||||||
|
### Primary Key Rules
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `primary_key_naming` | `primary_key_naming` | Validate PK column names against regex pattern |
|
||||||
|
| `primary_key_datatype` | `primary_key_datatype` | Enforce approved PK data types (bigint, serial, etc.) |
|
||||||
|
| `primary_key_auto_increment` | `primary_key_auto_increment` | Check if PKs have auto-increment enabled |
|
||||||
|
|
||||||
|
### Foreign Key Rules
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `foreign_key_column_naming` | `foreign_key_column_naming` | Validate FK column names against regex pattern |
|
||||||
|
| `foreign_key_constraint_naming` | `foreign_key_constraint_naming` | Validate FK constraint names against regex pattern |
|
||||||
|
| `foreign_key_index` | `foreign_key_index` | Ensure FK columns have indexes for performance |
|
||||||
|
|
||||||
|
### Naming Convention Rules
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `table_naming_case` | `table_regexpr` | Generic regex validator for table names |
|
||||||
|
| `column_naming_case` | `column_regexpr` | Generic regex validator for column names |
|
||||||
|
|
||||||
|
### Length Rules
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `table_name_length` | `table_name_length` | Limit table name length (default: 64 chars) |
|
||||||
|
| `column_name_length` | `column_name_length` | Limit column name length (default: 64 chars) |
|
||||||
|
|
||||||
|
### Reserved Keywords
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `reserved_keywords` | `reserved_words` | Detect use of SQL reserved keywords as identifiers |
|
||||||
|
|
||||||
|
### Schema Integrity Rules
|
||||||
|
|
||||||
|
| Rule | Function | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `missing_primary_key` | `have_primary_key` | Ensure tables have primary keys |
|
||||||
|
| `orphaned_foreign_key` | `orphaned_foreign_key` | Detect FKs referencing non-existent tables |
|
||||||
|
| `circular_dependency` | `circular_dependency` | Detect circular FK dependencies |
|
||||||
|
|
||||||
|
## Rule Configuration
|
||||||
|
|
||||||
|
### Severity Levels
|
||||||
|
|
||||||
|
Rules support three severity levels:
|
||||||
|
|
||||||
|
- **`enforce`**: Violations are errors (exit code 1)
|
||||||
|
- **`warn`**: Violations are warnings (exit code 0)
|
||||||
|
- **`off`**: Rule is disabled
|
||||||
|
|
||||||
|
### Rule Structure
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
rule_name:
|
||||||
|
enabled: enforce|warn|off
|
||||||
|
function: validator_function_name
|
||||||
|
message: "Custom message shown on violation"
|
||||||
|
# Rule-specific parameters
|
||||||
|
pattern: "^regex_pattern$" # For pattern-based validators
|
||||||
|
allowed_types: [type1, type2] # For type validators
|
||||||
|
max_length: 64 # For length validators
|
||||||
|
check_tables: true # For keyword validator
|
||||||
|
check_columns: true # For keyword validator
|
||||||
|
require_index: true # For FK index validator
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generic Validators
|
||||||
|
|
||||||
|
The inspector provides generic validator functions that can be reused for custom rules:
|
||||||
|
|
||||||
|
### `table_regexpr`
|
||||||
|
|
||||||
|
Generic regex validator for table names. Create custom table naming rules:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Example: Ensure table names don't contain numbers
|
||||||
|
table_no_numbers:
|
||||||
|
enabled: warn
|
||||||
|
function: table_regexpr
|
||||||
|
pattern: "^[a-z_]+$"
|
||||||
|
message: "Table names should not contain numbers"
|
||||||
|
|
||||||
|
# Example: Tables must start with "tbl_"
|
||||||
|
table_prefix:
|
||||||
|
enabled: enforce
|
||||||
|
function: table_regexpr
|
||||||
|
pattern: "^tbl_[a-z][a-z0-9_]*$"
|
||||||
|
message: "Table names must start with 'tbl_'"
|
||||||
|
```
|
||||||
|
|
||||||
|
### `column_regexpr`
|
||||||
|
|
||||||
|
Generic regex validator for column names. Create custom column naming rules:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Example: Audit columns must end with "_audit"
|
||||||
|
audit_column_suffix:
|
||||||
|
enabled: enforce
|
||||||
|
function: column_regexpr
|
||||||
|
pattern: ".*_audit$"
|
||||||
|
message: "Audit columns must end with '_audit'"
|
||||||
|
|
||||||
|
# Example: Timestamp columns must end with "_at"
|
||||||
|
timestamp_suffix:
|
||||||
|
enabled: warn
|
||||||
|
function: column_regexpr
|
||||||
|
pattern: ".*(created|updated|deleted)_at$"
|
||||||
|
message: "Timestamp columns should end with '_at'"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Output Formats
|
||||||
|
|
||||||
|
### Markdown (Default)
|
||||||
|
|
||||||
|
Human-readable markdown report with ANSI colors when outputting to a terminal:
|
||||||
|
|
||||||
|
```
|
||||||
|
# RelSpec Inspector Report
|
||||||
|
|
||||||
|
**Database:** my_database
|
||||||
|
**Source Format:** pgsql
|
||||||
|
**Generated:** 2025-12-31T10:30:45Z
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
- Rules Checked: 13
|
||||||
|
- Errors: 2
|
||||||
|
- Warnings: 5
|
||||||
|
- Passed: 120
|
||||||
|
|
||||||
|
## Violations
|
||||||
|
|
||||||
|
### Errors (2)
|
||||||
|
|
||||||
|
#### primary_key_naming
|
||||||
|
**Location:** public.users.user_id
|
||||||
|
**Message:** Primary key columns must start with 'id_'
|
||||||
|
**Details:** expected_pattern=^id_
|
||||||
|
|
||||||
|
### Warnings (5)
|
||||||
|
|
||||||
|
#### foreign_key_index
|
||||||
|
**Location:** public.orders.customer_id
|
||||||
|
**Message:** Foreign key columns should have indexes
|
||||||
|
**Details:** has_index=false
|
||||||
|
```
|
||||||
|
|
||||||
|
### JSON
|
||||||
|
|
||||||
|
Structured JSON output for tooling integration:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"summary": {
|
||||||
|
"total_rules": 13,
|
||||||
|
"rules_checked": 13,
|
||||||
|
"error_count": 2,
|
||||||
|
"warning_count": 5,
|
||||||
|
"passed_count": 120
|
||||||
|
},
|
||||||
|
"violations": [
|
||||||
|
{
|
||||||
|
"rule_name": "primary_key_naming",
|
||||||
|
"level": "error",
|
||||||
|
"message": "Primary key columns must start with 'id_'",
|
||||||
|
"location": "public.users.user_id",
|
||||||
|
"context": {
|
||||||
|
"schema": "public",
|
||||||
|
"table": "users",
|
||||||
|
"column": "user_id",
|
||||||
|
"expected_pattern": "^id_"
|
||||||
|
},
|
||||||
|
"passed": false
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"generated_at": "2025-12-31T10:30:45Z",
|
||||||
|
"database": "my_database",
|
||||||
|
"source_format": "pgsql"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## CLI Reference
|
||||||
|
|
||||||
|
### Flags
|
||||||
|
|
||||||
|
| Flag | Type | Description |
|
||||||
|
|------|------|-------------|
|
||||||
|
| `--from` | string | **Required**. Source format (dbml, pgsql, json, yaml, gorm, etc.) |
|
||||||
|
| `--from-path` | string | Source file path (for file-based formats) |
|
||||||
|
| `--from-conn` | string | Connection string (for database formats) |
|
||||||
|
| `--rules` | string | Path to rules YAML file (default: `.relspec-rules.yaml`) |
|
||||||
|
| `--output-format` | string | Output format: `markdown` or `json` (default: `markdown`) |
|
||||||
|
| `--output` | string | Output file path (default: stdout) |
|
||||||
|
| `--schema` | string | Filter to specific schema by name |
|
||||||
|
|
||||||
|
### Exit Codes
|
||||||
|
|
||||||
|
| Code | Meaning |
|
||||||
|
|------|---------|
|
||||||
|
| 0 | Success (no errors, only warnings or all passed) |
|
||||||
|
| 1 | Validation errors found (rules with `enabled: enforce` failed) |
|
||||||
|
| 2 | Runtime error (invalid config, reader error, etc.) |
|
||||||
|
|
||||||
|
## CI/CD Integration
|
||||||
|
|
||||||
|
### GitHub Actions Example
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
name: Schema Validation
|
||||||
|
|
||||||
|
on: [pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Install RelSpec
|
||||||
|
run: go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
|
||||||
|
|
||||||
|
- name: Validate Schema
|
||||||
|
run: |
|
||||||
|
relspec inspect \
|
||||||
|
--from dbml \
|
||||||
|
--from-path schema.dbml \
|
||||||
|
--rules .relspec-rules.yaml \
|
||||||
|
--output-format json \
|
||||||
|
--output validation-report.json
|
||||||
|
|
||||||
|
- name: Upload Report
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: validation-report
|
||||||
|
path: validation-report.json
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pre-commit Hook Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# .git/hooks/pre-commit
|
||||||
|
|
||||||
|
echo "Running schema validation..."
|
||||||
|
|
||||||
|
relspec inspect \
|
||||||
|
--from dbml \
|
||||||
|
--from-path schema.dbml \
|
||||||
|
--rules .relspec-rules.yaml
|
||||||
|
|
||||||
|
exit $?
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example Configuration File
|
||||||
|
|
||||||
|
See [`.relspec-rules.yaml.example`](../../.relspec-rules.yaml.example) for a fully documented example configuration with all available rules and customization options.
|
||||||
|
|
||||||
|
## Common Use Cases
|
||||||
|
|
||||||
|
### Enforce Naming Standards
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Ensure consistent naming across your schema
|
||||||
|
table_naming_case:
|
||||||
|
enabled: enforce
|
||||||
|
function: table_regexpr
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$"
|
||||||
|
message: "Tables must use snake_case"
|
||||||
|
|
||||||
|
column_naming_case:
|
||||||
|
enabled: enforce
|
||||||
|
function: column_regexpr
|
||||||
|
pattern: "^[a-z][a-z0-9_]*$"
|
||||||
|
message: "Columns must use snake_case"
|
||||||
|
|
||||||
|
primary_key_naming:
|
||||||
|
enabled: enforce
|
||||||
|
function: primary_key_naming
|
||||||
|
pattern: "^id$"
|
||||||
|
message: "Primary key must be named 'id'"
|
||||||
|
|
||||||
|
foreign_key_column_naming:
|
||||||
|
enabled: enforce
|
||||||
|
function: foreign_key_column_naming
|
||||||
|
pattern: "^[a-z]+_id$"
|
||||||
|
message: "Foreign keys must end with '_id'"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Performance Best Practices
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Ensure optimal database performance
|
||||||
|
foreign_key_index:
|
||||||
|
enabled: enforce
|
||||||
|
function: foreign_key_index
|
||||||
|
require_index: true
|
||||||
|
message: "Foreign keys must have indexes"
|
||||||
|
|
||||||
|
primary_key_datatype:
|
||||||
|
enabled: enforce
|
||||||
|
function: primary_key_datatype
|
||||||
|
allowed_types: [bigserial, bigint]
|
||||||
|
message: "Use bigserial or bigint for primary keys"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Integrity
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Prevent common schema issues
|
||||||
|
missing_primary_key:
|
||||||
|
enabled: enforce
|
||||||
|
function: have_primary_key
|
||||||
|
message: "All tables must have a primary key"
|
||||||
|
|
||||||
|
orphaned_foreign_key:
|
||||||
|
enabled: enforce
|
||||||
|
function: orphaned_foreign_key
|
||||||
|
message: "Foreign keys must reference existing tables"
|
||||||
|
|
||||||
|
circular_dependency:
|
||||||
|
enabled: warn
|
||||||
|
function: circular_dependency
|
||||||
|
message: "Circular dependencies detected"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Avoid Reserved Keywords
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
reserved_keywords:
|
||||||
|
enabled: warn
|
||||||
|
function: reserved_words
|
||||||
|
check_tables: true
|
||||||
|
check_columns: true
|
||||||
|
message: "Avoid using SQL reserved keywords"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Programmatic Usage
|
||||||
|
|
||||||
|
You can use the inspector programmatically in your Go code:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Load your database model
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "my_database",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
// ... your schema
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load rules configuration
|
||||||
|
config, err := inspector.LoadConfig(".relspec-rules.yaml")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create and run inspector
|
||||||
|
insp := inspector.NewInspector(db, config)
|
||||||
|
report, err := insp.Inspect()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate report
|
||||||
|
formatter := inspector.NewMarkdownFormatter(os.Stdout)
|
||||||
|
output, err := formatter.Format(report)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println(output)
|
||||||
|
|
||||||
|
// Check for errors
|
||||||
|
if report.HasErrors() {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions are welcome! To add a new validator:
|
||||||
|
|
||||||
|
1. Add the validator function to `validators.go`
|
||||||
|
2. Register it in `inspector.go` `getValidator()` function
|
||||||
|
3. Add default configuration to `rules.go` `GetDefaultConfig()`
|
||||||
|
4. Update this README with the new rule documentation
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Apache License 2.0 - See [LICENSE](../../LICENSE) for details.
|
||||||
65
pkg/inspector/TODO.md
Normal file
65
pkg/inspector/TODO.md
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
## Inspector TODO
|
||||||
|
|
||||||
|
See the [Inspector README](./README.md) for complete documentation of implemented features.
|
||||||
|
|
||||||
|
### Implemented ✓
|
||||||
|
|
||||||
|
- [x] Core validation framework with configurable rules
|
||||||
|
- [x] YAML configuration with three severity levels (enforce/warn/off)
|
||||||
|
- [x] Generic validators (table_regexpr, column_regexpr)
|
||||||
|
- [x] Primary key validation (naming, datatype, auto-increment)
|
||||||
|
- [x] Foreign key validation (column naming, constraint naming, indexes)
|
||||||
|
- [x] Naming convention validation (snake_case, custom patterns)
|
||||||
|
- [x] Length validation (table names, column names)
|
||||||
|
- [x] Reserved keywords detection
|
||||||
|
- [x] Schema integrity checks (missing PKs, orphaned FKs, circular dependencies)
|
||||||
|
- [x] Multiple output formats (Markdown with ANSI colors, JSON)
|
||||||
|
- [x] Terminal-aware color output
|
||||||
|
- [x] All input formats supported (PostgreSQL, DBML, JSON, GORM, Bun, etc.)
|
||||||
|
- [x] CI/CD integration support (proper exit codes)
|
||||||
|
- [x] Comprehensive documentation and examples
|
||||||
|
|
||||||
|
### Future Enhancements
|
||||||
|
|
||||||
|
#### Reporting Enhancements
|
||||||
|
- [ ] Add verbose mode to show all passing checks in detail
|
||||||
|
- [ ] Add summary-only mode (suppress violation details)
|
||||||
|
- [ ] Group violations by table/schema in report
|
||||||
|
- [ ] Add statistics: most violated rules, tables with most issues
|
||||||
|
- [ ] HTML report format with interactive filtering
|
||||||
|
|
||||||
|
#### Additional Validators
|
||||||
|
- [ ] Optimal column order for space and storage efficiency
|
||||||
|
- [ ] Similar-sounding column names detection (synonyms, typos)
|
||||||
|
- [ ] Plural/singular table name consistency
|
||||||
|
- [ ] Column order validation (PK first, FKs next, data columns, timestamps last)
|
||||||
|
- [ ] Data type consistency across related columns
|
||||||
|
- [ ] Index coverage analysis
|
||||||
|
- [ ] Unused indexes detection
|
||||||
|
- [ ] Missing indexes on commonly filtered columns
|
||||||
|
- [ ] Table size estimates and warnings for large tables
|
||||||
|
- [ ] Function naming conventions (here we have my rules used in Bitech etc. Work from a rules file.)
|
||||||
|
- [ ] View naming conventions
|
||||||
|
- [ ] Enum naming conventions
|
||||||
|
- [ ] Custom type naming conventions
|
||||||
|
- [ ] Table name consistency across related tables
|
||||||
|
|
||||||
|
#### Auto-Fix Capabilities
|
||||||
|
- [ ] Auto-fix mode (`relspec inspect --fix`)
|
||||||
|
- [ ] Update foreign key types to match primary key types
|
||||||
|
- [ ] Rename foreign keys to match primary key names with configurable prefix/suffix
|
||||||
|
- [ ] Reorder columns according to rules
|
||||||
|
- [ ] Add missing indexes on foreign keys
|
||||||
|
- [ ] Generate migration scripts for fixes
|
||||||
|
- [ ] Dry-run mode to preview changes
|
||||||
|
|
||||||
|
#### Advanced Features
|
||||||
|
- [ ] Custom validator plugins (Go plugin system)
|
||||||
|
- [ ] Rule templates for different databases (PostgreSQL, MySQL, etc.)
|
||||||
|
- [ ] Rule inheritance and composition
|
||||||
|
- [ ] Conditional rules (apply only to certain schemas/tables)
|
||||||
|
- [ ] Performance metrics in report (validation time per rule)
|
||||||
|
- [ ] Caching for large databases
|
||||||
|
- [ ] Incremental validation (only changed tables)
|
||||||
|
- [ ] Watch mode for continuous validation
|
||||||
|
|
||||||
182
pkg/inspector/inspector.go
Normal file
182
pkg/inspector/inspector.go
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Inspector performs validation on database models
|
||||||
|
type Inspector struct {
|
||||||
|
config *Config
|
||||||
|
db *models.Database
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidationResult represents the result of a single validation check
|
||||||
|
type ValidationResult struct {
|
||||||
|
RuleName string `json:"rule_name"`
|
||||||
|
Level string `json:"level"` // "error" or "warning"
|
||||||
|
Message string `json:"message"`
|
||||||
|
Location string `json:"location"` // e.g., "schema.table.column"
|
||||||
|
Context map[string]interface{} `json:"context"`
|
||||||
|
Passed bool `json:"passed"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// InspectorReport contains the complete validation report
|
||||||
|
type InspectorReport struct {
|
||||||
|
Summary ReportSummary `json:"summary"`
|
||||||
|
Violations []ValidationResult `json:"violations"`
|
||||||
|
GeneratedAt time.Time `json:"generated_at"`
|
||||||
|
Database string `json:"database"`
|
||||||
|
SourceFormat string `json:"source_format"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReportSummary contains aggregate statistics
|
||||||
|
type ReportSummary struct {
|
||||||
|
TotalRules int `json:"total_rules"`
|
||||||
|
RulesChecked int `json:"rules_checked"`
|
||||||
|
RulesSkipped int `json:"rules_skipped"`
|
||||||
|
ErrorCount int `json:"error_count"`
|
||||||
|
WarningCount int `json:"warning_count"`
|
||||||
|
PassedCount int `json:"passed_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewInspector creates a new inspector with the given database and configuration
|
||||||
|
func NewInspector(db *models.Database, config *Config) *Inspector {
|
||||||
|
return &Inspector{
|
||||||
|
config: config,
|
||||||
|
db: db,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Inspect runs all enabled validation rules and returns a report
|
||||||
|
func (i *Inspector) Inspect() (*InspectorReport, error) {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Run all enabled validators
|
||||||
|
for ruleName, rule := range i.config.Rules {
|
||||||
|
if !rule.IsEnabled() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the validator function for this rule using the function field
|
||||||
|
validator, exists := getValidator(rule.Function)
|
||||||
|
if !exists {
|
||||||
|
// Skip unknown validator functions
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the validator
|
||||||
|
ruleResults := validator(i.db, rule, ruleName)
|
||||||
|
|
||||||
|
// Set the level based on rule configuration
|
||||||
|
level := "warning"
|
||||||
|
if rule.IsEnforced() {
|
||||||
|
level = "error"
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range ruleResults {
|
||||||
|
ruleResults[idx].Level = level
|
||||||
|
}
|
||||||
|
|
||||||
|
results = append(results, ruleResults...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate summary
|
||||||
|
summary := i.generateSummary(results)
|
||||||
|
|
||||||
|
report := &InspectorReport{
|
||||||
|
Summary: summary,
|
||||||
|
Violations: results,
|
||||||
|
GeneratedAt: time.Now(),
|
||||||
|
Database: i.db.Name,
|
||||||
|
SourceFormat: i.db.SourceFormat,
|
||||||
|
}
|
||||||
|
|
||||||
|
return report, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateSummary creates summary statistics from validation results
|
||||||
|
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
|
||||||
|
summary := ReportSummary{
|
||||||
|
TotalRules: len(i.config.Rules),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count enabled rules
|
||||||
|
for _, rule := range i.config.Rules {
|
||||||
|
if rule.IsEnabled() {
|
||||||
|
summary.RulesChecked++
|
||||||
|
} else {
|
||||||
|
summary.RulesSkipped++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count violations by level
|
||||||
|
for _, result := range results {
|
||||||
|
if result.Passed {
|
||||||
|
summary.PassedCount++
|
||||||
|
} else {
|
||||||
|
if result.Level == "error" {
|
||||||
|
summary.ErrorCount++
|
||||||
|
} else {
|
||||||
|
summary.WarningCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return summary
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasErrors returns true if the report contains any errors
|
||||||
|
func (r *InspectorReport) HasErrors() bool {
|
||||||
|
return r.Summary.ErrorCount > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// validatorFunc is a function that validates a rule against a database
|
||||||
|
type validatorFunc func(*models.Database, Rule, string) []ValidationResult
|
||||||
|
|
||||||
|
// getValidator returns the validator function for a given function name
|
||||||
|
func getValidator(functionName string) (validatorFunc, bool) {
|
||||||
|
validators := map[string]validatorFunc{
|
||||||
|
"primary_key_naming": validatePrimaryKeyNaming,
|
||||||
|
"primary_key_datatype": validatePrimaryKeyDatatype,
|
||||||
|
"primary_key_auto_increment": validatePrimaryKeyAutoIncrement,
|
||||||
|
"foreign_key_column_naming": validateForeignKeyColumnNaming,
|
||||||
|
"foreign_key_constraint_naming": validateForeignKeyConstraintNaming,
|
||||||
|
"foreign_key_index": validateForeignKeyIndex,
|
||||||
|
"table_regexpr": validateTableNamingCase,
|
||||||
|
"column_regexpr": validateColumnNamingCase,
|
||||||
|
"table_name_length": validateTableNameLength,
|
||||||
|
"column_name_length": validateColumnNameLength,
|
||||||
|
"reserved_words": validateReservedKeywords,
|
||||||
|
"have_primary_key": validateMissingPrimaryKey,
|
||||||
|
"orphaned_foreign_key": validateOrphanedForeignKey,
|
||||||
|
"circular_dependency": validateCircularDependency,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn, exists := validators[functionName]
|
||||||
|
return fn, exists
|
||||||
|
}
|
||||||
|
|
||||||
|
// createResult is a helper to create a validation result
|
||||||
|
func createResult(ruleName string, passed bool, message string, location string, context map[string]interface{}) ValidationResult {
|
||||||
|
return ValidationResult{
|
||||||
|
RuleName: ruleName,
|
||||||
|
Message: message,
|
||||||
|
Location: location,
|
||||||
|
Context: context,
|
||||||
|
Passed: passed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatLocation creates a location string from schema, table, and optional column
|
||||||
|
func formatLocation(schema, table, column string) string {
|
||||||
|
if column != "" {
|
||||||
|
return fmt.Sprintf("%s.%s.%s", schema, table, column)
|
||||||
|
}
|
||||||
|
if table != "" {
|
||||||
|
return fmt.Sprintf("%s.%s", schema, table)
|
||||||
|
}
|
||||||
|
return schema
|
||||||
|
}
|
||||||
229
pkg/inspector/report.go
Normal file
229
pkg/inspector/report.go
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ANSI color codes
|
||||||
|
const (
|
||||||
|
colorReset = "\033[0m"
|
||||||
|
colorRed = "\033[31m"
|
||||||
|
colorYellow = "\033[33m"
|
||||||
|
colorGreen = "\033[32m"
|
||||||
|
colorBold = "\033[1m"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ReportFormatter defines the interface for report formatters
|
||||||
|
type ReportFormatter interface {
|
||||||
|
Format(report *InspectorReport) (string, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarkdownFormatter formats reports as markdown
|
||||||
|
type MarkdownFormatter struct {
|
||||||
|
UseColors bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// JSONFormatter formats reports as JSON
|
||||||
|
type JSONFormatter struct{}
|
||||||
|
|
||||||
|
// NewMarkdownFormatter creates a markdown formatter with color support detection
|
||||||
|
func NewMarkdownFormatter(writer io.Writer) *MarkdownFormatter {
|
||||||
|
return &MarkdownFormatter{
|
||||||
|
UseColors: isTerminal(writer),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewJSONFormatter creates a JSON formatter
|
||||||
|
func NewJSONFormatter() *JSONFormatter {
|
||||||
|
return &JSONFormatter{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format generates a markdown report
|
||||||
|
func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Header
|
||||||
|
sb.WriteString(f.formatHeader("RelSpec Inspector Report"))
|
||||||
|
sb.WriteString("\n\n")
|
||||||
|
|
||||||
|
// Metadata
|
||||||
|
sb.WriteString(f.formatBold("Database:") + " " + report.Database + "\n")
|
||||||
|
sb.WriteString(f.formatBold("Source Format:") + " " + report.SourceFormat + "\n")
|
||||||
|
sb.WriteString(f.formatBold("Generated:") + " " + report.GeneratedAt.Format(time.RFC3339) + "\n")
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Summary
|
||||||
|
sb.WriteString(f.formatHeader("Summary"))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))
|
||||||
|
|
||||||
|
// Color-code error and warning counts
|
||||||
|
if report.Summary.ErrorCount > 0 {
|
||||||
|
sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
|
||||||
|
} else {
|
||||||
|
sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
|
||||||
|
}
|
||||||
|
|
||||||
|
if report.Summary.WarningCount > 0 {
|
||||||
|
sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
|
||||||
|
} else {
|
||||||
|
sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
|
||||||
|
}
|
||||||
|
|
||||||
|
if report.Summary.PassedCount > 0 {
|
||||||
|
sb.WriteString(f.colorize(fmt.Sprintf("- Passed: %d\n", report.Summary.PassedCount), colorGreen))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Group violations by level
|
||||||
|
errors := []ValidationResult{}
|
||||||
|
warnings := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, v := range report.Violations {
|
||||||
|
if !v.Passed {
|
||||||
|
if v.Level == "error" {
|
||||||
|
errors = append(errors, v)
|
||||||
|
} else {
|
||||||
|
warnings = append(warnings, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Report violations
|
||||||
|
if len(errors) > 0 || len(warnings) > 0 {
|
||||||
|
sb.WriteString(f.formatHeader("Violations"))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Errors
|
||||||
|
if len(errors) > 0 {
|
||||||
|
sb.WriteString(f.formatSubheader(fmt.Sprintf("Errors (%d)", len(errors)), colorRed))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
for _, violation := range errors {
|
||||||
|
sb.WriteString(f.formatViolation(violation, colorRed))
|
||||||
|
}
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warnings
|
||||||
|
if len(warnings) > 0 {
|
||||||
|
sb.WriteString(f.formatSubheader(fmt.Sprintf("Warnings (%d)", len(warnings)), colorYellow))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
for _, violation := range warnings {
|
||||||
|
sb.WriteString(f.formatViolation(violation, colorYellow))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
sb.WriteString(f.colorize("✓ No violations found!\n", colorGreen))
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format generates a JSON report
|
||||||
|
func (f *JSONFormatter) Format(report *InspectorReport) (string, error) {
|
||||||
|
data, err := json.MarshalIndent(report, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to marshal report to JSON: %w", err)
|
||||||
|
}
|
||||||
|
return string(data), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper methods for MarkdownFormatter
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) formatHeader(text string) string {
|
||||||
|
return f.formatBold("# " + text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) formatSubheader(text string, color string) string {
|
||||||
|
header := "### " + text
|
||||||
|
if f.UseColors {
|
||||||
|
return color + colorBold + header + colorReset
|
||||||
|
}
|
||||||
|
return header
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) formatBold(text string) string {
|
||||||
|
if f.UseColors {
|
||||||
|
return colorBold + text + colorReset
|
||||||
|
}
|
||||||
|
return "**" + text + "**"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) colorize(text string, color string) string {
|
||||||
|
if f.UseColors {
|
||||||
|
return color + text + colorReset
|
||||||
|
}
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) formatViolation(v ValidationResult, color string) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Rule name as header
|
||||||
|
if f.UseColors {
|
||||||
|
sb.WriteString(color + "#### " + v.RuleName + colorReset + "\n")
|
||||||
|
} else {
|
||||||
|
sb.WriteString("#### " + v.RuleName + "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Location and message
|
||||||
|
sb.WriteString(f.formatBold("Location:") + " " + v.Location + "\n")
|
||||||
|
sb.WriteString(f.formatBold("Message:") + " " + v.Message + "\n")
|
||||||
|
|
||||||
|
// Context details (optional, only show interesting ones)
|
||||||
|
if len(v.Context) > 0 {
|
||||||
|
contextStr := f.formatContext(v.Context)
|
||||||
|
if contextStr != "" {
|
||||||
|
sb.WriteString(f.formatBold("Details:") + " " + contextStr + "\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("\n")
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MarkdownFormatter) formatContext(context map[string]interface{}) string {
|
||||||
|
// Extract relevant context information
|
||||||
|
var parts []string
|
||||||
|
|
||||||
|
// Skip schema, table, column as they're in location
|
||||||
|
skipKeys := map[string]bool{
|
||||||
|
"schema": true,
|
||||||
|
"table": true,
|
||||||
|
"column": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, value := range context {
|
||||||
|
if skipKeys[key] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
parts = append(parts, fmt.Sprintf("%s=%v", key, value))
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(parts, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// isTerminal checks if the writer is a terminal (supports ANSI colors)
|
||||||
|
func isTerminal(w io.Writer) bool {
|
||||||
|
file, ok := w.(*os.File)
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the file descriptor is a terminal
|
||||||
|
stat, err := file.Stat()
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a character device (terminal)
|
||||||
|
// This works on Unix-like systems
|
||||||
|
return (stat.Mode() & os.ModeCharDevice) != 0
|
||||||
|
}
|
||||||
169
pkg/inspector/rules.go
Normal file
169
pkg/inspector/rules.go
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config represents the inspector rules configuration
|
||||||
|
type Config struct {
|
||||||
|
Version string `yaml:"version"`
|
||||||
|
Rules map[string]Rule `yaml:"rules"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rule represents a single validation rule
|
||||||
|
type Rule struct {
|
||||||
|
Enabled string `yaml:"enabled"` // "enforce", "warn", "off"
|
||||||
|
Function string `yaml:"function"` // validator function name
|
||||||
|
Message string `yaml:"message"`
|
||||||
|
Pattern string `yaml:"pattern,omitempty"`
|
||||||
|
AllowedTypes []string `yaml:"allowed_types,omitempty"`
|
||||||
|
MaxLength int `yaml:"max_length,omitempty"`
|
||||||
|
Case string `yaml:"case,omitempty"`
|
||||||
|
RequireIndex bool `yaml:"require_index,omitempty"`
|
||||||
|
CheckTables bool `yaml:"check_tables,omitempty"`
|
||||||
|
CheckColumns bool `yaml:"check_columns,omitempty"`
|
||||||
|
RequireAutoIncrement bool `yaml:"require_auto_increment,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadConfig loads configuration from a YAML file
|
||||||
|
// If the file doesn't exist, returns default configuration
|
||||||
|
// If the file exists but is invalid, returns an error
|
||||||
|
func LoadConfig(path string) (*Config, error) {
|
||||||
|
// Check if file exists
|
||||||
|
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||||
|
// File doesn't exist, use defaults
|
||||||
|
return GetDefaultConfig(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read file
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read config file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse YAML
|
||||||
|
var config Config
|
||||||
|
if err := yaml.Unmarshal(data, &config); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse config YAML: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDefaultConfig returns the default inspector configuration
|
||||||
|
// All rules are enabled at "warn" level by default
|
||||||
|
func GetDefaultConfig() *Config {
|
||||||
|
return &Config{
|
||||||
|
Version: "1.0",
|
||||||
|
Rules: map[string]Rule{
|
||||||
|
// Primary Key Rules
|
||||||
|
"primary_key_naming": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "primary_key_naming",
|
||||||
|
Pattern: "^id_",
|
||||||
|
Message: "Primary key columns should start with 'id_'",
|
||||||
|
},
|
||||||
|
"primary_key_datatype": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "primary_key_datatype",
|
||||||
|
AllowedTypes: []string{"bigserial", "bigint", "int", "serial", "integer", "int4", "int8"},
|
||||||
|
Message: "Primary keys should use integer types (bigserial, bigint, int, serial)",
|
||||||
|
},
|
||||||
|
"primary_key_auto_increment": {
|
||||||
|
Enabled: "off",
|
||||||
|
Function: "primary_key_auto_increment",
|
||||||
|
RequireAutoIncrement: true,
|
||||||
|
Message: "Primary key without auto-increment detected",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Foreign Key Rules
|
||||||
|
"foreign_key_column_naming": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "foreign_key_column_naming",
|
||||||
|
Pattern: "^rid_",
|
||||||
|
Message: "Foreign key columns should start with 'rid_'",
|
||||||
|
},
|
||||||
|
"foreign_key_constraint_naming": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "foreign_key_constraint_naming",
|
||||||
|
Pattern: "^fk_",
|
||||||
|
Message: "Foreign key constraint names should start with 'fk_'",
|
||||||
|
},
|
||||||
|
"foreign_key_index": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "foreign_key_index",
|
||||||
|
RequireIndex: true,
|
||||||
|
Message: "Foreign key columns should have indexes for optimal performance",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Naming Convention Rules
|
||||||
|
"table_naming_case": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "table_regexpr",
|
||||||
|
Case: "lowercase",
|
||||||
|
Pattern: "^[a-z][a-z0-9_]*$",
|
||||||
|
Message: "Table names should be lowercase with underscores (snake_case)",
|
||||||
|
},
|
||||||
|
"column_naming_case": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "column_regexpr",
|
||||||
|
Case: "lowercase",
|
||||||
|
Pattern: "^[a-z][a-z0-9_]*$",
|
||||||
|
Message: "Column names should be lowercase with underscores (snake_case)",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Length Rules
|
||||||
|
"table_name_length": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "table_name_length",
|
||||||
|
MaxLength: 64,
|
||||||
|
Message: "Table name exceeds recommended maximum length of 64 characters",
|
||||||
|
},
|
||||||
|
"column_name_length": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "column_name_length",
|
||||||
|
MaxLength: 64,
|
||||||
|
Message: "Column name exceeds recommended maximum length of 64 characters",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Reserved Keywords
|
||||||
|
"reserved_keywords": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "reserved_words",
|
||||||
|
CheckTables: true,
|
||||||
|
CheckColumns: true,
|
||||||
|
Message: "Using SQL reserved keywords as identifiers can cause issues",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Schema Integrity Rules
|
||||||
|
"missing_primary_key": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "have_primary_key",
|
||||||
|
Message: "Table is missing a primary key",
|
||||||
|
},
|
||||||
|
"orphaned_foreign_key": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "orphaned_foreign_key",
|
||||||
|
Message: "Foreign key references a non-existent table",
|
||||||
|
},
|
||||||
|
"circular_dependency": {
|
||||||
|
Enabled: "warn",
|
||||||
|
Function: "circular_dependency",
|
||||||
|
Message: "Circular foreign key dependency detected",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEnabled returns true if the rule is enabled (either "enforce" or "warn")
|
||||||
|
func (r *Rule) IsEnabled() bool {
|
||||||
|
return r.Enabled == "enforce" || r.Enabled == "warn"
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEnforced returns true if the rule is set to "enforce" level
|
||||||
|
func (r *Rule) IsEnforced() bool {
|
||||||
|
return r.Enabled == "enforce"
|
||||||
|
}
|
||||||
603
pkg/inspector/validators.go
Normal file
603
pkg/inspector/validators.go
Normal file
@@ -0,0 +1,603 @@
|
|||||||
|
package inspector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
|
||||||
|
)
|
||||||
|
|
||||||
|
// validatePrimaryKeyNaming checks that primary key column names match a pattern
|
||||||
|
func validatePrimaryKeyNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := pattern.MatchString(col.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validatePrimaryKeyDatatype checks that primary keys use approved data types
|
||||||
|
func validatePrimaryKeyDatatype(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
|
||||||
|
// Normalize type (remove size/precision)
|
||||||
|
normalizedType := normalizeDataType(col.Type)
|
||||||
|
passed := contains(rule.AllowedTypes, normalizedType)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"current_type": col.Type,
|
||||||
|
"allowed_types": rule.AllowedTypes,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validatePrimaryKeyAutoIncrement checks primary key auto-increment settings
|
||||||
|
func validatePrimaryKeyAutoIncrement(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
|
||||||
|
// Check if auto-increment matches requirement
|
||||||
|
passed := col.AutoIncrement == rule.RequireAutoIncrement
|
||||||
|
|
||||||
|
if !passed {
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
false,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"has_auto_increment": col.AutoIncrement,
|
||||||
|
"require_auto_increment": rule.RequireAutoIncrement,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyColumnNaming checks that foreign key column names match a pattern
|
||||||
|
func validateForeignKeyColumnNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check foreign key constraints
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, colName := range constraint.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, colName)
|
||||||
|
passed := pattern.MatchString(colName)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": colName,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyConstraintNaming checks that foreign key constraint names match a pattern
|
||||||
|
func validateForeignKeyConstraintNaming(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := pattern.MatchString(constraint.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateForeignKeyIndex checks that foreign key columns have indexes
|
||||||
|
func validateForeignKeyIndex(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
if !rule.RequireIndex {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Get all foreign key columns
|
||||||
|
fkColumns := make(map[string]bool)
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, col := range constraint.Columns {
|
||||||
|
fkColumns[col] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if each FK column has an index
|
||||||
|
for fkCol := range fkColumns {
|
||||||
|
hasIndex := false
|
||||||
|
|
||||||
|
// Check table indexes
|
||||||
|
for _, index := range table.Indexes {
|
||||||
|
// Index is good if FK column is the first column
|
||||||
|
if len(index.Columns) > 0 && index.Columns[0] == fkCol {
|
||||||
|
hasIndex = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, fkCol)
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
hasIndex,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": fkCol,
|
||||||
|
"has_index": hasIndex,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateTableNamingCase checks table name casing
|
||||||
|
func validateTableNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := pattern.MatchString(table.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"expected_case": rule.Case,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateColumnNamingCase checks column name casing
|
||||||
|
func validateColumnNamingCase(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
pattern, err := regexp.Compile(rule.Pattern)
|
||||||
|
if err != nil {
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := pattern.MatchString(col.Name)
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"expected_case": rule.Case,
|
||||||
|
"expected_pattern": rule.Pattern,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateTableNameLength checks table name length
|
||||||
|
func validateTableNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := len(table.Name) <= rule.MaxLength
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"length": len(table.Name),
|
||||||
|
"max_length": rule.MaxLength,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateColumnNameLength checks column name length
|
||||||
|
func validateColumnNameLength(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := len(col.Name) <= rule.MaxLength
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"length": len(col.Name),
|
||||||
|
"max_length": rule.MaxLength,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateReservedKeywords checks for reserved SQL keywords
|
||||||
|
func validateReservedKeywords(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build keyword map from PostgreSQL keywords
|
||||||
|
keywordSlice := pgsql.GetPostgresKeywords()
|
||||||
|
keywords := make(map[string]bool)
|
||||||
|
for _, kw := range keywordSlice {
|
||||||
|
keywords[strings.ToUpper(kw)] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check table name
|
||||||
|
if rule.CheckTables {
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := !keywords[strings.ToUpper(table.Name)]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"object_type": "table",
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check column names
|
||||||
|
if rule.CheckColumns {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
location := formatLocation(schema.Name, table.Name, col.Name)
|
||||||
|
passed := !keywords[strings.ToUpper(col.Name)]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"column": col.Name,
|
||||||
|
"object_type": "column",
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateMissingPrimaryKey checks for tables without primary keys
|
||||||
|
func validateMissingPrimaryKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
hasPrimaryKey := false
|
||||||
|
|
||||||
|
// Check columns for primary key
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
hasPrimaryKey = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also check constraints
|
||||||
|
if !hasPrimaryKey {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.PrimaryKeyConstraint {
|
||||||
|
hasPrimaryKey = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
hasPrimaryKey,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateOrphanedForeignKey checks for foreign keys referencing non-existent tables
|
||||||
|
func validateOrphanedForeignKey(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build a map of existing tables for quick lookup
|
||||||
|
tableExists := make(map[string]bool)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
key := schema.Name + "." + table.Name
|
||||||
|
tableExists[key] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check all foreign key constraints
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
// Build referenced table key
|
||||||
|
refSchema := constraint.ReferencedSchema
|
||||||
|
if refSchema == "" {
|
||||||
|
refSchema = schema.Name
|
||||||
|
}
|
||||||
|
refKey := refSchema + "." + constraint.ReferencedTable
|
||||||
|
|
||||||
|
location := formatLocation(schema.Name, table.Name, "")
|
||||||
|
passed := tableExists[refKey]
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
passed,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": schema.Name,
|
||||||
|
"table": table.Name,
|
||||||
|
"constraint": constraint.Name,
|
||||||
|
"referenced_schema": refSchema,
|
||||||
|
"referenced_table": constraint.ReferencedTable,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateCircularDependency checks for circular foreign key dependencies
|
||||||
|
func validateCircularDependency(db *models.Database, rule Rule, ruleName string) []ValidationResult {
|
||||||
|
results := []ValidationResult{}
|
||||||
|
|
||||||
|
// Build dependency graph
|
||||||
|
dependencies := make(map[string][]string)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
tableKey := schema.Name + "." + table.Name
|
||||||
|
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
refSchema := constraint.ReferencedSchema
|
||||||
|
if refSchema == "" {
|
||||||
|
refSchema = schema.Name
|
||||||
|
}
|
||||||
|
refKey := refSchema + "." + constraint.ReferencedTable
|
||||||
|
|
||||||
|
dependencies[tableKey] = append(dependencies[tableKey], refKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for cycles using DFS
|
||||||
|
for tableKey := range dependencies {
|
||||||
|
visited := make(map[string]bool)
|
||||||
|
recStack := make(map[string]bool)
|
||||||
|
|
||||||
|
if hasCycle(tableKey, dependencies, visited, recStack) {
|
||||||
|
parts := strings.Split(tableKey, ".")
|
||||||
|
location := formatLocation(parts[0], parts[1], "")
|
||||||
|
|
||||||
|
results = append(results, createResult(
|
||||||
|
ruleName,
|
||||||
|
false,
|
||||||
|
rule.Message,
|
||||||
|
location,
|
||||||
|
map[string]interface{}{
|
||||||
|
"schema": parts[0],
|
||||||
|
"table": parts[1],
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper functions
|
||||||
|
|
||||||
|
// hasCycle performs DFS to detect cycles in dependency graph
|
||||||
|
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
|
||||||
|
visited[node] = true
|
||||||
|
recStack[node] = true
|
||||||
|
|
||||||
|
for _, neighbor := range graph[node] {
|
||||||
|
if !visited[neighbor] {
|
||||||
|
if hasCycle(neighbor, graph, visited, recStack) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
} else if recStack[neighbor] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
recStack[node] = false
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizeDataType removes size/precision from data type
|
||||||
|
func normalizeDataType(dataType string) string {
|
||||||
|
// Remove everything in parentheses
|
||||||
|
idx := strings.Index(dataType, "(")
|
||||||
|
if idx > 0 {
|
||||||
|
dataType = dataType[:idx]
|
||||||
|
}
|
||||||
|
return strings.ToLower(strings.TrimSpace(dataType))
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains checks if a string slice contains a value
|
||||||
|
func contains(slice []string, value string) bool {
|
||||||
|
for _, item := range slice {
|
||||||
|
if strings.EqualFold(item, value) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
@@ -2,7 +2,13 @@ package models
|
|||||||
|
|
||||||
import "encoding/xml"
|
import "encoding/xml"
|
||||||
|
|
||||||
// DCTXDictionary represents the root element of a DCTX file
|
// DCTX File Format Models
|
||||||
|
//
|
||||||
|
// This file defines the data structures for parsing and generating DCTX
|
||||||
|
// (Data Dictionary) XML files, which are used by Clarion development tools
|
||||||
|
// for database schema definitions.
|
||||||
|
|
||||||
|
// DCTXDictionary represents the root element of a DCTX file.
|
||||||
type DCTXDictionary struct {
|
type DCTXDictionary struct {
|
||||||
XMLName xml.Name `xml:"Dictionary"`
|
XMLName xml.Name `xml:"Dictionary"`
|
||||||
Name string `xml:"Name,attr"`
|
Name string `xml:"Name,attr"`
|
||||||
@@ -11,7 +17,7 @@ type DCTXDictionary struct {
|
|||||||
Relations []DCTXRelation `xml:"Relation,omitempty"`
|
Relations []DCTXRelation `xml:"Relation,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXTable represents a table definition in DCTX
|
// DCTXTable represents a table definition in DCTX format.
|
||||||
type DCTXTable struct {
|
type DCTXTable struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
Name string `xml:"Name,attr"`
|
Name string `xml:"Name,attr"`
|
||||||
@@ -25,7 +31,8 @@ type DCTXTable struct {
|
|||||||
Options []DCTXOption `xml:"Option,omitempty"`
|
Options []DCTXOption `xml:"Option,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXField represents a field/column definition in DCTX
|
// DCTXField represents a field/column definition in DCTX format.
|
||||||
|
// Fields can be nested for GROUP structures.
|
||||||
type DCTXField struct {
|
type DCTXField struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
Name string `xml:"Name,attr"`
|
Name string `xml:"Name,attr"`
|
||||||
@@ -37,7 +44,7 @@ type DCTXField struct {
|
|||||||
Options []DCTXOption `xml:"Option,omitempty"`
|
Options []DCTXOption `xml:"Option,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXKey represents an index or key definition in DCTX
|
// DCTXKey represents an index or key definition in DCTX format.
|
||||||
type DCTXKey struct {
|
type DCTXKey struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
Name string `xml:"Name,attr"`
|
Name string `xml:"Name,attr"`
|
||||||
@@ -49,7 +56,7 @@ type DCTXKey struct {
|
|||||||
Components []DCTXComponent `xml:"Component"`
|
Components []DCTXComponent `xml:"Component"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXComponent represents a component of a key (field reference)
|
// DCTXComponent represents a component of a key, referencing a field in the index.
|
||||||
type DCTXComponent struct {
|
type DCTXComponent struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
FieldId string `xml:"FieldId,attr,omitempty"`
|
FieldId string `xml:"FieldId,attr,omitempty"`
|
||||||
@@ -57,14 +64,14 @@ type DCTXComponent struct {
|
|||||||
Ascend bool `xml:"Ascend,attr,omitempty"`
|
Ascend bool `xml:"Ascend,attr,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXOption represents a property option in DCTX
|
// DCTXOption represents a property option in DCTX format for metadata storage.
|
||||||
type DCTXOption struct {
|
type DCTXOption struct {
|
||||||
Property string `xml:"Property,attr"`
|
Property string `xml:"Property,attr"`
|
||||||
PropertyType string `xml:"PropertyType,attr,omitempty"`
|
PropertyType string `xml:"PropertyType,attr,omitempty"`
|
||||||
PropertyValue string `xml:"PropertyValue,attr"`
|
PropertyValue string `xml:"PropertyValue,attr"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXRelation represents a relationship/foreign key in DCTX
|
// DCTXRelation represents a relationship/foreign key in DCTX format.
|
||||||
type DCTXRelation struct {
|
type DCTXRelation struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
PrimaryTable string `xml:"PrimaryTable,attr"`
|
PrimaryTable string `xml:"PrimaryTable,attr"`
|
||||||
@@ -77,7 +84,7 @@ type DCTXRelation struct {
|
|||||||
PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping,omitempty"`
|
PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DCTXFieldMapping represents a field mapping in a relation
|
// DCTXFieldMapping represents a field mapping in a relation for multi-column foreign keys.
|
||||||
type DCTXFieldMapping struct {
|
type DCTXFieldMapping struct {
|
||||||
Guid string `xml:"Guid,attr"`
|
Guid string `xml:"Guid,attr"`
|
||||||
Field string `xml:"Field,attr"`
|
Field string `xml:"Field,attr"`
|
||||||
|
|||||||
@@ -2,11 +2,14 @@ package models
|
|||||||
|
|
||||||
import "fmt"
|
import "fmt"
|
||||||
|
|
||||||
// =============================================================================
|
// Flat/Denormalized Views
|
||||||
// Flat/Denormalized Views - Flattened structures with fully qualified names
|
//
|
||||||
// =============================================================================
|
// This file provides flattened data structures with fully qualified names
|
||||||
|
// for easier querying and analysis of database schemas without navigating
|
||||||
|
// nested hierarchies.
|
||||||
|
|
||||||
// FlatColumn represents a column with full context in a single structure
|
// FlatColumn represents a column with full database context in a single structure.
|
||||||
|
// It includes fully qualified names for easy identification and querying.
|
||||||
type FlatColumn struct {
|
type FlatColumn struct {
|
||||||
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
||||||
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
||||||
@@ -25,7 +28,7 @@ type FlatColumn struct {
|
|||||||
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToFlatColumns converts a Database to a slice of FlatColumns
|
// ToFlatColumns converts a Database to a slice of FlatColumns for denormalized access to all columns.
|
||||||
func (d *Database) ToFlatColumns() []*FlatColumn {
|
func (d *Database) ToFlatColumns() []*FlatColumn {
|
||||||
flatColumns := make([]*FlatColumn, 0)
|
flatColumns := make([]*FlatColumn, 0)
|
||||||
|
|
||||||
@@ -56,7 +59,7 @@ func (d *Database) ToFlatColumns() []*FlatColumn {
|
|||||||
return flatColumns
|
return flatColumns
|
||||||
}
|
}
|
||||||
|
|
||||||
// FlatTable represents a table with full context
|
// FlatTable represents a table with full database context and aggregated counts.
|
||||||
type FlatTable struct {
|
type FlatTable struct {
|
||||||
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
||||||
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
||||||
@@ -70,7 +73,7 @@ type FlatTable struct {
|
|||||||
IndexCount int `json:"index_count" yaml:"index_count" xml:"index_count"`
|
IndexCount int `json:"index_count" yaml:"index_count" xml:"index_count"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToFlatTables converts a Database to a slice of FlatTables
|
// ToFlatTables converts a Database to a slice of FlatTables for denormalized access to all tables.
|
||||||
func (d *Database) ToFlatTables() []*FlatTable {
|
func (d *Database) ToFlatTables() []*FlatTable {
|
||||||
flatTables := make([]*FlatTable, 0)
|
flatTables := make([]*FlatTable, 0)
|
||||||
|
|
||||||
@@ -94,7 +97,7 @@ func (d *Database) ToFlatTables() []*FlatTable {
|
|||||||
return flatTables
|
return flatTables
|
||||||
}
|
}
|
||||||
|
|
||||||
// FlatConstraint represents a constraint with full context
|
// FlatConstraint represents a constraint with full database context and resolved references.
|
||||||
type FlatConstraint struct {
|
type FlatConstraint struct {
|
||||||
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
||||||
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
|
||||||
@@ -112,7 +115,7 @@ type FlatConstraint struct {
|
|||||||
OnUpdate string `json:"on_update,omitempty" yaml:"on_update,omitempty" xml:"on_update,omitempty"`
|
OnUpdate string `json:"on_update,omitempty" yaml:"on_update,omitempty" xml:"on_update,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToFlatConstraints converts a Database to a slice of FlatConstraints
|
// ToFlatConstraints converts a Database to a slice of FlatConstraints for denormalized access to all constraints.
|
||||||
func (d *Database) ToFlatConstraints() []*FlatConstraint {
|
func (d *Database) ToFlatConstraints() []*FlatConstraint {
|
||||||
flatConstraints := make([]*FlatConstraint, 0)
|
flatConstraints := make([]*FlatConstraint, 0)
|
||||||
|
|
||||||
@@ -148,7 +151,7 @@ func (d *Database) ToFlatConstraints() []*FlatConstraint {
|
|||||||
return flatConstraints
|
return flatConstraints
|
||||||
}
|
}
|
||||||
|
|
||||||
// FlatRelationship represents a relationship with full context
|
// FlatRelationship represents a relationship with full database context and fully qualified table names.
|
||||||
type FlatRelationship struct {
|
type FlatRelationship struct {
|
||||||
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
|
||||||
RelationshipName string `json:"relationship_name" yaml:"relationship_name" xml:"relationship_name"`
|
RelationshipName string `json:"relationship_name" yaml:"relationship_name" xml:"relationship_name"`
|
||||||
@@ -164,7 +167,7 @@ type FlatRelationship struct {
|
|||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToFlatRelationships converts a Database to a slice of FlatRelationships
|
// ToFlatRelationships converts a Database to a slice of FlatRelationships for denormalized access to all relationships.
|
||||||
func (d *Database) ToFlatRelationships() []*FlatRelationship {
|
func (d *Database) ToFlatRelationships() []*FlatRelationship {
|
||||||
flatRelationships := make([]*FlatRelationship, 0)
|
flatRelationships := make([]*FlatRelationship, 0)
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,19 @@
|
|||||||
|
// Package models provides the core data structures for representing database schemas.
|
||||||
|
// It defines types for databases, schemas, tables, columns, relationships, constraints,
|
||||||
|
// indexes, views, sequences, and other database objects. These models serve as the
|
||||||
|
// intermediate representation for converting between various database schema formats.
|
||||||
package models
|
package models
|
||||||
|
|
||||||
import "strings"
|
import "strings"
|
||||||
|
|
||||||
|
// DatabaseType represents the type of database system.
|
||||||
type DatabaseType string
|
type DatabaseType string
|
||||||
|
|
||||||
|
// Supported database types.
|
||||||
const (
|
const (
|
||||||
PostgresqlDatabaseType DatabaseType = "pgsql"
|
PostgresqlDatabaseType DatabaseType = "pgsql" // PostgreSQL database
|
||||||
MSSQLDatabaseType DatabaseType = "mssql"
|
MSSQLDatabaseType DatabaseType = "mssql" // Microsoft SQL Server database
|
||||||
SqlLiteDatabaseType DatabaseType = "sqlite"
|
SqlLiteDatabaseType DatabaseType = "sqlite" // SQLite database
|
||||||
)
|
)
|
||||||
|
|
||||||
// Database represents the complete database schema
|
// Database represents the complete database schema
|
||||||
@@ -21,11 +27,13 @@ type Database struct {
|
|||||||
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
|
SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLNamer returns the database name in lowercase
|
// SQLName returns the database name in lowercase for SQL compatibility.
|
||||||
func (d *Database) SQLName() string {
|
func (d *Database) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Schema represents a database schema, which is a logical grouping of database objects
|
||||||
|
// such as tables, views, sequences, and relationships within a database.
|
||||||
type Schema struct {
|
type Schema struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
@@ -40,13 +48,16 @@ type Schema struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
RefDatabase *Database `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefDatabase *Database `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
Relations []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
|
Relations []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
|
||||||
|
Enums []*Enum `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the schema name in lowercase
|
// SQLName returns the schema name in lowercase for SQL compatibility.
|
||||||
func (d *Schema) SQLName() string {
|
func (d *Schema) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Table represents a database table with its columns, constraints, indexes,
|
||||||
|
// and relationships. Tables are the primary data storage structures in a database.
|
||||||
type Table struct {
|
type Table struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
@@ -62,11 +73,12 @@ type Table struct {
|
|||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the table name in lowercase
|
// SQLName returns the table name in lowercase for SQL compatibility.
|
||||||
func (d *Table) SQLName() string {
|
func (d *Table) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetPrimaryKey returns the primary key column for the table, or nil if none exists.
|
||||||
func (m Table) GetPrimaryKey() *Column {
|
func (m Table) GetPrimaryKey() *Column {
|
||||||
for _, column := range m.Columns {
|
for _, column := range m.Columns {
|
||||||
if column.IsPrimaryKey {
|
if column.IsPrimaryKey {
|
||||||
@@ -76,6 +88,7 @@ func (m Table) GetPrimaryKey() *Column {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetForeignKeys returns all foreign key constraints for the table.
|
||||||
func (m Table) GetForeignKeys() []*Constraint {
|
func (m Table) GetForeignKeys() []*Constraint {
|
||||||
keys := make([]*Constraint, 0)
|
keys := make([]*Constraint, 0)
|
||||||
|
|
||||||
@@ -100,7 +113,7 @@ type View struct {
|
|||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the view name in lowercase
|
// SQLName returns the view name in lowercase for SQL compatibility.
|
||||||
func (d *View) SQLName() string {
|
func (d *View) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
@@ -123,7 +136,7 @@ type Sequence struct {
|
|||||||
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the sequence name in lowercase
|
// SQLName returns the sequence name in lowercase for SQL compatibility.
|
||||||
func (d *Sequence) SQLName() string {
|
func (d *Sequence) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
@@ -147,11 +160,13 @@ type Column struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the table name in lowercase
|
// SQLName returns the column name in lowercase for SQL compatibility.
|
||||||
func (d *Column) SQLName() string {
|
func (d *Column) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Index represents a database index for optimizing query performance.
|
||||||
|
// Indexes can be unique, partial, or include additional columns.
|
||||||
type Index struct {
|
type Index struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
@@ -167,19 +182,23 @@ type Index struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the Indexin lowercase
|
// SQLName returns the index name in lowercase for SQL compatibility.
|
||||||
func (d *Index) SQLName() string {
|
func (d *Index) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RelationType represents the type of relationship between database tables.
|
||||||
type RelationType string
|
type RelationType string
|
||||||
|
|
||||||
|
// Supported relationship types.
|
||||||
const (
|
const (
|
||||||
OneToOne RelationType = "one_to_one"
|
OneToOne RelationType = "one_to_one" // One record in table A relates to one record in table B
|
||||||
OneToMany RelationType = "one_to_many"
|
OneToMany RelationType = "one_to_many" // One record in table A relates to many records in table B
|
||||||
ManyToMany RelationType = "many_to_many"
|
ManyToMany RelationType = "many_to_many" // Many records in table A relate to many records in table B
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Relationship represents a relationship between two database tables.
|
||||||
|
// Relationships can be one-to-one, one-to-many, or many-to-many.
|
||||||
type Relationship struct {
|
type Relationship struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Type RelationType `json:"type" yaml:"type" xml:"type"`
|
Type RelationType `json:"type" yaml:"type" xml:"type"`
|
||||||
@@ -197,11 +216,13 @@ type Relationship struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SQLName returns the Relationship lowercase
|
// SQLName returns the relationship name in lowercase for SQL compatibility.
|
||||||
func (d *Relationship) SQLName() string {
|
func (d *Relationship) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Constraint represents a database constraint that enforces data integrity rules.
|
||||||
|
// Constraints can be primary keys, foreign keys, unique constraints, check constraints, or not-null constraints.
|
||||||
type Constraint struct {
|
type Constraint struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Type ConstraintType `json:"type" yaml:"type" xml:"type"`
|
Type ConstraintType `json:"type" yaml:"type" xml:"type"`
|
||||||
@@ -219,20 +240,37 @@ type Constraint struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SQLName returns the constraint name in lowercase for SQL compatibility.
|
||||||
func (d *Constraint) SQLName() string {
|
func (d *Constraint) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ConstraintType represents the type of database constraint.
|
||||||
type ConstraintType string
|
type ConstraintType string
|
||||||
|
|
||||||
|
// Enum represents a database enumeration type with a set of allowed values.
|
||||||
|
type Enum struct {
|
||||||
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
|
Values []string `json:"values" yaml:"values" xml:"values"`
|
||||||
|
Schema string `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SQLName returns the enum name in lowercase for SQL compatibility.
|
||||||
|
func (d *Enum) SQLName() string {
|
||||||
|
return strings.ToLower(d.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supported constraint types.
|
||||||
const (
|
const (
|
||||||
PrimaryKeyConstraint ConstraintType = "primary_key"
|
PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
|
||||||
ForeignKeyConstraint ConstraintType = "foreign_key"
|
ForeignKeyConstraint ConstraintType = "foreign_key" // Foreign key references another table
|
||||||
UniqueConstraint ConstraintType = "unique"
|
UniqueConstraint ConstraintType = "unique" // Unique constraint ensures all values are different
|
||||||
CheckConstraint ConstraintType = "check"
|
CheckConstraint ConstraintType = "check" // Check constraint validates data against an expression
|
||||||
NotNullConstraint ConstraintType = "not_null"
|
NotNullConstraint ConstraintType = "not_null" // Not null constraint requires a value
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Script represents a database migration or initialization script.
|
||||||
|
// Scripts can have dependencies and rollback capabilities.
|
||||||
type Script struct {
|
type Script struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description" yaml:"description" xml:"description"`
|
Description string `json:"description" yaml:"description" xml:"description"`
|
||||||
@@ -245,11 +283,12 @@ type Script struct {
|
|||||||
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SQLName returns the script name in lowercase for SQL compatibility.
|
||||||
func (d *Script) SQLName() string {
|
func (d *Script) SQLName() string {
|
||||||
return strings.ToLower(d.Name)
|
return strings.ToLower(d.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize functions
|
// Initialization functions for creating new model instances with proper defaults.
|
||||||
|
|
||||||
// InitDatabase initializes a new Database with empty slices
|
// InitDatabase initializes a new Database with empty slices
|
||||||
func InitDatabase(name string) *Database {
|
func InitDatabase(name string) *Database {
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
package models
|
package models
|
||||||
|
|
||||||
// =============================================================================
|
// Summary/Compact Views
|
||||||
// Summary/Compact Views - Lightweight views with essential fields
|
//
|
||||||
// =============================================================================
|
// This file provides lightweight summary structures with essential fields
|
||||||
|
// and aggregated counts for quick database schema overviews without loading
|
||||||
|
// full object graphs.
|
||||||
|
|
||||||
// DatabaseSummary provides a compact overview of a database
|
// DatabaseSummary provides a compact overview of a database with aggregated statistics.
|
||||||
type DatabaseSummary struct {
|
type DatabaseSummary struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
@@ -15,7 +17,7 @@ type DatabaseSummary struct {
|
|||||||
TotalColumns int `json:"total_columns" yaml:"total_columns" xml:"total_columns"`
|
TotalColumns int `json:"total_columns" yaml:"total_columns" xml:"total_columns"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToSummary converts a Database to a DatabaseSummary
|
// ToSummary converts a Database to a DatabaseSummary with calculated counts.
|
||||||
func (d *Database) ToSummary() *DatabaseSummary {
|
func (d *Database) ToSummary() *DatabaseSummary {
|
||||||
summary := &DatabaseSummary{
|
summary := &DatabaseSummary{
|
||||||
Name: d.Name,
|
Name: d.Name,
|
||||||
@@ -36,7 +38,7 @@ func (d *Database) ToSummary() *DatabaseSummary {
|
|||||||
return summary
|
return summary
|
||||||
}
|
}
|
||||||
|
|
||||||
// SchemaSummary provides a compact overview of a schema
|
// SchemaSummary provides a compact overview of a schema with aggregated statistics.
|
||||||
type SchemaSummary struct {
|
type SchemaSummary struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
|
||||||
@@ -47,7 +49,7 @@ type SchemaSummary struct {
|
|||||||
TotalConstraints int `json:"total_constraints" yaml:"total_constraints" xml:"total_constraints"`
|
TotalConstraints int `json:"total_constraints" yaml:"total_constraints" xml:"total_constraints"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToSummary converts a Schema to a SchemaSummary
|
// ToSummary converts a Schema to a SchemaSummary with calculated counts.
|
||||||
func (s *Schema) ToSummary() *SchemaSummary {
|
func (s *Schema) ToSummary() *SchemaSummary {
|
||||||
summary := &SchemaSummary{
|
summary := &SchemaSummary{
|
||||||
Name: s.Name,
|
Name: s.Name,
|
||||||
@@ -66,7 +68,7 @@ func (s *Schema) ToSummary() *SchemaSummary {
|
|||||||
return summary
|
return summary
|
||||||
}
|
}
|
||||||
|
|
||||||
// TableSummary provides a compact overview of a table
|
// TableSummary provides a compact overview of a table with aggregated statistics.
|
||||||
type TableSummary struct {
|
type TableSummary struct {
|
||||||
Name string `json:"name" yaml:"name" xml:"name"`
|
Name string `json:"name" yaml:"name" xml:"name"`
|
||||||
Schema string `json:"schema" yaml:"schema" xml:"schema"`
|
Schema string `json:"schema" yaml:"schema" xml:"schema"`
|
||||||
@@ -79,7 +81,7 @@ type TableSummary struct {
|
|||||||
ForeignKeyCount int `json:"foreign_key_count" yaml:"foreign_key_count" xml:"foreign_key_count"`
|
ForeignKeyCount int `json:"foreign_key_count" yaml:"foreign_key_count" xml:"foreign_key_count"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToSummary converts a Table to a TableSummary
|
// ToSummary converts a Table to a TableSummary with calculated counts.
|
||||||
func (t *Table) ToSummary() *TableSummary {
|
func (t *Table) ToSummary() *TableSummary {
|
||||||
summary := &TableSummary{
|
summary := &TableSummary{
|
||||||
Name: t.Name,
|
Name: t.Name,
|
||||||
|
|||||||
106
pkg/readers/bun/README.md
Normal file
106
pkg/readers/bun/README.md
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
# Bun Reader
|
||||||
|
|
||||||
|
Reads Go source files containing Bun model definitions and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Bun Reader parses Go source code files that define Bun models (structs with `bun` struct tags) and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses Bun struct tags to extract column definitions
|
||||||
|
- Extracts table names from `bun:"table:tablename"` tags
|
||||||
|
- Identifies primary keys, foreign keys, and indexes
|
||||||
|
- Supports relationship detection
|
||||||
|
- Handles both single files and directories
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/models.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := bun.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read Bun models and convert to JSON
|
||||||
|
relspec --input bun --in-file models/ --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert Bun models to GORM
|
||||||
|
relspec --input bun --in-file models.go --output gorm --out-file gorm_models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported Bun Tags
|
||||||
|
|
||||||
|
The reader recognizes the following Bun struct tags:
|
||||||
|
|
||||||
|
- `table` - Table name
|
||||||
|
- `column` - Column name
|
||||||
|
- `type` - SQL data type
|
||||||
|
- `pk` - Primary key
|
||||||
|
- `notnull` - NOT NULL constraint
|
||||||
|
- `autoincrement` - Auto-increment column
|
||||||
|
- `default` - Default value
|
||||||
|
- `unique` - Unique constraint
|
||||||
|
- `rel` - Relationship definition
|
||||||
|
|
||||||
|
## Example Bun Model
|
||||||
|
|
||||||
|
```go
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type User struct {
|
||||||
|
bun.BaseModel `bun:"table:users,alias:u"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement"`
|
||||||
|
Username string `bun:"username,notnull,unique"`
|
||||||
|
Email string `bun:"email,notnull"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull,default:now()"`
|
||||||
|
|
||||||
|
Posts []*Post `bun:"rel:has-many,join:id=user_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post struct {
|
||||||
|
bun.BaseModel `bun:"table:posts,alias:p"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk"`
|
||||||
|
UserID int64 `bun:"user_id,notnull"`
|
||||||
|
Title string `bun:"title,notnull"`
|
||||||
|
Content string `bun:"content"`
|
||||||
|
|
||||||
|
User *User `bun:"rel:belongs-to,join:user_id=id"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Test files (ending in `_test.go`) are automatically excluded
|
||||||
|
- The `bun.BaseModel` embedded struct is automatically recognized
|
||||||
|
- Schema defaults to `public` if not specified
|
||||||
@@ -382,6 +382,23 @@ func (r *Reader) isRelationship(tag string) bool {
|
|||||||
return strings.Contains(tag, "bun:\"rel:") || strings.Contains(tag, ",rel:")
|
return strings.Contains(tag, "bun:\"rel:") || strings.Contains(tag, ",rel:")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// getRelationType extracts the relationship type from a bun tag
|
||||||
|
func (r *Reader) getRelationType(bunTag string) string {
|
||||||
|
if strings.Contains(bunTag, "rel:has-many") {
|
||||||
|
return "has-many"
|
||||||
|
}
|
||||||
|
if strings.Contains(bunTag, "rel:belongs-to") {
|
||||||
|
return "belongs-to"
|
||||||
|
}
|
||||||
|
if strings.Contains(bunTag, "rel:has-one") {
|
||||||
|
return "has-one"
|
||||||
|
}
|
||||||
|
if strings.Contains(bunTag, "rel:many-to-many") {
|
||||||
|
return "many-to-many"
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
// parseRelationshipConstraints parses relationship fields to extract foreign key constraints
|
// parseRelationshipConstraints parses relationship fields to extract foreign key constraints
|
||||||
func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *ast.StructType, structMap map[string]*models.Table) {
|
func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *ast.StructType, structMap map[string]*models.Table) {
|
||||||
for _, field := range structType.Fields.List {
|
for _, field := range structType.Fields.List {
|
||||||
@@ -409,27 +426,51 @@ func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *a
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Parse the join information: join:user_id=id
|
// Parse the join information: join:user_id=id
|
||||||
// This means: referencedTable.user_id = thisTable.id
|
// This means: thisTable.user_id = referencedTable.id
|
||||||
joinInfo := r.parseJoinInfo(bunTag)
|
joinInfo := r.parseJoinInfo(bunTag)
|
||||||
if joinInfo == nil {
|
if joinInfo == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// The FK is on the referenced table
|
// Determine which table gets the FK based on relationship type
|
||||||
|
relType := r.getRelationType(bunTag)
|
||||||
|
|
||||||
|
var fkTable *models.Table
|
||||||
|
var fkColumn, refTable, refColumn string
|
||||||
|
|
||||||
|
switch strings.ToLower(relType) {
|
||||||
|
case "belongs-to":
|
||||||
|
// For belongs-to: FK is on the current table
|
||||||
|
// join:user_id=id means table.user_id references referencedTable.id
|
||||||
|
fkTable = table
|
||||||
|
fkColumn = joinInfo.ForeignKey
|
||||||
|
refTable = referencedTable.Name
|
||||||
|
refColumn = joinInfo.ReferencedKey
|
||||||
|
case "has-many":
|
||||||
|
// For has-many: FK is on the referenced table
|
||||||
|
// join:id=user_id means referencedTable.user_id references table.id
|
||||||
|
fkTable = referencedTable
|
||||||
|
fkColumn = joinInfo.ReferencedKey
|
||||||
|
refTable = table.Name
|
||||||
|
refColumn = joinInfo.ForeignKey
|
||||||
|
default:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
constraint := &models.Constraint{
|
constraint := &models.Constraint{
|
||||||
Name: fmt.Sprintf("fk_%s_%s", referencedTable.Name, table.Name),
|
Name: fmt.Sprintf("fk_%s_%s", fkTable.Name, refTable),
|
||||||
Type: models.ForeignKeyConstraint,
|
Type: models.ForeignKeyConstraint,
|
||||||
Table: referencedTable.Name,
|
Table: fkTable.Name,
|
||||||
Schema: referencedTable.Schema,
|
Schema: fkTable.Schema,
|
||||||
Columns: []string{joinInfo.ForeignKey},
|
Columns: []string{fkColumn},
|
||||||
ReferencedTable: table.Name,
|
ReferencedTable: refTable,
|
||||||
ReferencedSchema: table.Schema,
|
ReferencedSchema: fkTable.Schema,
|
||||||
ReferencedColumns: []string{joinInfo.ReferencedKey},
|
ReferencedColumns: []string{refColumn},
|
||||||
OnDelete: "NO ACTION", // Bun doesn't specify this in tags
|
OnDelete: "NO ACTION", // Bun doesn't specify this in tags
|
||||||
OnUpdate: "NO ACTION",
|
OnUpdate: "NO ACTION",
|
||||||
}
|
}
|
||||||
|
|
||||||
referencedTable.Constraints[constraint.Name] = constraint
|
fkTable.Constraints[constraint.Name] = constraint
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -626,17 +667,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
|
|||||||
// - nullzero tag means the field is nullable (can be NULL in DB)
|
// - nullzero tag means the field is nullable (can be NULL in DB)
|
||||||
// - absence of nullzero means the field is NOT NULL
|
// - absence of nullzero means the field is NOT NULL
|
||||||
// - primitive types (int64, bool, string) are NOT NULL by default
|
// - primitive types (int64, bool, string) are NOT NULL by default
|
||||||
|
column.NotNull = true
|
||||||
|
// Primary keys are always NOT NULL
|
||||||
|
|
||||||
if strings.Contains(bunTag, "nullzero") {
|
if strings.Contains(bunTag, "nullzero") {
|
||||||
column.NotNull = false
|
column.NotNull = false
|
||||||
} else if r.isNullableGoType(fieldType) {
|
|
||||||
// SqlString, SqlInt, etc. without nullzero tag means NOT NULL
|
|
||||||
column.NotNull = true
|
|
||||||
} else {
|
} else {
|
||||||
// Primitive types are NOT NULL by default
|
column.NotNull = !r.isNullableGoType(fieldType)
|
||||||
column.NotNull = true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Primary keys are always NOT NULL
|
|
||||||
if column.IsPrimaryKey {
|
if column.IsPrimaryKey {
|
||||||
column.NotNull = true
|
column.NotNull = true
|
||||||
}
|
}
|
||||||
|
|||||||
522
pkg/readers/bun/reader_test.go
Normal file
522
pkg/readers/bun/reader_test.go
Normal file
@@ -0,0 +1,522 @@
|
|||||||
|
package bun
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Simple(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
table := schema.Tables[0]
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify id column - primary key should be NOT NULL
|
||||||
|
idCol, exists := table.Columns["id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'id' not found")
|
||||||
|
}
|
||||||
|
if !idCol.IsPrimaryKey {
|
||||||
|
t.Error("Column 'id' should be primary key")
|
||||||
|
}
|
||||||
|
if !idCol.AutoIncrement {
|
||||||
|
t.Error("Column 'id' should be auto-increment")
|
||||||
|
}
|
||||||
|
if !idCol.NotNull {
|
||||||
|
t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
|
||||||
|
}
|
||||||
|
if idCol.Type != "bigint" {
|
||||||
|
t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify email column - explicit notnull tag should be NOT NULL
|
||||||
|
emailCol, exists := table.Columns["email"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'email' not found")
|
||||||
|
}
|
||||||
|
if !emailCol.NotNull {
|
||||||
|
t.Error("Column 'email' should be NOT NULL (explicit 'notnull' tag)")
|
||||||
|
}
|
||||||
|
if emailCol.Type != "varchar" || emailCol.Length != 255 {
|
||||||
|
t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify name column - primitive string type should be NOT NULL by default in Bun
|
||||||
|
nameCol, exists := table.Columns["name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'name' not found")
|
||||||
|
}
|
||||||
|
if !nameCol.NotNull {
|
||||||
|
t.Error("Column 'name' should be NOT NULL (primitive string type, no nullzero tag)")
|
||||||
|
}
|
||||||
|
if nameCol.Type != "text" {
|
||||||
|
t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify age column - pointer type should be nullable (NOT NULL = false)
|
||||||
|
ageCol, exists := table.Columns["age"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'age' not found")
|
||||||
|
}
|
||||||
|
if ageCol.NotNull {
|
||||||
|
t.Error("Column 'age' should be nullable (pointer type *int)")
|
||||||
|
}
|
||||||
|
if ageCol.Type != "integer" {
|
||||||
|
t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify is_active column - primitive bool type should be NOT NULL by default
|
||||||
|
isActiveCol, exists := table.Columns["is_active"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'is_active' not found")
|
||||||
|
}
|
||||||
|
if !isActiveCol.NotNull {
|
||||||
|
t.Error("Column 'is_active' should be NOT NULL (primitive bool type, no nullzero tag)")
|
||||||
|
}
|
||||||
|
if isActiveCol.Type != "boolean" {
|
||||||
|
t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify created_at column - time.Time should be NOT NULL by default
|
||||||
|
createdAtCol, exists := table.Columns["created_at"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'created_at' not found")
|
||||||
|
}
|
||||||
|
if !createdAtCol.NotNull {
|
||||||
|
t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
|
||||||
|
}
|
||||||
|
if createdAtCol.Type != "timestamp" {
|
||||||
|
t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify unique index on email
|
||||||
|
if len(table.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on users table")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Complex(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify schema
|
||||||
|
if len(db.Schemas) != 1 {
|
||||||
|
t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify tables
|
||||||
|
if len(schema.Tables) != 3 {
|
||||||
|
t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find tables
|
||||||
|
var usersTable, postsTable, commentsTable *models.Table
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
switch table.Name {
|
||||||
|
case "users":
|
||||||
|
usersTable = table
|
||||||
|
case "posts":
|
||||||
|
postsTable = table
|
||||||
|
case "comments":
|
||||||
|
commentsTable = table
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if usersTable == nil {
|
||||||
|
t.Fatal("Users table not found")
|
||||||
|
}
|
||||||
|
if postsTable == nil {
|
||||||
|
t.Fatal("Posts table not found")
|
||||||
|
}
|
||||||
|
if commentsTable == nil {
|
||||||
|
t.Fatal("Comments table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify users table - test NOT NULL logic for various field types
|
||||||
|
if len(usersTable.Columns) != 10 {
|
||||||
|
t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// username - NOT NULL (explicit notnull tag)
|
||||||
|
usernameCol, exists := usersTable.Columns["username"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'username' not found")
|
||||||
|
}
|
||||||
|
if !usernameCol.NotNull {
|
||||||
|
t.Error("Column 'username' should be NOT NULL (explicit 'notnull' tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// first_name - nullable (pointer type)
|
||||||
|
firstNameCol, exists := usersTable.Columns["first_name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'first_name' not found")
|
||||||
|
}
|
||||||
|
if firstNameCol.NotNull {
|
||||||
|
t.Error("Column 'first_name' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// last_name - nullable (pointer type)
|
||||||
|
lastNameCol, exists := usersTable.Columns["last_name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'last_name' not found")
|
||||||
|
}
|
||||||
|
if lastNameCol.NotNull {
|
||||||
|
t.Error("Column 'last_name' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// bio - nullable (pointer type)
|
||||||
|
bioCol, exists := usersTable.Columns["bio"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'bio' not found")
|
||||||
|
}
|
||||||
|
if bioCol.NotNull {
|
||||||
|
t.Error("Column 'bio' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// is_active - NOT NULL (primitive bool without nullzero)
|
||||||
|
isActiveCol, exists := usersTable.Columns["is_active"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'is_active' not found")
|
||||||
|
}
|
||||||
|
if !isActiveCol.NotNull {
|
||||||
|
t.Error("Column 'is_active' should be NOT NULL (primitive bool type without nullzero)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify users table indexes
|
||||||
|
if len(usersTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on users table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table
|
||||||
|
if len(postsTable.Columns) != 11 {
|
||||||
|
t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// excerpt - nullable (pointer type)
|
||||||
|
excerptCol, exists := postsTable.Columns["excerpt"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'excerpt' not found")
|
||||||
|
}
|
||||||
|
if excerptCol.NotNull {
|
||||||
|
t.Error("Column 'excerpt' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// published - NOT NULL (primitive bool without nullzero)
|
||||||
|
publishedCol, exists := postsTable.Columns["published"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published' not found")
|
||||||
|
}
|
||||||
|
if !publishedCol.NotNull {
|
||||||
|
t.Error("Column 'published' should be NOT NULL (primitive bool type without nullzero)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// published_at - nullable (has nullzero tag)
|
||||||
|
publishedAtCol, exists := postsTable.Columns["published_at"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published_at' not found")
|
||||||
|
}
|
||||||
|
if publishedAtCol.NotNull {
|
||||||
|
t.Error("Column 'published_at' should be nullable (has nullzero tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// view_count - NOT NULL (primitive int64 without nullzero)
|
||||||
|
viewCountCol, exists := postsTable.Columns["view_count"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'view_count' not found")
|
||||||
|
}
|
||||||
|
if !viewCountCol.NotNull {
|
||||||
|
t.Error("Column 'view_count' should be NOT NULL (primitive int64 type without nullzero)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table indexes
|
||||||
|
if len(postsTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on posts table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify comments table
|
||||||
|
if len(commentsTable.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// user_id - nullable (pointer type)
|
||||||
|
userIDCol, exists := commentsTable.Columns["user_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'user_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if userIDCol.NotNull {
|
||||||
|
t.Error("Column 'user_id' should be nullable (pointer type *int64)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// post_id - NOT NULL (explicit notnull tag)
|
||||||
|
postIDCol, exists := commentsTable.Columns["post_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'post_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if !postIDCol.NotNull {
|
||||||
|
t.Error("Column 'post_id' should be NOT NULL (explicit 'notnull' tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify foreign key constraints are created from relationship tags
|
||||||
|
// In Bun, relationships are defined with rel: tags
|
||||||
|
// The constraints should be created on the referenced tables
|
||||||
|
if len(postsTable.Constraints) > 0 {
|
||||||
|
// Check if FK constraint exists
|
||||||
|
var fkPostsUser *models.Constraint
|
||||||
|
for _, c := range postsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
|
||||||
|
fkPostsUser = c
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkPostsUser != nil {
|
||||||
|
if len(fkPostsUser.Columns) != 1 || fkPostsUser.Columns[0] != "user_id" {
|
||||||
|
t.Error("Expected FK column 'user_id'")
|
||||||
|
}
|
||||||
|
if len(fkPostsUser.ReferencedColumns) != 1 || fkPostsUser.ReferencedColumns[0] != "id" {
|
||||||
|
t.Error("Expected FK referenced column 'id'")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(commentsTable.Constraints) > 0 {
|
||||||
|
// Check if FK constraints exist
|
||||||
|
var fkCommentsPost, fkCommentsUser *models.Constraint
|
||||||
|
for _, c := range commentsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint {
|
||||||
|
if c.ReferencedTable == "posts" {
|
||||||
|
fkCommentsPost = c
|
||||||
|
} else if c.ReferencedTable == "users" {
|
||||||
|
fkCommentsUser = c
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsPost != nil {
|
||||||
|
if len(fkCommentsPost.Columns) != 1 || fkCommentsPost.Columns[0] != "post_id" {
|
||||||
|
t.Error("Expected FK column 'post_id'")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsUser != nil {
|
||||||
|
if len(fkCommentsUser.Columns) != 1 || fkCommentsUser.Columns[0] != "user_id" {
|
||||||
|
t.Error("Expected FK column 'user_id'")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema == nil {
|
||||||
|
t.Fatal("ReadSchema() returned nil schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Errorf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadTable() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if table == nil {
|
||||||
|
t.Fatal("ReadTable() returned nil table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Directory(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should read both simple.go and complex.go
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
// Should have at least 3 tables from complex.go (users, posts, comments)
|
||||||
|
// plus 1 from simple.go (users) - but same table name, so may be overwritten
|
||||||
|
if len(schema.Tables) < 3 {
|
||||||
|
t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/file.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for invalid file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for empty file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_NullableTypes(t *testing.T) {
|
||||||
|
// This test specifically verifies the NOT NULL logic changes
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find posts table
|
||||||
|
var postsTable *models.Table
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == "posts" {
|
||||||
|
postsTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if postsTable == nil {
|
||||||
|
t.Fatal("Posts table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test all nullability scenarios
|
||||||
|
tests := []struct {
|
||||||
|
column string
|
||||||
|
notNull bool
|
||||||
|
reason string
|
||||||
|
}{
|
||||||
|
{"id", true, "primary key"},
|
||||||
|
{"user_id", true, "explicit notnull tag"},
|
||||||
|
{"title", true, "explicit notnull tag"},
|
||||||
|
{"slug", true, "explicit notnull tag"},
|
||||||
|
{"content", true, "explicit notnull tag"},
|
||||||
|
{"excerpt", false, "pointer type *string"},
|
||||||
|
{"published", true, "primitive bool without nullzero"},
|
||||||
|
{"view_count", true, "primitive int64 without nullzero"},
|
||||||
|
{"published_at", false, "has nullzero tag"},
|
||||||
|
{"created_at", true, "time.Time without nullzero"},
|
||||||
|
{"updated_at", true, "time.Time without nullzero"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
col, exists := postsTable.Columns[tt.column]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Column '%s' not found", tt.column)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.NotNull != tt.notNull {
|
||||||
|
if tt.notNull {
|
||||||
|
t.Errorf("Column '%s' should be NOT NULL (%s), but NotNull=%v",
|
||||||
|
tt.column, tt.reason, col.NotNull)
|
||||||
|
} else {
|
||||||
|
t.Errorf("Column '%s' should be nullable (%s), but NotNull=%v",
|
||||||
|
tt.column, tt.reason, col.NotNull)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
101
pkg/readers/dbml/README.md
Normal file
101
pkg/readers/dbml/README.md
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# DBML Reader
|
||||||
|
|
||||||
|
Reads Database Markup Language (DBML) files and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DBML Reader parses `.dbml` files that define database schemas using the DBML syntax (used by dbdiagram.io) and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses DBML syntax
|
||||||
|
- Extracts tables, columns, and relationships
|
||||||
|
- Supports DBML-specific features:
|
||||||
|
- Table groups and notes
|
||||||
|
- Enum definitions
|
||||||
|
- Indexes
|
||||||
|
- Foreign key relationships
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/schema.dbml",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := dbml.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read DBML file and convert to JSON
|
||||||
|
relspec --input dbml --in-file schema.dbml --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert DBML to GORM models
|
||||||
|
relspec --input dbml --in-file database.dbml --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example DBML File
|
||||||
|
|
||||||
|
```dbml
|
||||||
|
Table users {
|
||||||
|
id bigserial [pk, increment]
|
||||||
|
username varchar(50) [not null, unique]
|
||||||
|
email varchar(100) [not null]
|
||||||
|
created_at timestamp [not null, default: `now()`]
|
||||||
|
|
||||||
|
Note: 'Users table'
|
||||||
|
}
|
||||||
|
|
||||||
|
Table posts {
|
||||||
|
id bigserial [pk]
|
||||||
|
user_id bigint [not null, ref: > users.id]
|
||||||
|
title varchar(200) [not null]
|
||||||
|
content text
|
||||||
|
|
||||||
|
indexes {
|
||||||
|
user_id
|
||||||
|
(user_id, created_at) [name: 'idx_user_posts']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ref: posts.user_id > users.id [delete: cascade]
|
||||||
|
```
|
||||||
|
|
||||||
|
## DBML Features Supported
|
||||||
|
|
||||||
|
- Table definitions with columns
|
||||||
|
- Primary keys (`pk`)
|
||||||
|
- Not null constraints (`not null`)
|
||||||
|
- Unique constraints (`unique`)
|
||||||
|
- Default values (`default`)
|
||||||
|
- Inline references (`ref`)
|
||||||
|
- Standalone `Ref` blocks
|
||||||
|
- Indexes and composite indexes
|
||||||
|
- Table notes and column notes
|
||||||
|
- Enums
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DBML is designed for database documentation and diagramming
|
||||||
|
- Schema name defaults to `public`
|
||||||
|
- Relationship cardinality is preserved
|
||||||
96
pkg/readers/dctx/README.md
Normal file
96
pkg/readers/dctx/README.md
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
# DCTX Reader
|
||||||
|
|
||||||
|
Reads Clarion database dictionary (DCTX) files and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DCTX Reader parses Clarion dictionary files (`.dctx`) that define database structures in the Clarion development system and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses Clarion DCTX XML format
|
||||||
|
- Extracts file (table) and field (column) definitions
|
||||||
|
- Supports Clarion data types
|
||||||
|
- Handles keys (indexes) and relationships
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/database.dctx",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := dctx.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read DCTX file and convert to JSON
|
||||||
|
relspec --input dctx --in-file legacy.dctx --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert DCTX to GORM models for migration
|
||||||
|
relspec --input dctx --in-file app.dctx --output gorm --out-file models.go
|
||||||
|
|
||||||
|
# Export DCTX to PostgreSQL DDL
|
||||||
|
relspec --input dctx --in-file database.dctx --output pgsql --out-file schema.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example DCTX Structure
|
||||||
|
|
||||||
|
DCTX files are XML-based Clarion dictionary files that define:
|
||||||
|
|
||||||
|
- Files (equivalent to tables)
|
||||||
|
- Fields (columns) with Clarion-specific types
|
||||||
|
- Keys (indexes)
|
||||||
|
- Relationships between files
|
||||||
|
|
||||||
|
Common Clarion data types:
|
||||||
|
- `STRING` - Fixed-length string
|
||||||
|
- `CSTRING` - C-style null-terminated string
|
||||||
|
- `LONG` - 32-bit integer
|
||||||
|
- `SHORT` - 16-bit integer
|
||||||
|
- `DECIMAL` - Decimal number
|
||||||
|
- `REAL` - Floating point
|
||||||
|
- `DATE` - Date field
|
||||||
|
- `TIME` - Time field
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
The reader automatically maps Clarion data types to standard SQL types:
|
||||||
|
|
||||||
|
| Clarion Type | SQL Type |
|
||||||
|
|--------------|----------|
|
||||||
|
| STRING | VARCHAR |
|
||||||
|
| CSTRING | VARCHAR |
|
||||||
|
| LONG | INTEGER |
|
||||||
|
| SHORT | SMALLINT |
|
||||||
|
| DECIMAL | NUMERIC |
|
||||||
|
| REAL | REAL |
|
||||||
|
| DATE | DATE |
|
||||||
|
| TIME | TIME |
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DCTX is specific to Clarion development platform
|
||||||
|
- Useful for migrating legacy Clarion applications
|
||||||
|
- Schema name defaults to `public`
|
||||||
|
- Preserves field properties and constraints where possible
|
||||||
96
pkg/readers/drawdb/README.md
Normal file
96
pkg/readers/drawdb/README.md
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
# DrawDB Reader
|
||||||
|
|
||||||
|
Reads DrawDB schema files and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DrawDB Reader parses JSON files exported from DrawDB (a free online database design tool) and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses DrawDB JSON format
|
||||||
|
- Extracts tables, fields, and relationships
|
||||||
|
- Supports DrawDB-specific metadata
|
||||||
|
- Preserves visual layout information
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/diagram.json",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := drawdb.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read DrawDB export and convert to JSON schema
|
||||||
|
relspec --input drawdb --in-file diagram.json --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert DrawDB design to GORM models
|
||||||
|
relspec --input drawdb --in-file design.json --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example DrawDB Export
|
||||||
|
|
||||||
|
DrawDB exports database designs as JSON files containing:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tables": [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"name": "users",
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"type": "BIGINT",
|
||||||
|
"primary": true,
|
||||||
|
"autoIncrement": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "username",
|
||||||
|
"type": "VARCHAR",
|
||||||
|
"size": 50,
|
||||||
|
"notNull": true,
|
||||||
|
"unique": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"relationships": [
|
||||||
|
{
|
||||||
|
"source": "posts",
|
||||||
|
"target": "users",
|
||||||
|
"type": "many-to-one"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DrawDB is a free online database designer at drawdb.vercel.app
|
||||||
|
- Export format preserves visual design metadata
|
||||||
|
- Useful for converting visual designs to code
|
||||||
|
- Schema defaults to `public`
|
||||||
90
pkg/readers/drizzle/README.md
Normal file
90
pkg/readers/drizzle/README.md
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Drizzle Reader
|
||||||
|
|
||||||
|
Reads TypeScript/JavaScript files containing Drizzle ORM schema definitions and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Drizzle Reader parses Drizzle ORM schema files (TypeScript/JavaScript) that define database tables using Drizzle's schema builder and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses Drizzle schema definitions
|
||||||
|
- Extracts table, column, and relationship information
|
||||||
|
- Supports various Drizzle column types
|
||||||
|
- Handles constraints and indexes
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/schema.ts",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := drizzle.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read Drizzle schema and convert to JSON
|
||||||
|
relspec --input drizzle --in-file schema.ts --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert Drizzle to GORM models
|
||||||
|
relspec --input drizzle --in-file schema/ --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example Drizzle Schema
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { pgTable, serial, varchar, text, timestamp, integer } from 'drizzle-orm/pg-core';
|
||||||
|
import { relations } from 'drizzle-orm';
|
||||||
|
|
||||||
|
export const users = pgTable('users', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
username: varchar('username', { length: 50 }).notNull().unique(),
|
||||||
|
email: varchar('email', { length: 100 }).notNull(),
|
||||||
|
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const posts = pgTable('posts', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
|
||||||
|
title: varchar('title', { length: 200 }).notNull(),
|
||||||
|
content: text('content'),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const usersRelations = relations(users, ({ many }) => ({
|
||||||
|
posts: many(posts),
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const postsRelations = relations(posts, ({ one }) => ({
|
||||||
|
user: one(users, {
|
||||||
|
fields: [posts.userId],
|
||||||
|
references: [users.id],
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Supports both PostgreSQL and MySQL Drizzle schemas
|
||||||
|
- Extracts relationship information from `relations` definitions
|
||||||
|
- Schema defaults to `public` for PostgreSQL
|
||||||
619
pkg/readers/drizzle/reader.go
Normal file
619
pkg/readers/drizzle/reader.go
Normal file
@@ -0,0 +1,619 @@
|
|||||||
|
package drizzle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reader implements the readers.Reader interface for Drizzle schema format
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader creates a new Drizzle reader with the given options
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDatabase reads and parses Drizzle schema input, returning a Database model
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Drizzle reader")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a file or directory
|
||||||
|
info, err := os.Stat(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to stat path: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if info.IsDir() {
|
||||||
|
// Read all .ts files in the directory
|
||||||
|
return r.readDirectory(r.options.FilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read single file
|
||||||
|
content, err := os.ReadFile(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.parseDrizzle(string(content))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadSchema reads and parses Drizzle schema input, returning a Schema model
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schemas found in Drizzle schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the first schema
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTable reads and parses Drizzle schema input, returning a Table model
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
schema, err := r.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) == 0 {
|
||||||
|
return nil, fmt.Errorf("no tables found in Drizzle schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the first table
|
||||||
|
return schema.Tables[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// readDirectory reads all .ts files in a directory and parses them
|
||||||
|
func (r *Reader) readDirectory(dirPath string) (*models.Database, error) {
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
if name, ok := r.options.Metadata["name"].(string); ok {
|
||||||
|
db.Name = name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default schema for Drizzle
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
schema.Enums = make([]*models.Enum, 0)
|
||||||
|
|
||||||
|
// Read all .ts files
|
||||||
|
files, err := filepath.Glob(filepath.Join(dirPath, "*.ts"))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to glob directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse each file
|
||||||
|
for _, file := range files {
|
||||||
|
content, err := os.ReadFile(file)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read file %s: %w", file, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse and merge into schema
|
||||||
|
fileDB, err := r.parseDrizzle(string(content))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse file %s: %w", file, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge schemas
|
||||||
|
if len(fileDB.Schemas) > 0 {
|
||||||
|
fileSchema := fileDB.Schemas[0]
|
||||||
|
schema.Tables = append(schema.Tables, fileSchema.Tables...)
|
||||||
|
schema.Enums = append(schema.Enums, fileSchema.Enums...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
db.Schemas = append(db.Schemas, schema)
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseDrizzle parses Drizzle schema content and returns a Database model
|
||||||
|
func (r *Reader) parseDrizzle(content string) (*models.Database, error) {
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
if name, ok := r.options.Metadata["name"].(string); ok {
|
||||||
|
db.Name = name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default schema for Drizzle (PostgreSQL)
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
schema.Enums = make([]*models.Enum, 0)
|
||||||
|
db.DatabaseType = models.PostgresqlDatabaseType
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||||
|
|
||||||
|
// Regex patterns
|
||||||
|
// Match: export const users = pgTable('users', {
|
||||||
|
pgTableRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgTable\s*\(\s*['"](\w+)['"]`)
|
||||||
|
// Match: export const userRole = pgEnum('UserRole', ['admin', 'user']);
|
||||||
|
pgEnumRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgEnum\s*\(\s*['"](\w+)['"]`)
|
||||||
|
|
||||||
|
// State tracking
|
||||||
|
var currentTable *models.Table
|
||||||
|
var currentTableVarName string
|
||||||
|
var inTableBlock bool
|
||||||
|
var blockDepth int
|
||||||
|
var tableLines []string
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Skip empty lines and comments
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for pgEnum definition
|
||||||
|
if matches := pgEnumRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
enum := r.parsePgEnum(trimmed, matches)
|
||||||
|
if enum != nil {
|
||||||
|
schema.Enums = append(schema.Enums, enum)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for pgTable definition
|
||||||
|
if matches := pgTableRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
varName := matches[1]
|
||||||
|
tableName := matches[2]
|
||||||
|
|
||||||
|
currentTableVarName = varName
|
||||||
|
currentTable = models.InitTable(tableName, "public")
|
||||||
|
inTableBlock = true
|
||||||
|
// Count braces in the first line
|
||||||
|
blockDepth = strings.Count(line, "{") - strings.Count(line, "}")
|
||||||
|
tableLines = []string{line}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we're in a table block, accumulate lines
|
||||||
|
if inTableBlock {
|
||||||
|
tableLines = append(tableLines, line)
|
||||||
|
|
||||||
|
// Track brace depth
|
||||||
|
blockDepth += strings.Count(line, "{")
|
||||||
|
blockDepth -= strings.Count(line, "}")
|
||||||
|
|
||||||
|
// Check if we've closed the table definition
|
||||||
|
if blockDepth < 0 || (blockDepth == 0 && strings.Contains(line, ");")) {
|
||||||
|
// Parse the complete table block
|
||||||
|
if currentTable != nil {
|
||||||
|
r.parseTableBlock(tableLines, currentTable, currentTableVarName)
|
||||||
|
schema.Tables = append(schema.Tables, currentTable)
|
||||||
|
currentTable = nil
|
||||||
|
}
|
||||||
|
inTableBlock = false
|
||||||
|
tableLines = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
db.Schemas = append(db.Schemas, schema)
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePgEnum parses a pgEnum definition
|
||||||
|
func (r *Reader) parsePgEnum(line string, matches []string) *models.Enum {
|
||||||
|
// matches[1] = variable name
|
||||||
|
// matches[2] = enum name
|
||||||
|
|
||||||
|
enumName := matches[2]
|
||||||
|
|
||||||
|
// Extract values from the array
|
||||||
|
// Example: pgEnum('UserRole', ['admin', 'user', 'guest'])
|
||||||
|
valuesRegex := regexp.MustCompile(`\[(.*?)\]`)
|
||||||
|
valuesMatch := valuesRegex.FindStringSubmatch(line)
|
||||||
|
if valuesMatch == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
valuesStr := valuesMatch[1]
|
||||||
|
// Split by comma and clean up
|
||||||
|
valueParts := strings.Split(valuesStr, ",")
|
||||||
|
values := make([]string, 0)
|
||||||
|
for _, part := range valueParts {
|
||||||
|
// Remove quotes and whitespace
|
||||||
|
cleaned := strings.TrimSpace(part)
|
||||||
|
cleaned = strings.Trim(cleaned, "'\"")
|
||||||
|
if cleaned != "" {
|
||||||
|
values = append(values, cleaned)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &models.Enum{
|
||||||
|
Name: enumName,
|
||||||
|
Values: values,
|
||||||
|
Schema: "public",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTableBlock parses a complete pgTable definition block
|
||||||
|
func (r *Reader) parseTableBlock(lines []string, table *models.Table, tableVarName string) {
|
||||||
|
// Join all lines into a single string for easier parsing
|
||||||
|
fullText := strings.Join(lines, "\n")
|
||||||
|
|
||||||
|
// Extract the columns block and index callback separately
|
||||||
|
// The structure is: pgTable('name', { columns }, (table) => [indexes])
|
||||||
|
|
||||||
|
// Find the main object block (columns)
|
||||||
|
columnsStart := strings.Index(fullText, "{")
|
||||||
|
if columnsStart == -1 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find matching closing brace for columns
|
||||||
|
depth := 0
|
||||||
|
columnsEnd := -1
|
||||||
|
for i := columnsStart; i < len(fullText); i++ {
|
||||||
|
if fullText[i] == '{' {
|
||||||
|
depth++
|
||||||
|
} else if fullText[i] == '}' {
|
||||||
|
depth--
|
||||||
|
if depth == 0 {
|
||||||
|
columnsEnd = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if columnsEnd == -1 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
columnsBlock := fullText[columnsStart+1 : columnsEnd]
|
||||||
|
|
||||||
|
// Parse columns
|
||||||
|
r.parseColumnsBlock(columnsBlock, table, tableVarName)
|
||||||
|
|
||||||
|
// Check for index callback: , (table) => [ or , ({ col1, col2 }) => [
|
||||||
|
// Match: }, followed by arrow function with any parameters
|
||||||
|
// Use (?s) flag to make . match newlines
|
||||||
|
indexCallbackRegex := regexp.MustCompile(`(?s)}\s*,\s*\(.*?\)\s*=>\s*\[`)
|
||||||
|
if indexCallbackRegex.MatchString(fullText[columnsEnd:]) {
|
||||||
|
// Find the index array
|
||||||
|
indexStart := strings.Index(fullText[columnsEnd:], "[")
|
||||||
|
if indexStart != -1 {
|
||||||
|
indexStart += columnsEnd
|
||||||
|
indexDepth := 0
|
||||||
|
indexEnd := -1
|
||||||
|
for i := indexStart; i < len(fullText); i++ {
|
||||||
|
if fullText[i] == '[' {
|
||||||
|
indexDepth++
|
||||||
|
} else if fullText[i] == ']' {
|
||||||
|
indexDepth--
|
||||||
|
if indexDepth == 0 {
|
||||||
|
indexEnd = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if indexEnd != -1 {
|
||||||
|
indexBlock := fullText[indexStart+1 : indexEnd]
|
||||||
|
r.parseIndexBlock(indexBlock, table, tableVarName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnsBlock parses the columns block of a table
|
||||||
|
func (r *Reader) parseColumnsBlock(block string, table *models.Table, tableVarName string) {
|
||||||
|
// Split by lines and parse each column definition
|
||||||
|
lines := strings.Split(block, "\n")
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match: fieldName: columnType('columnName').modifier().modifier(),
|
||||||
|
// Example: id: integer('id').primaryKey(),
|
||||||
|
columnRegex := regexp.MustCompile(`(\w+):\s*(\w+)\s*\(`)
|
||||||
|
matches := columnRegex.FindStringSubmatch(trimmed)
|
||||||
|
if matches == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldName := matches[1]
|
||||||
|
columnType := matches[2]
|
||||||
|
|
||||||
|
// Parse the column definition
|
||||||
|
col := r.parseColumnDefinition(trimmed, fieldName, columnType, table)
|
||||||
|
if col != nil {
|
||||||
|
table.Columns[col.Name] = col
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnDefinition parses a single column definition line
|
||||||
|
func (r *Reader) parseColumnDefinition(line, fieldName, drizzleType string, table *models.Table) *models.Column {
|
||||||
|
// Check for enum column syntax: pgEnum('EnumName')('column_name')
|
||||||
|
enumRegex := regexp.MustCompile(`pgEnum\s*\(['"](\w+)['"]\)\s*\(['"](\w+)['"]\)`)
|
||||||
|
if enumMatch := enumRegex.FindStringSubmatch(line); enumMatch != nil {
|
||||||
|
enumName := enumMatch[1]
|
||||||
|
columnName := enumMatch[2]
|
||||||
|
|
||||||
|
column := models.InitColumn(columnName, table.Name, table.Schema)
|
||||||
|
column.Type = enumName
|
||||||
|
column.NotNull = false
|
||||||
|
|
||||||
|
// Parse modifiers
|
||||||
|
r.parseColumnModifiers(line, column, table)
|
||||||
|
return column
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract column name from the first argument
|
||||||
|
// Example: integer('id')
|
||||||
|
nameRegex := regexp.MustCompile(`\w+\s*\(['"](\w+)['"]\)`)
|
||||||
|
nameMatch := nameRegex.FindStringSubmatch(line)
|
||||||
|
if nameMatch == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
columnName := nameMatch[1]
|
||||||
|
column := models.InitColumn(columnName, table.Name, table.Schema)
|
||||||
|
|
||||||
|
// Map Drizzle type to SQL type
|
||||||
|
column.Type = r.drizzleTypeToSQL(drizzleType)
|
||||||
|
|
||||||
|
// Default: columns are nullable unless specified
|
||||||
|
column.NotNull = false
|
||||||
|
|
||||||
|
// Parse modifiers
|
||||||
|
r.parseColumnModifiers(line, column, table)
|
||||||
|
|
||||||
|
return column
|
||||||
|
}
|
||||||
|
|
||||||
|
// drizzleTypeToSQL converts Drizzle column types to SQL types
|
||||||
|
func (r *Reader) drizzleTypeToSQL(drizzleType string) string {
|
||||||
|
typeMap := map[string]string{
|
||||||
|
// Integer types
|
||||||
|
"integer": "integer",
|
||||||
|
"bigint": "bigint",
|
||||||
|
"smallint": "smallint",
|
||||||
|
|
||||||
|
// Serial types
|
||||||
|
"serial": "serial",
|
||||||
|
"bigserial": "bigserial",
|
||||||
|
"smallserial": "smallserial",
|
||||||
|
|
||||||
|
// Numeric types
|
||||||
|
"numeric": "numeric",
|
||||||
|
"real": "real",
|
||||||
|
"doublePrecision": "double precision",
|
||||||
|
|
||||||
|
// Character types
|
||||||
|
"text": "text",
|
||||||
|
"varchar": "varchar",
|
||||||
|
"char": "char",
|
||||||
|
|
||||||
|
// Boolean
|
||||||
|
"boolean": "boolean",
|
||||||
|
|
||||||
|
// Binary
|
||||||
|
"bytea": "bytea",
|
||||||
|
|
||||||
|
// JSON
|
||||||
|
"json": "json",
|
||||||
|
"jsonb": "jsonb",
|
||||||
|
|
||||||
|
// Date/Time
|
||||||
|
"time": "time",
|
||||||
|
"timestamp": "timestamp",
|
||||||
|
"date": "date",
|
||||||
|
"interval": "interval",
|
||||||
|
|
||||||
|
// UUID
|
||||||
|
"uuid": "uuid",
|
||||||
|
|
||||||
|
// Geometric
|
||||||
|
"point": "point",
|
||||||
|
"line": "line",
|
||||||
|
}
|
||||||
|
|
||||||
|
if sqlType, ok := typeMap[drizzleType]; ok {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// If not found, might be an enum - return as-is
|
||||||
|
return drizzleType
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnModifiers parses column modifiers like .primaryKey(), .notNull(), etc.
|
||||||
|
func (r *Reader) parseColumnModifiers(line string, column *models.Column, table *models.Table) {
|
||||||
|
// Check for .primaryKey()
|
||||||
|
if strings.Contains(line, ".primaryKey()") {
|
||||||
|
column.IsPrimaryKey = true
|
||||||
|
column.NotNull = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for .notNull()
|
||||||
|
if strings.Contains(line, ".notNull()") {
|
||||||
|
column.NotNull = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for .unique()
|
||||||
|
if strings.Contains(line, ".unique()") {
|
||||||
|
uniqueConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("uq_%s", column.Name),
|
||||||
|
models.UniqueConstraint,
|
||||||
|
)
|
||||||
|
uniqueConstraint.Schema = table.Schema
|
||||||
|
uniqueConstraint.Table = table.Name
|
||||||
|
uniqueConstraint.Columns = []string{column.Name}
|
||||||
|
table.Constraints[uniqueConstraint.Name] = uniqueConstraint
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for .default(...)
|
||||||
|
// Need to handle nested backticks and parentheses in SQL expressions
|
||||||
|
defaultIdx := strings.Index(line, ".default(")
|
||||||
|
if defaultIdx != -1 {
|
||||||
|
start := defaultIdx + len(".default(")
|
||||||
|
depth := 1
|
||||||
|
inBacktick := false
|
||||||
|
i := start
|
||||||
|
|
||||||
|
for i < len(line) && depth > 0 {
|
||||||
|
ch := line[i]
|
||||||
|
if ch == '`' {
|
||||||
|
inBacktick = !inBacktick
|
||||||
|
} else if !inBacktick {
|
||||||
|
switch ch {
|
||||||
|
case '(':
|
||||||
|
depth++
|
||||||
|
case ')':
|
||||||
|
depth--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
if depth == 0 {
|
||||||
|
defaultValue := strings.TrimSpace(line[start : i-1])
|
||||||
|
r.parseDefaultValue(defaultValue, column)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for .generatedAlwaysAsIdentity()
|
||||||
|
if strings.Contains(line, ".generatedAlwaysAsIdentity()") {
|
||||||
|
column.AutoIncrement = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for .references(() => otherTable.column)
|
||||||
|
referencesRegex := regexp.MustCompile(`\.references\(\(\)\s*=>\s*(\w+)\.(\w+)\)`)
|
||||||
|
if matches := referencesRegex.FindStringSubmatch(line); matches != nil {
|
||||||
|
refTableVar := matches[1]
|
||||||
|
refColumn := matches[2]
|
||||||
|
|
||||||
|
// Create FK constraint
|
||||||
|
constraintName := fmt.Sprintf("fk_%s_%s", table.Name, column.Name)
|
||||||
|
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
|
||||||
|
constraint.Schema = table.Schema
|
||||||
|
constraint.Table = table.Name
|
||||||
|
constraint.Columns = []string{column.Name}
|
||||||
|
constraint.ReferencedSchema = table.Schema // Assume same schema
|
||||||
|
constraint.ReferencedTable = r.varNameToTableName(refTableVar)
|
||||||
|
constraint.ReferencedColumns = []string{refColumn}
|
||||||
|
|
||||||
|
table.Constraints[constraint.Name] = constraint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseDefaultValue parses a default value expression
|
||||||
|
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
|
||||||
|
defaultExpr = strings.TrimSpace(defaultExpr)
|
||||||
|
|
||||||
|
// Handle SQL expressions like sql`now()`
|
||||||
|
sqlRegex := regexp.MustCompile("sql`([^`]+)`")
|
||||||
|
if match := sqlRegex.FindStringSubmatch(defaultExpr); match != nil {
|
||||||
|
column.Default = match[1]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle boolean values
|
||||||
|
if defaultExpr == "true" {
|
||||||
|
column.Default = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if defaultExpr == "false" {
|
||||||
|
column.Default = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle string literals
|
||||||
|
if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
|
||||||
|
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
|
||||||
|
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to parse as number
|
||||||
|
column.Default = defaultExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseIndexBlock parses the index callback block
|
||||||
|
func (r *Reader) parseIndexBlock(block string, table *models.Table, tableVarName string) {
|
||||||
|
// Split by lines
|
||||||
|
lines := strings.Split(block, "\n")
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match: index('index_name').on(table.col1, table.col2)
|
||||||
|
// or: uniqueIndex('index_name').on(table.col1, table.col2)
|
||||||
|
indexRegex := regexp.MustCompile(`(uniqueIndex|index)\s*\(['"](\w+)['"]\)\s*\.on\s*\((.*?)\)`)
|
||||||
|
matches := indexRegex.FindStringSubmatch(trimmed)
|
||||||
|
if matches == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
indexType := matches[1]
|
||||||
|
indexName := matches[2]
|
||||||
|
columnsStr := matches[3]
|
||||||
|
|
||||||
|
// Parse column list
|
||||||
|
columnParts := strings.Split(columnsStr, ",")
|
||||||
|
columns := make([]string, 0)
|
||||||
|
for _, part := range columnParts {
|
||||||
|
// Remove table prefix: table.column -> column
|
||||||
|
cleaned := strings.TrimSpace(part)
|
||||||
|
if strings.Contains(cleaned, ".") {
|
||||||
|
parts := strings.Split(cleaned, ".")
|
||||||
|
cleaned = parts[len(parts)-1]
|
||||||
|
}
|
||||||
|
columns = append(columns, cleaned)
|
||||||
|
}
|
||||||
|
|
||||||
|
if indexType == "uniqueIndex" {
|
||||||
|
// Create unique constraint
|
||||||
|
constraint := models.InitConstraint(indexName, models.UniqueConstraint)
|
||||||
|
constraint.Schema = table.Schema
|
||||||
|
constraint.Table = table.Name
|
||||||
|
constraint.Columns = columns
|
||||||
|
table.Constraints[constraint.Name] = constraint
|
||||||
|
} else {
|
||||||
|
// Create index
|
||||||
|
index := models.InitIndex(indexName, table.Name, table.Schema)
|
||||||
|
index.Columns = columns
|
||||||
|
index.Unique = false
|
||||||
|
table.Indexes[index.Name] = index
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// varNameToTableName converts a variable name to a table name
|
||||||
|
// For now, just return as-is (could add inflection later)
|
||||||
|
func (r *Reader) varNameToTableName(varName string) string {
|
||||||
|
// TODO: Could add conversion logic here if needed
|
||||||
|
// For now, assume variable name matches table name
|
||||||
|
return varName
|
||||||
|
}
|
||||||
141
pkg/readers/gorm/README.md
Normal file
141
pkg/readers/gorm/README.md
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
# GORM Reader
|
||||||
|
|
||||||
|
Reads Go source files containing GORM model definitions and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The GORM Reader parses Go source code files that define GORM models (structs with `gorm` struct tags) and converts them into RelSpec's internal database model representation. It supports reading from individual files or entire directories.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses GORM struct tags to extract column definitions
|
||||||
|
- Extracts table names from `TableName()` methods
|
||||||
|
- Identifies primary keys, foreign keys, and indexes
|
||||||
|
- Supports relationship detection (has-many, belongs-to)
|
||||||
|
- Handles both single files and directories
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Read from a single file
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/models.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := gorm.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Reading from Directory
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Read all .go files from a directory
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/models/",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := gorm.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read GORM models and convert to JSON
|
||||||
|
relspec --input gorm --in-file models/ --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert GORM models to Bun
|
||||||
|
relspec --input gorm --in-file models.go --output bun --out-file bun_models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported GORM Tags
|
||||||
|
|
||||||
|
The reader recognizes the following GORM struct tags:
|
||||||
|
|
||||||
|
- `column` - Column name
|
||||||
|
- `type` - SQL data type (e.g., `varchar(255)`, `bigint`)
|
||||||
|
- `primaryKey` or `primary_key` - Mark as primary key
|
||||||
|
- `not null` - NOT NULL constraint
|
||||||
|
- `autoIncrement` - Auto-increment column
|
||||||
|
- `default` - Default value
|
||||||
|
- `size` - Column size/length
|
||||||
|
- `index` - Create index
|
||||||
|
- `uniqueIndex` - Create unique index
|
||||||
|
- `unique` - Unique constraint
|
||||||
|
- `foreignKey` - Foreign key column
|
||||||
|
- `references` - Referenced column
|
||||||
|
- `constraint` - Constraint behavior (OnDelete, OnUpdate)
|
||||||
|
|
||||||
|
## Example GORM Model
|
||||||
|
|
||||||
|
```go
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelUser struct {
|
||||||
|
gorm.Model
|
||||||
|
ID int64 `gorm:"column:id;type:bigint;primaryKey;autoIncrement"`
|
||||||
|
Username string `gorm:"column:username;type:varchar(50);not null;uniqueIndex"`
|
||||||
|
Email string `gorm:"column:email;type:varchar(100);not null"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()"`
|
||||||
|
|
||||||
|
// Relationships
|
||||||
|
Posts []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelUser) TableName() string {
|
||||||
|
return "public.users"
|
||||||
|
}
|
||||||
|
|
||||||
|
type ModelPost struct {
|
||||||
|
ID int64 `gorm:"column:id;type:bigint;primaryKey"`
|
||||||
|
UserID int64 `gorm:"column:user_id;type:bigint;not null"`
|
||||||
|
Title string `gorm:"column:title;type:varchar(200);not null"`
|
||||||
|
Content string `gorm:"column:content;type:text"`
|
||||||
|
|
||||||
|
// Belongs-to relationship
|
||||||
|
User *ModelUser `gorm:"foreignKey:UserID;references:ID"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelPost) TableName() string {
|
||||||
|
return "public.posts"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Test files (ending in `_test.go`) are automatically excluded
|
||||||
|
- The `gorm.Model` embedded struct is automatically recognized and skipped
|
||||||
|
- Table names are derived from struct names if `TableName()` method is not present
|
||||||
|
- Schema defaults to `public` if not specified in `TableName()`
|
||||||
|
- Relationships are inferred from GORM relationship tags
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- Complex relationship types (many-to-many with join tables) may need manual verification
|
||||||
|
- Custom GORM types may not be fully supported
|
||||||
|
- Some advanced GORM features may not be captured
|
||||||
@@ -693,7 +693,7 @@ func (r *Reader) deriveTableName(structName string) string {
|
|||||||
|
|
||||||
// parseColumn parses a struct field into a Column model
|
// parseColumn parses a struct field into a Column model
|
||||||
// Returns the column and any inline reference information (e.g., "mainaccount(id_mainaccount)")
|
// Returns the column and any inline reference information (e.g., "mainaccount(id_mainaccount)")
|
||||||
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (*models.Column, string) {
|
func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (col *models.Column, ref string) {
|
||||||
// Extract gorm tag
|
// Extract gorm tag
|
||||||
gormTag := r.extractGormTag(tag)
|
gormTag := r.extractGormTag(tag)
|
||||||
if gormTag == "" {
|
if gormTag == "" {
|
||||||
@@ -756,20 +756,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
|
|||||||
// - explicit "not null" tag means NOT NULL
|
// - explicit "not null" tag means NOT NULL
|
||||||
// - absence of "not null" tag with sql_types means nullable
|
// - absence of "not null" tag with sql_types means nullable
|
||||||
// - primitive types (string, int64, bool) default to NOT NULL unless explicitly nullable
|
// - primitive types (string, int64, bool) default to NOT NULL unless explicitly nullable
|
||||||
|
// Primary keys are always NOT NULL
|
||||||
|
column.NotNull = false
|
||||||
if _, hasNotNull := parts["not null"]; hasNotNull {
|
if _, hasNotNull := parts["not null"]; hasNotNull {
|
||||||
column.NotNull = true
|
column.NotNull = true
|
||||||
} else {
|
} else {
|
||||||
// If no explicit "not null" tag, check the Go type
|
// sql_types.SqlString, etc. are nullable by default
|
||||||
if r.isNullableGoType(fieldType) {
|
column.NotNull = !r.isNullableGoType(fieldType)
|
||||||
// sql_types.SqlString, etc. are nullable by default
|
|
||||||
column.NotNull = false
|
|
||||||
} else {
|
|
||||||
// Primitive types default to NOT NULL
|
|
||||||
column.NotNull = false // Default to nullable unless explicitly set
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Primary keys are always NOT NULL
|
|
||||||
if column.IsPrimaryKey {
|
if column.IsPrimaryKey {
|
||||||
column.NotNull = true
|
column.NotNull = true
|
||||||
}
|
}
|
||||||
|
|||||||
464
pkg/readers/gorm/reader_test.go
Normal file
464
pkg/readers/gorm/reader_test.go
Normal file
@@ -0,0 +1,464 @@
|
|||||||
|
package gorm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Simple(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
table := schema.Tables[0]
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify id column - primary key should be NOT NULL
|
||||||
|
idCol, exists := table.Columns["id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'id' not found")
|
||||||
|
}
|
||||||
|
if !idCol.IsPrimaryKey {
|
||||||
|
t.Error("Column 'id' should be primary key")
|
||||||
|
}
|
||||||
|
if !idCol.AutoIncrement {
|
||||||
|
t.Error("Column 'id' should be auto-increment")
|
||||||
|
}
|
||||||
|
if !idCol.NotNull {
|
||||||
|
t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
|
||||||
|
}
|
||||||
|
if idCol.Type != "bigint" {
|
||||||
|
t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify email column - explicit "not null" tag should be NOT NULL
|
||||||
|
emailCol, exists := table.Columns["email"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'email' not found")
|
||||||
|
}
|
||||||
|
if !emailCol.NotNull {
|
||||||
|
t.Error("Column 'email' should be NOT NULL (explicit 'not null' tag)")
|
||||||
|
}
|
||||||
|
if emailCol.Type != "varchar" || emailCol.Length != 255 {
|
||||||
|
t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify name column - primitive string type should be NOT NULL by default
|
||||||
|
nameCol, exists := table.Columns["name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'name' not found")
|
||||||
|
}
|
||||||
|
if !nameCol.NotNull {
|
||||||
|
t.Error("Column 'name' should be NOT NULL (primitive string type defaults to NOT NULL)")
|
||||||
|
}
|
||||||
|
if nameCol.Type != "text" {
|
||||||
|
t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify age column - pointer type should be nullable (NOT NULL = false)
|
||||||
|
ageCol, exists := table.Columns["age"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'age' not found")
|
||||||
|
}
|
||||||
|
if ageCol.NotNull {
|
||||||
|
t.Error("Column 'age' should be nullable (pointer type *int)")
|
||||||
|
}
|
||||||
|
if ageCol.Type != "integer" {
|
||||||
|
t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify is_active column - primitive bool type should be NOT NULL by default
|
||||||
|
isActiveCol, exists := table.Columns["is_active"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'is_active' not found")
|
||||||
|
}
|
||||||
|
if !isActiveCol.NotNull {
|
||||||
|
t.Error("Column 'is_active' should be NOT NULL (primitive bool type defaults to NOT NULL)")
|
||||||
|
}
|
||||||
|
if isActiveCol.Type != "boolean" {
|
||||||
|
t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify created_at column - time.Time should be NOT NULL by default
|
||||||
|
createdAtCol, exists := table.Columns["created_at"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'created_at' not found")
|
||||||
|
}
|
||||||
|
if !createdAtCol.NotNull {
|
||||||
|
t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
|
||||||
|
}
|
||||||
|
if createdAtCol.Type != "timestamp" {
|
||||||
|
t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
|
||||||
|
}
|
||||||
|
if createdAtCol.Default != "now()" {
|
||||||
|
t.Errorf("Expected created_at default 'now()', got '%v'", createdAtCol.Default)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Complex(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "complex.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify schema
|
||||||
|
if len(db.Schemas) != 1 {
|
||||||
|
t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify tables
|
||||||
|
if len(schema.Tables) != 3 {
|
||||||
|
t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find tables
|
||||||
|
var usersTable, postsTable, commentsTable *models.Table
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
switch table.Name {
|
||||||
|
case "users":
|
||||||
|
usersTable = table
|
||||||
|
case "posts":
|
||||||
|
postsTable = table
|
||||||
|
case "comments":
|
||||||
|
commentsTable = table
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if usersTable == nil {
|
||||||
|
t.Fatal("Users table not found")
|
||||||
|
}
|
||||||
|
if postsTable == nil {
|
||||||
|
t.Fatal("Posts table not found")
|
||||||
|
}
|
||||||
|
if commentsTable == nil {
|
||||||
|
t.Fatal("Comments table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify users table - test NOT NULL logic for various field types
|
||||||
|
if len(usersTable.Columns) != 10 {
|
||||||
|
t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// username - NOT NULL (explicit tag)
|
||||||
|
usernameCol, exists := usersTable.Columns["username"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'username' not found")
|
||||||
|
}
|
||||||
|
if !usernameCol.NotNull {
|
||||||
|
t.Error("Column 'username' should be NOT NULL (explicit 'not null' tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// first_name - nullable (pointer type)
|
||||||
|
firstNameCol, exists := usersTable.Columns["first_name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'first_name' not found")
|
||||||
|
}
|
||||||
|
if firstNameCol.NotNull {
|
||||||
|
t.Error("Column 'first_name' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// last_name - nullable (pointer type)
|
||||||
|
lastNameCol, exists := usersTable.Columns["last_name"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'last_name' not found")
|
||||||
|
}
|
||||||
|
if lastNameCol.NotNull {
|
||||||
|
t.Error("Column 'last_name' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// bio - nullable (pointer type)
|
||||||
|
bioCol, exists := usersTable.Columns["bio"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'bio' not found")
|
||||||
|
}
|
||||||
|
if bioCol.NotNull {
|
||||||
|
t.Error("Column 'bio' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// is_active - NOT NULL (primitive bool)
|
||||||
|
isActiveCol, exists := usersTable.Columns["is_active"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'is_active' not found")
|
||||||
|
}
|
||||||
|
if !isActiveCol.NotNull {
|
||||||
|
t.Error("Column 'is_active' should be NOT NULL (primitive bool type)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify users table indexes
|
||||||
|
if len(usersTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on users table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table
|
||||||
|
if len(postsTable.Columns) != 11 {
|
||||||
|
t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// excerpt - nullable (pointer type)
|
||||||
|
excerptCol, exists := postsTable.Columns["excerpt"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'excerpt' not found")
|
||||||
|
}
|
||||||
|
if excerptCol.NotNull {
|
||||||
|
t.Error("Column 'excerpt' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// published - NOT NULL (primitive bool with default)
|
||||||
|
publishedCol, exists := postsTable.Columns["published"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published' not found")
|
||||||
|
}
|
||||||
|
if !publishedCol.NotNull {
|
||||||
|
t.Error("Column 'published' should be NOT NULL (primitive bool type)")
|
||||||
|
}
|
||||||
|
if publishedCol.Default != "false" {
|
||||||
|
t.Errorf("Expected published default 'false', got '%v'", publishedCol.Default)
|
||||||
|
}
|
||||||
|
|
||||||
|
// published_at - nullable (pointer to time.Time)
|
||||||
|
publishedAtCol, exists := postsTable.Columns["published_at"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published_at' not found")
|
||||||
|
}
|
||||||
|
if publishedAtCol.NotNull {
|
||||||
|
t.Error("Column 'published_at' should be nullable (pointer type *time.Time)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// view_count - NOT NULL (primitive int64 with default)
|
||||||
|
viewCountCol, exists := postsTable.Columns["view_count"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'view_count' not found")
|
||||||
|
}
|
||||||
|
if !viewCountCol.NotNull {
|
||||||
|
t.Error("Column 'view_count' should be NOT NULL (primitive int64 type)")
|
||||||
|
}
|
||||||
|
if viewCountCol.Default != "0" {
|
||||||
|
t.Errorf("Expected view_count default '0', got '%v'", viewCountCol.Default)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table indexes
|
||||||
|
if len(postsTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on posts table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify comments table
|
||||||
|
if len(commentsTable.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// user_id - nullable (pointer type)
|
||||||
|
userIDCol, exists := commentsTable.Columns["user_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'user_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if userIDCol.NotNull {
|
||||||
|
t.Error("Column 'user_id' should be nullable (pointer type *int64)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// post_id - NOT NULL (explicit tag)
|
||||||
|
postIDCol, exists := commentsTable.Columns["post_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'post_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if !postIDCol.NotNull {
|
||||||
|
t.Error("Column 'post_id' should be NOT NULL (explicit 'not null' tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify foreign key constraints
|
||||||
|
if len(postsTable.Constraints) == 0 {
|
||||||
|
t.Error("Expected at least one constraint on posts table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find FK constraint to users
|
||||||
|
var fkPostsUser *models.Constraint
|
||||||
|
for _, c := range postsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
|
||||||
|
fkPostsUser = c
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkPostsUser != nil {
|
||||||
|
if fkPostsUser.OnDelete != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON DELETE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnDelete)
|
||||||
|
}
|
||||||
|
if fkPostsUser.OnUpdate != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON UPDATE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnUpdate)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify comments table constraints
|
||||||
|
if len(commentsTable.Constraints) == 0 {
|
||||||
|
t.Error("Expected at least one constraint on comments table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find FK constraints
|
||||||
|
var fkCommentsPost, fkCommentsUser *models.Constraint
|
||||||
|
for _, c := range commentsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint {
|
||||||
|
if c.ReferencedTable == "posts" {
|
||||||
|
fkCommentsPost = c
|
||||||
|
} else if c.ReferencedTable == "users" {
|
||||||
|
fkCommentsUser = c
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsPost != nil {
|
||||||
|
if fkCommentsPost.OnDelete != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON DELETE CASCADE for comments->posts FK, got '%s'", fkCommentsPost.OnDelete)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsUser != nil {
|
||||||
|
if fkCommentsUser.OnDelete != "SET NULL" {
|
||||||
|
t.Errorf("Expected ON DELETE SET NULL for comments->users FK, got '%s'", fkCommentsUser.OnDelete)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema == nil {
|
||||||
|
t.Fatal("ReadSchema() returned nil schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Errorf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadTable() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if table == nil {
|
||||||
|
t.Fatal("ReadTable() returned nil table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Directory(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should read both simple.go and complex.go
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
// Should have at least 3 tables from complex.go (users, posts, comments)
|
||||||
|
// plus 1 from simple.go (users) - but same table name, so may be overwritten
|
||||||
|
if len(schema.Tables) < 3 {
|
||||||
|
t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/file.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for invalid file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for empty file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
203
pkg/readers/graphql/README.md
Normal file
203
pkg/readers/graphql/README.md
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
# GraphQL Schema Reader
|
||||||
|
|
||||||
|
The GraphQL reader parses GraphQL Schema Definition Language (SDL) files and converts them into RelSpec's internal database model.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Standard GraphQL SDL** support (generic, non-framework-specific)
|
||||||
|
- **Type to Table mapping**: GraphQL types become database tables
|
||||||
|
- **Field to Column mapping**: GraphQL fields become table columns
|
||||||
|
- **Enum support**: GraphQL enums are preserved
|
||||||
|
- **Custom scalars**: DateTime, JSON, Date automatically mapped to appropriate SQL types
|
||||||
|
- **Implicit relationships**: Detects relationships from field types
|
||||||
|
- **Many-to-many support**: Creates junction tables for bidirectional array relationships
|
||||||
|
- **Configurable ID mapping**: Choose between bigint (default) or UUID for ID fields
|
||||||
|
|
||||||
|
## Supported GraphQL Features
|
||||||
|
|
||||||
|
### Built-in Scalars
|
||||||
|
- `ID` → bigint (default) or uuid (configurable)
|
||||||
|
- `String` → text
|
||||||
|
- `Int` → integer
|
||||||
|
- `Float` → double precision
|
||||||
|
- `Boolean` → boolean
|
||||||
|
|
||||||
|
### Custom Scalars
|
||||||
|
- `DateTime` → timestamp
|
||||||
|
- `JSON` → jsonb
|
||||||
|
- `Date` → date
|
||||||
|
- `Time` → time
|
||||||
|
- `Decimal` → numeric
|
||||||
|
|
||||||
|
Additional custom scalars can be mapped via metadata.
|
||||||
|
|
||||||
|
### Relationships
|
||||||
|
|
||||||
|
Relationships are inferred from field types:
|
||||||
|
|
||||||
|
```graphql
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
author: User! # Many-to-one (creates authorId FK column, NOT NULL)
|
||||||
|
reviewer: User # Many-to-one nullable (creates reviewerId FK column, NULL)
|
||||||
|
tags: [Tag!]! # One-to-many or many-to-many (depending on reverse)
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
posts: [Post!]! # Reverse of Post.author (no FK created)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Tag {
|
||||||
|
id: ID!
|
||||||
|
posts: [Post!]! # Many-to-many with Post (creates PostTag junction table)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Relationship Detection Rules:**
|
||||||
|
- Single type reference (`user: User`) → Creates FK column (e.g., `userId`)
|
||||||
|
- Array type reference (`posts: [Post!]!`) → One-to-many reverse (no FK on this table)
|
||||||
|
- Bidirectional arrays → Many-to-many (creates junction table)
|
||||||
|
|
||||||
|
### Enums
|
||||||
|
|
||||||
|
```graphql
|
||||||
|
enum Role {
|
||||||
|
ADMIN
|
||||||
|
USER
|
||||||
|
GUEST
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
role: Role!
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Enums are preserved in the schema and can be used as column types.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
|
||||||
|
)
|
||||||
|
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "schema.graphql",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := graphql.NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
```
|
||||||
|
|
||||||
|
### With UUID ID Type
|
||||||
|
|
||||||
|
```go
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "schema.graphql",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"idType": "uuid", // Map ID scalar to uuid instead of bigint
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := graphql.NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
```
|
||||||
|
|
||||||
|
### With Per-Type ID Mapping
|
||||||
|
|
||||||
|
```go
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "schema.graphql",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"typeIdMappings": map[string]string{
|
||||||
|
"User": "uuid", // User.id → uuid
|
||||||
|
"Post": "bigint", // Post.id → bigint
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### With Custom Scalar Mappings
|
||||||
|
|
||||||
|
```go
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "schema.graphql",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"customScalarMappings": map[string]string{
|
||||||
|
"Upload": "bytea",
|
||||||
|
"Decimal": "numeric(10,2)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## CLI Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Convert GraphQL to JSON
|
||||||
|
relspec convert --from graphql --from-path schema.graphql \
|
||||||
|
--to json --to-path schema.json
|
||||||
|
|
||||||
|
# Convert GraphQL to GORM models
|
||||||
|
relspec convert --from graphql --from-path schema.graphql \
|
||||||
|
--to gorm --to-path models/ --package models
|
||||||
|
|
||||||
|
# Convert GraphQL to PostgreSQL SQL
|
||||||
|
relspec convert --from graphql --from-path schema.graphql \
|
||||||
|
--to pgsql --to-path schema.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Metadata Options
|
||||||
|
|
||||||
|
| Option | Type | Description | Default |
|
||||||
|
|--------|------|-------------|---------|
|
||||||
|
| `idType` | string | Global ID type mapping ("bigint" or "uuid") | "bigint" |
|
||||||
|
| `typeIdMappings` | map[string]string | Per-type ID mappings | {} |
|
||||||
|
| `customScalarMappings` | map[string]string | Custom scalar to SQL type mappings | {} |
|
||||||
|
| `schemaName` | string | Schema name for all tables | "public" |
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- Only supports GraphQL SDL (Schema Definition Language), not queries or mutations
|
||||||
|
- Directives are ignored (except for future extensibility)
|
||||||
|
- Interfaces and Unions are not supported
|
||||||
|
- GraphQL's concept of "schema" is different from database schemas; all types go into a single database schema (default: "public")
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
**Input** (`schema.graphql`):
|
||||||
|
```graphql
|
||||||
|
scalar DateTime
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
ADMIN
|
||||||
|
USER
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
role: Role!
|
||||||
|
createdAt: DateTime!
|
||||||
|
posts: [Post!]!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
content: String
|
||||||
|
published: Boolean!
|
||||||
|
author: User!
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result**: Database with:
|
||||||
|
- 2 tables: `User` and `Post`
|
||||||
|
- `Post` table has `authorId` foreign key to `User.id`
|
||||||
|
- `Role` enum with values: ADMIN, USER
|
||||||
|
- Custom scalar `DateTime` mapped to `timestamp`
|
||||||
279
pkg/readers/graphql/reader.go
Normal file
279
pkg/readers/graphql/reader.go
Normal file
@@ -0,0 +1,279 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for GraphQL reader")
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := os.ReadFile(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.parseGraphQL(string(content))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schemas found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
schema, err := r.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) == 0 {
|
||||||
|
return nil, fmt.Errorf("no tables found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return schema.Tables[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type parseContext struct {
|
||||||
|
inType bool
|
||||||
|
inEnum bool
|
||||||
|
currentType string
|
||||||
|
typeLines []string
|
||||||
|
currentEnum string
|
||||||
|
enumLines []string
|
||||||
|
customScalars map[string]bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) parseGraphQL(content string) (*models.Database, error) {
|
||||||
|
dbName := "database"
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
if name, ok := r.options.Metadata["name"].(string); ok {
|
||||||
|
dbName = name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
db := models.InitDatabase(dbName)
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
ctx := &parseContext{
|
||||||
|
customScalars: make(map[string]bool),
|
||||||
|
}
|
||||||
|
|
||||||
|
// First pass: collect custom scalars and enums
|
||||||
|
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||||
|
scalarRegex := regexp.MustCompile(`^\s*scalar\s+(\w+)`)
|
||||||
|
enumRegex := regexp.MustCompile(`^\s*enum\s+(\w+)\s*\{`)
|
||||||
|
closingBraceRegex := regexp.MustCompile(`^\s*\}`)
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := scalarRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
ctx.customScalars[matches[1]] = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
ctx.inEnum = true
|
||||||
|
ctx.currentEnum = matches[1]
|
||||||
|
ctx.enumLines = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if closingBraceRegex.MatchString(trimmed) && ctx.inEnum {
|
||||||
|
r.parseEnum(ctx.currentEnum, ctx.enumLines, schema)
|
||||||
|
// Add enum name to custom scalars for type detection
|
||||||
|
ctx.customScalars[ctx.currentEnum] = true
|
||||||
|
ctx.inEnum = false
|
||||||
|
ctx.currentEnum = ""
|
||||||
|
ctx.enumLines = nil
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.inEnum {
|
||||||
|
ctx.enumLines = append(ctx.enumLines, line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanner error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second pass: parse types
|
||||||
|
scanner = bufio.NewScanner(strings.NewReader(content))
|
||||||
|
typeRegex := regexp.MustCompile(`^\s*type\s+(\w+)\s*\{`)
|
||||||
|
ctx.inType = false
|
||||||
|
ctx.inEnum = false
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := typeRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
ctx.inType = true
|
||||||
|
ctx.currentType = matches[1]
|
||||||
|
ctx.typeLines = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if closingBraceRegex.MatchString(trimmed) && ctx.inType {
|
||||||
|
if err := r.parseType(ctx.currentType, ctx.typeLines, schema, ctx); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse type %s: %w", ctx.currentType, err)
|
||||||
|
}
|
||||||
|
ctx.inType = false
|
||||||
|
ctx.currentType = ""
|
||||||
|
ctx.typeLines = nil
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.inType {
|
||||||
|
ctx.typeLines = append(ctx.typeLines, line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanner error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
// Third pass: detect and create relationships
|
||||||
|
if err := r.detectAndCreateRelationships(schema, ctx); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create relationships: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type fieldInfo struct {
|
||||||
|
name string
|
||||||
|
typeName string
|
||||||
|
isArray bool
|
||||||
|
isNullable bool
|
||||||
|
innerNullable bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) parseType(typeName string, lines []string, schema *models.Schema, ctx *parseContext) error {
|
||||||
|
table := models.InitTable(typeName, schema.Name)
|
||||||
|
table.Metadata = make(map[string]any)
|
||||||
|
|
||||||
|
// Store field info for relationship detection
|
||||||
|
relationFields := make(map[string]*fieldInfo)
|
||||||
|
|
||||||
|
fieldRegex := regexp.MustCompile(`^\s*(\w+)\s*:\s*(\[)?(\w+)(!)?(\])?(!)?\s*`)
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
matches := fieldRegex.FindStringSubmatch(trimmed)
|
||||||
|
if matches == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldName := matches[1]
|
||||||
|
hasOpenBracket := matches[2] == "["
|
||||||
|
baseType := matches[3]
|
||||||
|
innerNonNull := matches[4] == "!"
|
||||||
|
hasCloseBracket := matches[5] == "]"
|
||||||
|
outerNonNull := matches[6] == "!"
|
||||||
|
|
||||||
|
isArray := hasOpenBracket && hasCloseBracket
|
||||||
|
|
||||||
|
// Determine if this is a scalar or a relation
|
||||||
|
if r.isScalarType(baseType, ctx) {
|
||||||
|
// This is a scalar field
|
||||||
|
column := models.InitColumn(fieldName, table.Name, schema.Name)
|
||||||
|
column.Type = r.graphQLTypeToSQL(baseType, fieldName, typeName)
|
||||||
|
|
||||||
|
if isArray {
|
||||||
|
// Array of scalars: use array type
|
||||||
|
column.Type += "[]"
|
||||||
|
column.NotNull = outerNonNull
|
||||||
|
} else {
|
||||||
|
column.NotNull = !isArray && innerNonNull
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a primary key (convention: field named "id")
|
||||||
|
if fieldName == "id" {
|
||||||
|
column.IsPrimaryKey = true
|
||||||
|
column.AutoIncrement = true
|
||||||
|
}
|
||||||
|
|
||||||
|
table.Columns[fieldName] = column
|
||||||
|
} else {
|
||||||
|
// This is a relation field - store for later processing
|
||||||
|
relationFields[fieldName] = &fieldInfo{
|
||||||
|
name: fieldName,
|
||||||
|
typeName: baseType,
|
||||||
|
isArray: isArray,
|
||||||
|
isNullable: !innerNonNull && !isArray,
|
||||||
|
innerNullable: !innerNonNull && isArray,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store relation fields in table metadata for relationship detection
|
||||||
|
if len(relationFields) > 0 {
|
||||||
|
table.Metadata["relationFields"] = relationFields
|
||||||
|
}
|
||||||
|
|
||||||
|
schema.Tables = append(schema.Tables, table)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
|
||||||
|
enum := &models.Enum{
|
||||||
|
Name: enumName,
|
||||||
|
Schema: schema.Name,
|
||||||
|
Values: make([]string, 0),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Enum values are simple identifiers
|
||||||
|
enum.Values = append(enum.Values, trimmed)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema.Enums = append(schema.Enums, enum)
|
||||||
|
}
|
||||||
362
pkg/readers/graphql/reader_test.go
Normal file
362
pkg/readers/graphql/reader_test.go
Normal file
@@ -0,0 +1,362 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Simple(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
userTable := schema.Tables[0]
|
||||||
|
if userTable.Name != "User" {
|
||||||
|
t.Errorf("Expected table name 'User', got '%s'", userTable.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify columns
|
||||||
|
expectedColumns := map[string]struct {
|
||||||
|
sqlType string
|
||||||
|
notNull bool
|
||||||
|
isPK bool
|
||||||
|
}{
|
||||||
|
"id": {"bigint", true, true},
|
||||||
|
"email": {"text", true, false},
|
||||||
|
"name": {"text", false, false},
|
||||||
|
"age": {"integer", false, false},
|
||||||
|
"active": {"boolean", true, false},
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(userTable.Columns) != len(expectedColumns) {
|
||||||
|
t.Fatalf("Expected %d columns, got %d", len(expectedColumns), len(userTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
for colName, expected := range expectedColumns {
|
||||||
|
col, exists := userTable.Columns[colName]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Expected column '%s' not found", colName)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.Type != expected.sqlType {
|
||||||
|
t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expected.sqlType, col.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.NotNull != expected.notNull {
|
||||||
|
t.Errorf("Column '%s': expected NotNull=%v, got %v", colName, expected.notNull, col.NotNull)
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.IsPrimaryKey != expected.isPK {
|
||||||
|
t.Errorf("Column '%s': expected IsPrimaryKey=%v, got %v", colName, expected.isPK, col.IsPrimaryKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_WithRelations(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "relations.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
|
||||||
|
if len(schema.Tables) != 2 {
|
||||||
|
t.Fatalf("Expected 2 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find Post table (should have FK to User)
|
||||||
|
var postTable *models.Table
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == "Post" {
|
||||||
|
postTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if postTable == nil {
|
||||||
|
t.Fatal("Post table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify authorId FK column was created
|
||||||
|
authorIdCol, exists := postTable.Columns["authorId"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Expected 'authorId' FK column not found in Post table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if authorIdCol.Type != "bigint" {
|
||||||
|
t.Errorf("Expected authorId type 'bigint', got '%s'", authorIdCol.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !authorIdCol.NotNull {
|
||||||
|
t.Error("Expected authorId to be NOT NULL")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify FK constraint
|
||||||
|
fkConstraintFound := false
|
||||||
|
for _, constraint := range postTable.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
if constraint.ReferencedTable == "User" && len(constraint.Columns) > 0 && constraint.Columns[0] == "authorId" {
|
||||||
|
fkConstraintFound = true
|
||||||
|
if constraint.OnDelete != "CASCADE" {
|
||||||
|
t.Errorf("Expected OnDelete CASCADE, got %s", constraint.OnDelete)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !fkConstraintFound {
|
||||||
|
t.Error("Foreign key constraint from Post to User not found")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_WithEnums(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "enums.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
|
||||||
|
if len(schema.Enums) != 1 {
|
||||||
|
t.Fatalf("Expected 1 enum, got %d", len(schema.Enums))
|
||||||
|
}
|
||||||
|
|
||||||
|
roleEnum := schema.Enums[0]
|
||||||
|
if roleEnum.Name != "Role" {
|
||||||
|
t.Errorf("Expected enum name 'Role', got '%s'", roleEnum.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedValues := []string{"ADMIN", "USER", "GUEST"}
|
||||||
|
if len(roleEnum.Values) != len(expectedValues) {
|
||||||
|
t.Fatalf("Expected %d enum values, got %d", len(expectedValues), len(roleEnum.Values))
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, expected := range expectedValues {
|
||||||
|
if roleEnum.Values[i] != expected {
|
||||||
|
t.Errorf("Expected enum value '%s' at index %d, got '%s'", expected, i, roleEnum.Values[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify role column in User table
|
||||||
|
userTable := schema.Tables[0]
|
||||||
|
roleCol, exists := userTable.Columns["role"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Expected 'role' column not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
if roleCol.Type != "Role" {
|
||||||
|
t.Errorf("Expected role type 'Role', got '%s'", roleCol.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_CustomScalars(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "custom_scalars.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
userTable := schema.Tables[0]
|
||||||
|
|
||||||
|
// Verify custom scalar mappings
|
||||||
|
expectedTypes := map[string]string{
|
||||||
|
"createdAt": "timestamp",
|
||||||
|
"metadata": "jsonb",
|
||||||
|
"birthDate": "date",
|
||||||
|
}
|
||||||
|
|
||||||
|
for colName, expectedType := range expectedTypes {
|
||||||
|
col, exists := userTable.Columns[colName]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Expected column '%s' not found", colName)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.Type != expectedType {
|
||||||
|
t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expectedType, col.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_UUIDMetadata(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"idType": "uuid",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
userTable := schema.Tables[0]
|
||||||
|
|
||||||
|
idCol, exists := userTable.Columns["id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Expected 'id' column not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
if idCol.Type != "uuid" {
|
||||||
|
t.Errorf("Expected id type 'uuid' with metadata, got '%s'", idCol.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Complex(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "complex.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
|
||||||
|
// Should have 5 tables: User, Profile, Post, Tag, and PostTag (join table)
|
||||||
|
expectedTableCount := 5
|
||||||
|
if len(schema.Tables) != expectedTableCount {
|
||||||
|
t.Fatalf("Expected %d tables, got %d", expectedTableCount, len(schema.Tables))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify PostTag join table exists (many-to-many between Post and Tag)
|
||||||
|
var joinTable *models.Table
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == "PostTag" {
|
||||||
|
joinTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if joinTable == nil {
|
||||||
|
t.Fatal("Expected PostTag join table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify join table has both FK columns
|
||||||
|
if _, exists := joinTable.Columns["postId"]; !exists {
|
||||||
|
t.Error("Expected 'postId' column in PostTag join table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists := joinTable.Columns["tagId"]; !exists {
|
||||||
|
t.Error("Expected 'tagId' column in PostTag join table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify composite primary key
|
||||||
|
pkFound := false
|
||||||
|
for _, constraint := range joinTable.Constraints {
|
||||||
|
if constraint.Type == models.PrimaryKeyConstraint {
|
||||||
|
if len(constraint.Columns) == 2 {
|
||||||
|
pkFound = true
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !pkFound {
|
||||||
|
t.Error("Expected composite primary key in PostTag join table")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Errorf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadTable() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Name != "User" {
|
||||||
|
t.Errorf("Expected table name 'User', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_InvalidPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/path.graphql",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for invalid path, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_EmptyPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for empty path, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
225
pkg/readers/graphql/relationships.go
Normal file
225
pkg/readers/graphql/relationships.go
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *Reader) detectAndCreateRelationships(schema *models.Schema, ctx *parseContext) error {
|
||||||
|
// Build table lookup map
|
||||||
|
tableMap := make(map[string]*models.Table)
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
tableMap[table.Name] = table
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process each table's relation fields
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
|
||||||
|
if !ok || len(relationFields) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for fieldName, fieldInfo := range relationFields {
|
||||||
|
targetTable, exists := tableMap[fieldInfo.typeName]
|
||||||
|
if !exists {
|
||||||
|
// Referenced type doesn't exist - might be an interface/union, skip
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldInfo.isArray {
|
||||||
|
// This is a one-to-many or many-to-many reverse side
|
||||||
|
// Check if target table has a reverse array field
|
||||||
|
if r.hasReverseArrayField(targetTable, table.Name) {
|
||||||
|
// Bidirectional array = many-to-many
|
||||||
|
// Only create join table once (lexicographically first table creates it)
|
||||||
|
if table.Name < targetTable.Name {
|
||||||
|
if err := r.createManyToManyJoinTable(schema, table, targetTable, fieldName, tableMap); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// For one-to-many, no action needed (FK is on the other table)
|
||||||
|
} else {
|
||||||
|
// This is a many-to-one or one-to-one
|
||||||
|
// Create FK column on this table
|
||||||
|
if err := r.createForeignKeyColumn(table, targetTable, fieldName, fieldInfo.isNullable, schema); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up metadata
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
delete(table.Metadata, "relationFields")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) hasReverseArrayField(table *models.Table, targetTypeName string) bool {
|
||||||
|
relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fieldInfo := range relationFields {
|
||||||
|
if fieldInfo.typeName == targetTypeName && fieldInfo.isArray {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) createForeignKeyColumn(fromTable, toTable *models.Table, fieldName string, nullable bool, schema *models.Schema) error {
|
||||||
|
// Get primary key from target table
|
||||||
|
pkCol := toTable.GetPrimaryKey()
|
||||||
|
if pkCol == nil {
|
||||||
|
return fmt.Errorf("target table %s has no primary key for relationship", toTable.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create FK column name: {fieldName}Id
|
||||||
|
fkColName := fieldName + "Id"
|
||||||
|
|
||||||
|
// Check if column already exists (shouldn't happen but be safe)
|
||||||
|
if _, exists := fromTable.Columns[fkColName]; exists {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create FK column
|
||||||
|
fkCol := models.InitColumn(fkColName, fromTable.Name, schema.Name)
|
||||||
|
fkCol.Type = pkCol.Type
|
||||||
|
fkCol.NotNull = !nullable
|
||||||
|
|
||||||
|
fromTable.Columns[fkColName] = fkCol
|
||||||
|
|
||||||
|
// Create FK constraint
|
||||||
|
constraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", fromTable.Name, fieldName),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
constraint.Schema = schema.Name
|
||||||
|
constraint.Table = fromTable.Name
|
||||||
|
constraint.Columns = []string{fkColName}
|
||||||
|
constraint.ReferencedSchema = schema.Name
|
||||||
|
constraint.ReferencedTable = toTable.Name
|
||||||
|
constraint.ReferencedColumns = []string{pkCol.Name}
|
||||||
|
constraint.OnDelete = "CASCADE"
|
||||||
|
constraint.OnUpdate = "RESTRICT"
|
||||||
|
|
||||||
|
fromTable.Constraints[constraint.Name] = constraint
|
||||||
|
|
||||||
|
// Create relationship
|
||||||
|
relationship := models.InitRelationship(
|
||||||
|
fmt.Sprintf("rel_%s_%s", fromTable.Name, fieldName),
|
||||||
|
models.OneToMany,
|
||||||
|
)
|
||||||
|
relationship.FromTable = fromTable.Name
|
||||||
|
relationship.FromSchema = schema.Name
|
||||||
|
relationship.FromColumns = []string{fkColName}
|
||||||
|
relationship.ToTable = toTable.Name
|
||||||
|
relationship.ToSchema = schema.Name
|
||||||
|
relationship.ToColumns = []string{pkCol.Name}
|
||||||
|
relationship.ForeignKey = constraint.Name
|
||||||
|
|
||||||
|
fromTable.Relationships[relationship.Name] = relationship
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) createManyToManyJoinTable(schema *models.Schema, table1, table2 *models.Table, fieldName string, tableMap map[string]*models.Table) error {
|
||||||
|
// Create join table name
|
||||||
|
joinTableName := table1.Name + table2.Name
|
||||||
|
|
||||||
|
// Check if join table already exists
|
||||||
|
if _, exists := tableMap[joinTableName]; exists {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get primary keys
|
||||||
|
pk1 := table1.GetPrimaryKey()
|
||||||
|
pk2 := table2.GetPrimaryKey()
|
||||||
|
|
||||||
|
if pk1 == nil || pk2 == nil {
|
||||||
|
return fmt.Errorf("cannot create many-to-many: tables must have primary keys")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create join table
|
||||||
|
joinTable := models.InitTable(joinTableName, schema.Name)
|
||||||
|
|
||||||
|
// Create FK column for table1
|
||||||
|
fkCol1Name := strings.ToLower(table1.Name) + "Id"
|
||||||
|
fkCol1 := models.InitColumn(fkCol1Name, joinTable.Name, schema.Name)
|
||||||
|
fkCol1.Type = pk1.Type
|
||||||
|
fkCol1.NotNull = true
|
||||||
|
joinTable.Columns[fkCol1Name] = fkCol1
|
||||||
|
|
||||||
|
// Create FK column for table2
|
||||||
|
fkCol2Name := strings.ToLower(table2.Name) + "Id"
|
||||||
|
fkCol2 := models.InitColumn(fkCol2Name, joinTable.Name, schema.Name)
|
||||||
|
fkCol2.Type = pk2.Type
|
||||||
|
fkCol2.NotNull = true
|
||||||
|
joinTable.Columns[fkCol2Name] = fkCol2
|
||||||
|
|
||||||
|
// Create composite primary key
|
||||||
|
pkConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("pk_%s", joinTableName),
|
||||||
|
models.PrimaryKeyConstraint,
|
||||||
|
)
|
||||||
|
pkConstraint.Schema = schema.Name
|
||||||
|
pkConstraint.Table = joinTable.Name
|
||||||
|
pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
|
||||||
|
joinTable.Constraints[pkConstraint.Name] = pkConstraint
|
||||||
|
|
||||||
|
// Create FK constraint to table1
|
||||||
|
fk1 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, table1.Name),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk1.Schema = schema.Name
|
||||||
|
fk1.Table = joinTable.Name
|
||||||
|
fk1.Columns = []string{fkCol1Name}
|
||||||
|
fk1.ReferencedSchema = schema.Name
|
||||||
|
fk1.ReferencedTable = table1.Name
|
||||||
|
fk1.ReferencedColumns = []string{pk1.Name}
|
||||||
|
fk1.OnDelete = "CASCADE"
|
||||||
|
fk1.OnUpdate = "RESTRICT"
|
||||||
|
joinTable.Constraints[fk1.Name] = fk1
|
||||||
|
|
||||||
|
// Create FK constraint to table2
|
||||||
|
fk2 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, table2.Name),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk2.Schema = schema.Name
|
||||||
|
fk2.Table = joinTable.Name
|
||||||
|
fk2.Columns = []string{fkCol2Name}
|
||||||
|
fk2.ReferencedSchema = schema.Name
|
||||||
|
fk2.ReferencedTable = table2.Name
|
||||||
|
fk2.ReferencedColumns = []string{pk2.Name}
|
||||||
|
fk2.OnDelete = "CASCADE"
|
||||||
|
fk2.OnUpdate = "RESTRICT"
|
||||||
|
joinTable.Constraints[fk2.Name] = fk2
|
||||||
|
|
||||||
|
// Create relationships
|
||||||
|
rel1 := models.InitRelationship(
|
||||||
|
fmt.Sprintf("rel_%s_%s_%s", joinTableName, table1.Name, table2.Name),
|
||||||
|
models.ManyToMany,
|
||||||
|
)
|
||||||
|
rel1.FromTable = table1.Name
|
||||||
|
rel1.FromSchema = schema.Name
|
||||||
|
rel1.ToTable = table2.Name
|
||||||
|
rel1.ToSchema = schema.Name
|
||||||
|
rel1.ThroughTable = joinTableName
|
||||||
|
rel1.ThroughSchema = schema.Name
|
||||||
|
joinTable.Relationships[rel1.Name] = rel1
|
||||||
|
|
||||||
|
// Add join table to schema
|
||||||
|
schema.Tables = append(schema.Tables, joinTable)
|
||||||
|
tableMap[joinTableName] = joinTable
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
97
pkg/readers/graphql/type_mapping.go
Normal file
97
pkg/readers/graphql/type_mapping.go
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
func (r *Reader) isScalarType(typeName string, ctx *parseContext) bool {
|
||||||
|
// Built-in GraphQL scalars
|
||||||
|
builtInScalars := map[string]bool{
|
||||||
|
"ID": true,
|
||||||
|
"String": true,
|
||||||
|
"Int": true,
|
||||||
|
"Float": true,
|
||||||
|
"Boolean": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
if builtInScalars[typeName] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Custom scalars declared in the schema
|
||||||
|
if ctx.customScalars[typeName] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Common custom scalars (even if not declared)
|
||||||
|
commonCustomScalars := map[string]bool{
|
||||||
|
"DateTime": true,
|
||||||
|
"JSON": true,
|
||||||
|
"Date": true,
|
||||||
|
"Time": true,
|
||||||
|
"Upload": true,
|
||||||
|
"Decimal": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
return commonCustomScalars[typeName]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Reader) graphQLTypeToSQL(gqlType string, fieldName string, typeName string) string {
|
||||||
|
// Check for ID type with configurable mapping
|
||||||
|
if gqlType == "ID" {
|
||||||
|
// Check metadata for ID type preference
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
// Global idType setting
|
||||||
|
if idType, ok := r.options.Metadata["idType"].(string); ok {
|
||||||
|
if idType == "uuid" {
|
||||||
|
return "uuid"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Per-type ID mapping
|
||||||
|
if typeIdMappings, ok := r.options.Metadata["typeIdMappings"].(map[string]string); ok {
|
||||||
|
if idType, ok := typeIdMappings[typeName]; ok {
|
||||||
|
if idType == "uuid" {
|
||||||
|
return "uuid"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "bigint" // Default
|
||||||
|
}
|
||||||
|
|
||||||
|
// Custom scalar mappings
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
if customMappings, ok := r.options.Metadata["customScalarMappings"].(map[string]string); ok {
|
||||||
|
if sqlType, ok := customMappings[gqlType]; ok {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Built-in custom scalar mappings
|
||||||
|
customScalars := map[string]string{
|
||||||
|
"DateTime": "timestamp",
|
||||||
|
"JSON": "jsonb",
|
||||||
|
"Date": "date",
|
||||||
|
"Time": "time",
|
||||||
|
"Decimal": "numeric",
|
||||||
|
"Upload": "bytea",
|
||||||
|
}
|
||||||
|
if sqlType, ok := customScalars[gqlType]; ok {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard scalar mappings
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"String": "text",
|
||||||
|
"Int": "integer",
|
||||||
|
"Float": "double precision",
|
||||||
|
"Boolean": "boolean",
|
||||||
|
}
|
||||||
|
|
||||||
|
if sqlType, ok := typeMap[gqlType]; ok {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// If not a known scalar, assume it's an enum or custom type
|
||||||
|
// Return as-is (might be an enum)
|
||||||
|
return gqlType
|
||||||
|
}
|
||||||
152
pkg/readers/json/README.md
Normal file
152
pkg/readers/json/README.md
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
# JSON Reader
|
||||||
|
|
||||||
|
Reads database schema definitions from JSON files.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The JSON Reader parses JSON files that define database schemas in RelSpec's canonical JSON format and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Reads RelSpec's standard JSON schema format
|
||||||
|
- Supports complete schema representation including:
|
||||||
|
- Databases and schemas
|
||||||
|
- Tables, columns, and data types
|
||||||
|
- Constraints (PK, FK, unique, check)
|
||||||
|
- Indexes
|
||||||
|
- Relationships
|
||||||
|
- Views and sequences
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/schema.json",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := json.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read JSON schema and convert to GORM models
|
||||||
|
relspec --input json --in-file schema.json --output gorm --out-file models.go
|
||||||
|
|
||||||
|
# Convert JSON to PostgreSQL DDL
|
||||||
|
relspec --input json --in-file database.json --output pgsql --out-file schema.sql
|
||||||
|
|
||||||
|
# Transform JSON to YAML
|
||||||
|
relspec --input json --in-file schema.json --output yaml --out-file schema.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example JSON Schema
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "myapp",
|
||||||
|
"database_type": "postgresql",
|
||||||
|
"schemas": [
|
||||||
|
{
|
||||||
|
"name": "public",
|
||||||
|
"tables": [
|
||||||
|
{
|
||||||
|
"name": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": {
|
||||||
|
"id": {
|
||||||
|
"name": "id",
|
||||||
|
"type": "bigint",
|
||||||
|
"not_null": true,
|
||||||
|
"is_primary_key": true,
|
||||||
|
"auto_increment": true,
|
||||||
|
"sequence": 1
|
||||||
|
},
|
||||||
|
"username": {
|
||||||
|
"name": "username",
|
||||||
|
"type": "varchar",
|
||||||
|
"length": 50,
|
||||||
|
"not_null": true,
|
||||||
|
"sequence": 2
|
||||||
|
},
|
||||||
|
"email": {
|
||||||
|
"name": "email",
|
||||||
|
"type": "varchar",
|
||||||
|
"length": 100,
|
||||||
|
"not_null": true,
|
||||||
|
"sequence": 3
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"constraints": {
|
||||||
|
"pk_users": {
|
||||||
|
"name": "pk_users",
|
||||||
|
"type": "PRIMARY KEY",
|
||||||
|
"columns": ["id"]
|
||||||
|
},
|
||||||
|
"uq_users_username": {
|
||||||
|
"name": "uq_users_username",
|
||||||
|
"type": "UNIQUE",
|
||||||
|
"columns": ["username"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"indexes": {
|
||||||
|
"idx_users_email": {
|
||||||
|
"name": "idx_users_email",
|
||||||
|
"columns": ["email"],
|
||||||
|
"unique": false,
|
||||||
|
"type": "btree"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Schema Structure
|
||||||
|
|
||||||
|
The JSON format follows RelSpec's internal model structure:
|
||||||
|
|
||||||
|
- `Database` - Top-level container
|
||||||
|
- `name` - Database name
|
||||||
|
- `database_type` - Database system (postgresql, mysql, etc.)
|
||||||
|
- `schemas[]` - Array of schemas
|
||||||
|
|
||||||
|
- `Schema` - Schema/namespace
|
||||||
|
- `name` - Schema name
|
||||||
|
- `tables[]` - Array of tables
|
||||||
|
- `views[]` - Array of views
|
||||||
|
- `sequences[]` - Array of sequences
|
||||||
|
|
||||||
|
- `Table` - Table definition
|
||||||
|
- `name` - Table name
|
||||||
|
- `columns{}` - Map of columns
|
||||||
|
- `constraints{}` - Map of constraints
|
||||||
|
- `indexes{}` - Map of indexes
|
||||||
|
- `relationships{}` - Map of relationships
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- This is RelSpec's native interchange format
|
||||||
|
- Preserves complete schema information
|
||||||
|
- Ideal for version control and schema documentation
|
||||||
|
- Can be used as an intermediate format for transformations
|
||||||
138
pkg/readers/pgsql/README.md
Normal file
138
pkg/readers/pgsql/README.md
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
# PostgreSQL Reader
|
||||||
|
|
||||||
|
Reads schema information directly from a live PostgreSQL database.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The PostgreSQL Reader connects to a PostgreSQL database and introspects its schema, extracting complete information about tables, columns, constraints, indexes, views, and sequences.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Direct database introspection
|
||||||
|
- Extracts complete schema information including:
|
||||||
|
- Tables and columns
|
||||||
|
- Primary keys, foreign keys, unique constraints, check constraints
|
||||||
|
- Indexes
|
||||||
|
- Views
|
||||||
|
- Sequences
|
||||||
|
- Supports multiple schemas
|
||||||
|
- Captures constraint actions (ON DELETE, ON UPDATE)
|
||||||
|
- Derives relationships from foreign keys
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
ConnectionString: "postgres://user:password@localhost:5432/mydb?sslmode=disable",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := pgsql.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Database: %s\n", db.Name)
|
||||||
|
fmt.Printf("Schemas: %d\n", len(db.Schemas))
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
fmt.Printf(" Schema: %s, Tables: %d\n", schema.Name, len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Inspect PostgreSQL database and export to JSON
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://user:password@localhost:5432/mydb" \
|
||||||
|
--output json \
|
||||||
|
--out-file schema.json
|
||||||
|
|
||||||
|
# Generate GORM models from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://user:password@localhost:5432/mydb" \
|
||||||
|
--output gorm \
|
||||||
|
--out-file models.go
|
||||||
|
|
||||||
|
# Export database structure to YAML
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb?sslmode=disable" \
|
||||||
|
--output yaml \
|
||||||
|
--out-file schema.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Connection String Format
|
||||||
|
|
||||||
|
The reader uses PostgreSQL connection strings in the format:
|
||||||
|
|
||||||
|
```
|
||||||
|
postgres://username:password@hostname:port/database?parameters
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
```
|
||||||
|
postgres://localhost/mydb
|
||||||
|
postgres://user:pass@localhost:5432/mydb
|
||||||
|
postgres://user@localhost/mydb?sslmode=disable
|
||||||
|
postgres://user:pass@db.example.com:5432/production?sslmode=require
|
||||||
|
```
|
||||||
|
|
||||||
|
## Extracted Information
|
||||||
|
|
||||||
|
### Tables
|
||||||
|
- Table name and schema
|
||||||
|
- Comments/descriptions
|
||||||
|
- All columns with data types, nullable, defaults
|
||||||
|
- Sequences
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
- Column name, data type, length/precision
|
||||||
|
- NULL/NOT NULL constraints
|
||||||
|
- Default values
|
||||||
|
- Auto-increment information
|
||||||
|
- Primary key designation
|
||||||
|
|
||||||
|
### Constraints
|
||||||
|
- Primary keys
|
||||||
|
- Foreign keys (with ON DELETE/UPDATE actions)
|
||||||
|
- Unique constraints
|
||||||
|
- Check constraints
|
||||||
|
|
||||||
|
### Indexes
|
||||||
|
- Index name and type (btree, hash, gist, gin, etc.)
|
||||||
|
- Columns in index
|
||||||
|
- Unique/non-unique
|
||||||
|
- Partial indexes
|
||||||
|
|
||||||
|
### Views
|
||||||
|
- View definitions
|
||||||
|
- Column information
|
||||||
|
|
||||||
|
### Sequences
|
||||||
|
- Sequence properties
|
||||||
|
- Associated tables
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Requires PostgreSQL connection permissions
|
||||||
|
- Reads all non-system schemas (excludes pg_catalog, information_schema, pg_toast)
|
||||||
|
- Captures PostgreSQL-specific data types
|
||||||
|
- Automatically maps PostgreSQL types to canonical types
|
||||||
|
- Preserves relationship metadata for downstream conversion
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- Go library: `github.com/jackc/pgx/v5`
|
||||||
|
- Database user must have SELECT permissions on system catalogs
|
||||||
103
pkg/readers/prisma/README.md
Normal file
103
pkg/readers/prisma/README.md
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
# Prisma Reader
|
||||||
|
|
||||||
|
Reads Prisma schema files and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Prisma Reader parses `.prisma` schema files that define database models using Prisma's schema language and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses Prisma schema syntax
|
||||||
|
- Extracts models, fields, and relationships
|
||||||
|
- Supports Prisma attributes and directives
|
||||||
|
- Handles enums and composite types
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/schema.prisma",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := prisma.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read Prisma schema and convert to JSON
|
||||||
|
relspec --input prisma --in-file schema.prisma --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert Prisma to GORM models
|
||||||
|
relspec --input prisma --in-file schema.prisma --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example Prisma Schema
|
||||||
|
|
||||||
|
```prisma
|
||||||
|
datasource db {
|
||||||
|
provider = "postgresql"
|
||||||
|
url = env("DATABASE_URL")
|
||||||
|
}
|
||||||
|
|
||||||
|
generator client {
|
||||||
|
provider = "prisma-client-js"
|
||||||
|
}
|
||||||
|
|
||||||
|
model User {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
username String @unique @db.VarChar(50)
|
||||||
|
email String @db.VarChar(100)
|
||||||
|
createdAt DateTime @default(now()) @map("created_at")
|
||||||
|
|
||||||
|
posts Post[]
|
||||||
|
|
||||||
|
@@map("users")
|
||||||
|
}
|
||||||
|
|
||||||
|
model Post {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
userId Int @map("user_id")
|
||||||
|
title String @db.VarChar(200)
|
||||||
|
content String @db.Text
|
||||||
|
|
||||||
|
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@map("posts")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported Prisma Attributes
|
||||||
|
|
||||||
|
- `@id` - Primary key
|
||||||
|
- `@unique` - Unique constraint
|
||||||
|
- `@default` - Default value
|
||||||
|
- `@map` - Column name mapping
|
||||||
|
- `@@map` - Table name mapping
|
||||||
|
- `@relation` - Relationship definition
|
||||||
|
- `@db.*` - Database-specific type annotations
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Extracts datasource provider information
|
||||||
|
- Supports `@@map` for custom table names
|
||||||
|
- Handles Prisma-specific types and converts them to standard SQL types
|
||||||
815
pkg/readers/prisma/reader.go
Normal file
815
pkg/readers/prisma/reader.go
Normal file
@@ -0,0 +1,815 @@
|
|||||||
|
package prisma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reader implements the readers.Reader interface for Prisma schema format
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader creates a new Prisma reader with the given options
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDatabase reads and parses Prisma schema input, returning a Database model
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for Prisma reader")
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := os.ReadFile(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.parsePrisma(string(content))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadSchema reads and parses Prisma schema input, returning a Schema model
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schemas found in Prisma schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the first schema
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTable reads and parses Prisma schema input, returning a Table model
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
schema, err := r.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) == 0 {
|
||||||
|
return nil, fmt.Errorf("no tables found in Prisma schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the first table
|
||||||
|
return schema.Tables[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePrisma parses Prisma schema content and returns a Database model
|
||||||
|
func (r *Reader) parsePrisma(content string) (*models.Database, error) {
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
|
||||||
|
if r.options.Metadata != nil {
|
||||||
|
if name, ok := r.options.Metadata["name"].(string); ok {
|
||||||
|
db.Name = name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default schema for Prisma (doesn't have explicit schema concept in most cases)
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
schema.Enums = make([]*models.Enum, 0)
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||||
|
|
||||||
|
// State tracking
|
||||||
|
var currentBlock string // "datasource", "generator", "model", "enum"
|
||||||
|
var currentTable *models.Table
|
||||||
|
var currentEnum *models.Enum
|
||||||
|
var blockContent []string
|
||||||
|
|
||||||
|
// Regex patterns
|
||||||
|
datasourceRegex := regexp.MustCompile(`^datasource\s+\w+\s*{`)
|
||||||
|
generatorRegex := regexp.MustCompile(`^generator\s+\w+\s*{`)
|
||||||
|
modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
|
||||||
|
enumRegex := regexp.MustCompile(`^enum\s+(\w+)\s*{`)
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Skip empty lines and comments
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for block start
|
||||||
|
if matches := datasourceRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
currentBlock = "datasource"
|
||||||
|
blockContent = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := generatorRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
currentBlock = "generator"
|
||||||
|
blockContent = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
currentBlock = "model"
|
||||||
|
tableName := matches[1]
|
||||||
|
currentTable = models.InitTable(tableName, "public")
|
||||||
|
blockContent = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
currentBlock = "enum"
|
||||||
|
enumName := matches[1]
|
||||||
|
currentEnum = &models.Enum{
|
||||||
|
Name: enumName,
|
||||||
|
Schema: "public",
|
||||||
|
Values: make([]string, 0),
|
||||||
|
}
|
||||||
|
blockContent = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for block end
|
||||||
|
if trimmed == "}" {
|
||||||
|
switch currentBlock {
|
||||||
|
case "datasource":
|
||||||
|
r.parseDatasource(blockContent, db)
|
||||||
|
case "generator":
|
||||||
|
// We don't need to do anything with generator blocks
|
||||||
|
case "model":
|
||||||
|
if currentTable != nil {
|
||||||
|
r.parseModelFields(blockContent, currentTable)
|
||||||
|
schema.Tables = append(schema.Tables, currentTable)
|
||||||
|
currentTable = nil
|
||||||
|
}
|
||||||
|
case "enum":
|
||||||
|
if currentEnum != nil {
|
||||||
|
schema.Enums = append(schema.Enums, currentEnum)
|
||||||
|
currentEnum = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
currentBlock = ""
|
||||||
|
blockContent = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Accumulate block content
|
||||||
|
if currentBlock != "" {
|
||||||
|
if currentBlock == "enum" && currentEnum != nil {
|
||||||
|
// For enums, just add the trimmed value
|
||||||
|
if trimmed != "" {
|
||||||
|
currentEnum.Values = append(currentEnum.Values, trimmed)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
blockContent = append(blockContent, line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second pass: resolve relationships
|
||||||
|
r.resolveRelationships(schema)
|
||||||
|
|
||||||
|
db.Schemas = append(db.Schemas, schema)
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseDatasource extracts database type from datasource block
|
||||||
|
func (r *Reader) parseDatasource(lines []string, db *models.Database) {
|
||||||
|
providerRegex := regexp.MustCompile(`provider\s*=\s*"?(\w+)"?`)
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
if matches := providerRegex.FindStringSubmatch(line); matches != nil {
|
||||||
|
provider := matches[1]
|
||||||
|
switch provider {
|
||||||
|
case "postgresql", "postgres":
|
||||||
|
db.DatabaseType = models.PostgresqlDatabaseType
|
||||||
|
case "mysql":
|
||||||
|
db.DatabaseType = "mysql"
|
||||||
|
case "sqlite":
|
||||||
|
db.DatabaseType = models.SqlLiteDatabaseType
|
||||||
|
case "sqlserver":
|
||||||
|
db.DatabaseType = models.MSSQLDatabaseType
|
||||||
|
default:
|
||||||
|
db.DatabaseType = models.PostgresqlDatabaseType
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseModelFields parses model field definitions
|
||||||
|
func (r *Reader) parseModelFields(lines []string, table *models.Table) {
|
||||||
|
fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)
|
||||||
|
blockAttrRegex := regexp.MustCompile(`^@@(\w+)\((.*?)\)`)
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Skip empty lines and comments
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for block attributes (@@id, @@unique, @@index)
|
||||||
|
if matches := blockAttrRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
attrName := matches[1]
|
||||||
|
attrContent := matches[2]
|
||||||
|
r.parseBlockAttribute(attrName, attrContent, table)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse field definition
|
||||||
|
if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
fieldName := matches[1]
|
||||||
|
fieldType := matches[2]
|
||||||
|
modifier := matches[3] // ? or []
|
||||||
|
attributes := matches[4] // @... part
|
||||||
|
|
||||||
|
column := r.parseField(fieldName, fieldType, modifier, attributes, table)
|
||||||
|
if column != nil {
|
||||||
|
table.Columns[column.Name] = column
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseField parses a single field definition
|
||||||
|
func (r *Reader) parseField(name, fieldType, modifier, attributes string, table *models.Table) *models.Column {
|
||||||
|
// Check if this is a relation field (array or references another model)
|
||||||
|
if modifier == "[]" {
|
||||||
|
// Array field - this is a relation field, not a column
|
||||||
|
// We'll handle this in relationship resolution
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a non-primitive type (relation field)
|
||||||
|
// Note: We need to allow enum types through as they're like primitives
|
||||||
|
if !r.isPrimitiveType(fieldType) && !r.isEnumType(fieldType, table) {
|
||||||
|
// This is a relation field (e.g., user User), not a scalar column
|
||||||
|
// Only process this if it has @relation attribute (which means it's the owning side with FK)
|
||||||
|
// Otherwise skip it as it's just the inverse relation field
|
||||||
|
if attributes == "" || !strings.Contains(attributes, "@relation") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// If it has @relation, we still don't create a column for it
|
||||||
|
// The actual FK column will be in the fields: [...] part of @relation
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
column := models.InitColumn(name, table.Name, table.Schema)
|
||||||
|
|
||||||
|
// Map Prisma type to SQL type
|
||||||
|
column.Type = r.prismaTypeToSQL(fieldType)
|
||||||
|
|
||||||
|
// Handle modifiers
|
||||||
|
if modifier == "?" {
|
||||||
|
column.NotNull = false
|
||||||
|
} else {
|
||||||
|
// Default: required fields are NOT NULL
|
||||||
|
column.NotNull = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse field attributes
|
||||||
|
if attributes != "" {
|
||||||
|
r.parseFieldAttributes(attributes, column, table)
|
||||||
|
}
|
||||||
|
|
||||||
|
return column
|
||||||
|
}
|
||||||
|
|
||||||
|
// prismaTypeToSQL converts Prisma types to SQL types
|
||||||
|
func (r *Reader) prismaTypeToSQL(prismaType string) string {
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"String": "text",
|
||||||
|
"Boolean": "boolean",
|
||||||
|
"Int": "integer",
|
||||||
|
"BigInt": "bigint",
|
||||||
|
"Float": "double precision",
|
||||||
|
"Decimal": "decimal",
|
||||||
|
"DateTime": "timestamp",
|
||||||
|
"Json": "jsonb",
|
||||||
|
"Bytes": "bytea",
|
||||||
|
}
|
||||||
|
|
||||||
|
if sqlType, ok := typeMap[prismaType]; ok {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// If not a built-in type, it might be an enum or model reference
|
||||||
|
// For enums, we'll use the enum name directly
|
||||||
|
return prismaType
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseFieldAttributes parses field attributes like @id, @unique, @default
|
||||||
|
func (r *Reader) parseFieldAttributes(attributes string, column *models.Column, table *models.Table) {
|
||||||
|
// @id attribute
|
||||||
|
if strings.Contains(attributes, "@id") {
|
||||||
|
column.IsPrimaryKey = true
|
||||||
|
column.NotNull = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// @unique attribute
|
||||||
|
if regexp.MustCompile(`@unique\b`).MatchString(attributes) {
|
||||||
|
uniqueConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("uq_%s", column.Name),
|
||||||
|
models.UniqueConstraint,
|
||||||
|
)
|
||||||
|
uniqueConstraint.Schema = table.Schema
|
||||||
|
uniqueConstraint.Table = table.Name
|
||||||
|
uniqueConstraint.Columns = []string{column.Name}
|
||||||
|
table.Constraints[uniqueConstraint.Name] = uniqueConstraint
|
||||||
|
}
|
||||||
|
|
||||||
|
// @default attribute - extract value with balanced parentheses
|
||||||
|
if strings.Contains(attributes, "@default(") {
|
||||||
|
defaultValue := r.extractDefaultValue(attributes)
|
||||||
|
if defaultValue != "" {
|
||||||
|
r.parseDefaultValue(defaultValue, column)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// @updatedAt attribute - store in comment for now
|
||||||
|
if strings.Contains(attributes, "@updatedAt") {
|
||||||
|
if column.Comment != "" {
|
||||||
|
column.Comment += "; @updatedAt"
|
||||||
|
} else {
|
||||||
|
column.Comment = "@updatedAt"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// @relation attribute - we'll handle this in relationship resolution
|
||||||
|
// For now, just note that this field is part of a relation
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractDefaultValue extracts the default value from @default(...) handling nested parentheses
|
||||||
|
func (r *Reader) extractDefaultValue(attributes string) string {
|
||||||
|
idx := strings.Index(attributes, "@default(")
|
||||||
|
if idx == -1 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
start := idx + len("@default(")
|
||||||
|
depth := 1
|
||||||
|
i := start
|
||||||
|
|
||||||
|
for i < len(attributes) && depth > 0 {
|
||||||
|
switch attributes[i] {
|
||||||
|
case '(':
|
||||||
|
depth++
|
||||||
|
case ')':
|
||||||
|
depth--
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
if depth == 0 {
|
||||||
|
return attributes[start : i-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseDefaultValue parses Prisma default value expressions
|
||||||
|
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
|
||||||
|
defaultExpr = strings.TrimSpace(defaultExpr)
|
||||||
|
|
||||||
|
switch defaultExpr {
|
||||||
|
case "autoincrement()":
|
||||||
|
column.AutoIncrement = true
|
||||||
|
case "now()":
|
||||||
|
column.Default = "now()"
|
||||||
|
case "uuid()":
|
||||||
|
column.Default = "gen_random_uuid()"
|
||||||
|
case "cuid()":
|
||||||
|
// CUID is Prisma-specific, store in comment
|
||||||
|
if column.Comment != "" {
|
||||||
|
column.Comment += "; default(cuid())"
|
||||||
|
} else {
|
||||||
|
column.Comment = "default(cuid())"
|
||||||
|
}
|
||||||
|
case "true":
|
||||||
|
column.Default = true
|
||||||
|
case "false":
|
||||||
|
column.Default = false
|
||||||
|
default:
|
||||||
|
// Check if it's a string literal
|
||||||
|
if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
|
||||||
|
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||||
|
} else if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
|
||||||
|
column.Default = defaultExpr[1 : len(defaultExpr)-1]
|
||||||
|
} else {
|
||||||
|
// Try to parse as number or enum value
|
||||||
|
column.Default = defaultExpr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseBlockAttribute parses block-level attributes like @@id, @@unique, @@index
|
||||||
|
func (r *Reader) parseBlockAttribute(attrName, content string, table *models.Table) {
|
||||||
|
// Extract column list from brackets [col1, col2]
|
||||||
|
colListRegex := regexp.MustCompile(`\[(.*?)\]`)
|
||||||
|
matches := colListRegex.FindStringSubmatch(content)
|
||||||
|
if matches == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
columnList := strings.Split(matches[1], ",")
|
||||||
|
columns := make([]string, 0)
|
||||||
|
for _, col := range columnList {
|
||||||
|
columns = append(columns, strings.TrimSpace(col))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch attrName {
|
||||||
|
case "id":
|
||||||
|
// Composite primary key
|
||||||
|
for _, colName := range columns {
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
col.IsPrimaryKey = true
|
||||||
|
col.NotNull = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Also create a PK constraint
|
||||||
|
pkConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("pk_%s", table.Name),
|
||||||
|
models.PrimaryKeyConstraint,
|
||||||
|
)
|
||||||
|
pkConstraint.Schema = table.Schema
|
||||||
|
pkConstraint.Table = table.Name
|
||||||
|
pkConstraint.Columns = columns
|
||||||
|
table.Constraints[pkConstraint.Name] = pkConstraint
|
||||||
|
|
||||||
|
case "unique":
|
||||||
|
// Multi-column unique constraint
|
||||||
|
uniqueConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("uq_%s_%s", table.Name, strings.Join(columns, "_")),
|
||||||
|
models.UniqueConstraint,
|
||||||
|
)
|
||||||
|
uniqueConstraint.Schema = table.Schema
|
||||||
|
uniqueConstraint.Table = table.Name
|
||||||
|
uniqueConstraint.Columns = columns
|
||||||
|
table.Constraints[uniqueConstraint.Name] = uniqueConstraint
|
||||||
|
|
||||||
|
case "index":
|
||||||
|
// Index
|
||||||
|
index := models.InitIndex(
|
||||||
|
fmt.Sprintf("idx_%s_%s", table.Name, strings.Join(columns, "_")),
|
||||||
|
table.Name,
|
||||||
|
table.Schema,
|
||||||
|
)
|
||||||
|
index.Columns = columns
|
||||||
|
table.Indexes[index.Name] = index
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// relationField stores information about a relation field for second-pass processing
|
||||||
|
type relationField struct {
|
||||||
|
tableName string
|
||||||
|
fieldName string
|
||||||
|
relatedModel string
|
||||||
|
isArray bool
|
||||||
|
relationAttr string
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveRelationships performs a second pass to resolve @relation attributes
|
||||||
|
func (r *Reader) resolveRelationships(schema *models.Schema) {
|
||||||
|
// Build a map of table names for quick lookup
|
||||||
|
tableMap := make(map[string]*models.Table)
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
tableMap[table.Name] = table
|
||||||
|
}
|
||||||
|
|
||||||
|
// First, we need to re-parse to find relation fields
|
||||||
|
// We'll re-read the file to extract relation information
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := os.ReadFile(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
relations := r.extractRelationFields(string(content))
|
||||||
|
|
||||||
|
// Process explicit @relation attributes to create FK constraints
|
||||||
|
for _, rel := range relations {
|
||||||
|
if rel.relationAttr != "" {
|
||||||
|
r.createConstraintFromRelation(rel, tableMap, schema)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect implicit many-to-many relationships
|
||||||
|
r.detectImplicitManyToMany(relations, tableMap, schema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractRelationFields extracts relation field information from the schema
|
||||||
|
func (r *Reader) extractRelationFields(content string) []relationField {
|
||||||
|
relations := make([]relationField, 0)
|
||||||
|
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||||
|
|
||||||
|
modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
|
||||||
|
fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)
|
||||||
|
|
||||||
|
var currentModel string
|
||||||
|
inModel := false
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
currentModel = matches[1]
|
||||||
|
inModel = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if trimmed == "}" {
|
||||||
|
inModel = false
|
||||||
|
currentModel = ""
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if inModel && currentModel != "" {
|
||||||
|
if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
fieldName := matches[1]
|
||||||
|
fieldType := matches[2]
|
||||||
|
modifier := matches[3]
|
||||||
|
attributes := matches[4]
|
||||||
|
|
||||||
|
// Check if this is a relation field (references another model or is an array)
|
||||||
|
isPotentialRelation := modifier == "[]" || !r.isPrimitiveType(fieldType)
|
||||||
|
|
||||||
|
if isPotentialRelation {
|
||||||
|
rel := relationField{
|
||||||
|
tableName: currentModel,
|
||||||
|
fieldName: fieldName,
|
||||||
|
relatedModel: fieldType,
|
||||||
|
isArray: modifier == "[]",
|
||||||
|
relationAttr: attributes,
|
||||||
|
}
|
||||||
|
relations = append(relations, rel)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return relations
|
||||||
|
}
|
||||||
|
|
||||||
|
// isPrimitiveType checks if a type is a Prisma primitive type
|
||||||
|
func (r *Reader) isPrimitiveType(typeName string) bool {
|
||||||
|
primitives := []string{"String", "Boolean", "Int", "BigInt", "Float", "Decimal", "DateTime", "Json", "Bytes"}
|
||||||
|
for _, p := range primitives {
|
||||||
|
if typeName == p {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// isEnumType checks if a type name might be an enum
|
||||||
|
// Note: We can't definitively check against schema.Enums at parse time
|
||||||
|
// because enums might be defined after the model, so we just check
|
||||||
|
// if it starts with uppercase (Prisma convention for enums)
|
||||||
|
func (r *Reader) isEnumType(typeName string, table *models.Table) bool {
|
||||||
|
// Simple heuristic: enum types start with uppercase letter
|
||||||
|
// and are not known model names (though we can't check that yet)
|
||||||
|
if len(typeName) > 0 && typeName[0] >= 'A' && typeName[0] <= 'Z' {
|
||||||
|
// Additional check: primitive types are already handled above
|
||||||
|
// So if it's uppercase and not primitive, it's likely an enum or model
|
||||||
|
// We'll assume it's an enum if it's a single word
|
||||||
|
return !strings.Contains(typeName, "_")
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// createConstraintFromRelation creates a FK constraint from a @relation attribute
|
||||||
|
func (r *Reader) createConstraintFromRelation(rel relationField, tableMap map[string]*models.Table, schema *models.Schema) {
|
||||||
|
// Skip array fields (they are the inverse side of the relation)
|
||||||
|
if rel.isArray {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if rel.relationAttr == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse @relation attribute
|
||||||
|
relationRegex := regexp.MustCompile(`@relation\((.*?)\)`)
|
||||||
|
matches := relationRegex.FindStringSubmatch(rel.relationAttr)
|
||||||
|
if matches == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
relationContent := matches[1]
|
||||||
|
|
||||||
|
// Extract fields and references
|
||||||
|
fieldsRegex := regexp.MustCompile(`fields:\s*\[(.*?)\]`)
|
||||||
|
referencesRegex := regexp.MustCompile(`references:\s*\[(.*?)\]`)
|
||||||
|
nameRegex := regexp.MustCompile(`name:\s*"([^"]+)"`)
|
||||||
|
onDeleteRegex := regexp.MustCompile(`onDelete:\s*(\w+)`)
|
||||||
|
onUpdateRegex := regexp.MustCompile(`onUpdate:\s*(\w+)`)
|
||||||
|
|
||||||
|
fieldsMatch := fieldsRegex.FindStringSubmatch(relationContent)
|
||||||
|
referencesMatch := referencesRegex.FindStringSubmatch(relationContent)
|
||||||
|
|
||||||
|
if fieldsMatch == nil || referencesMatch == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse field and reference column lists
|
||||||
|
fieldCols := r.parseColumnList(fieldsMatch[1])
|
||||||
|
refCols := r.parseColumnList(referencesMatch[1])
|
||||||
|
|
||||||
|
if len(fieldCols) == 0 || len(refCols) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create FK constraint
|
||||||
|
constraintName := fmt.Sprintf("fk_%s_%s", rel.tableName, fieldCols[0])
|
||||||
|
|
||||||
|
// Check for custom name
|
||||||
|
if nameMatch := nameRegex.FindStringSubmatch(relationContent); nameMatch != nil {
|
||||||
|
constraintName = nameMatch[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
|
||||||
|
constraint.Schema = "public"
|
||||||
|
constraint.Table = rel.tableName
|
||||||
|
constraint.Columns = fieldCols
|
||||||
|
constraint.ReferencedSchema = "public"
|
||||||
|
constraint.ReferencedTable = rel.relatedModel
|
||||||
|
constraint.ReferencedColumns = refCols
|
||||||
|
|
||||||
|
// Parse referential actions
|
||||||
|
if onDeleteMatch := onDeleteRegex.FindStringSubmatch(relationContent); onDeleteMatch != nil {
|
||||||
|
constraint.OnDelete = onDeleteMatch[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
if onUpdateMatch := onUpdateRegex.FindStringSubmatch(relationContent); onUpdateMatch != nil {
|
||||||
|
constraint.OnUpdate = onUpdateMatch[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add constraint to table
|
||||||
|
if table, exists := tableMap[rel.tableName]; exists {
|
||||||
|
table.Constraints[constraint.Name] = constraint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnList parses a comma-separated list of column names
|
||||||
|
func (r *Reader) parseColumnList(list string) []string {
|
||||||
|
parts := strings.Split(list, ",")
|
||||||
|
result := make([]string, 0)
|
||||||
|
for _, part := range parts {
|
||||||
|
trimmed := strings.TrimSpace(part)
|
||||||
|
if trimmed != "" {
|
||||||
|
result = append(result, trimmed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// detectImplicitManyToMany detects implicit M2M relationships and creates join tables
|
||||||
|
func (r *Reader) detectImplicitManyToMany(relations []relationField, tableMap map[string]*models.Table, schema *models.Schema) {
|
||||||
|
// Group relations by model pairs
|
||||||
|
type modelPair struct {
|
||||||
|
model1 string
|
||||||
|
model2 string
|
||||||
|
}
|
||||||
|
|
||||||
|
pairMap := make(map[modelPair][]relationField)
|
||||||
|
|
||||||
|
for _, rel := range relations {
|
||||||
|
if !rel.isArray || rel.relationAttr != "" {
|
||||||
|
// Skip non-array fields and explicit relations
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a normalized pair (alphabetically sorted to avoid duplicates)
|
||||||
|
pair := modelPair{}
|
||||||
|
if rel.tableName < rel.relatedModel {
|
||||||
|
pair.model1 = rel.tableName
|
||||||
|
pair.model2 = rel.relatedModel
|
||||||
|
} else {
|
||||||
|
pair.model1 = rel.relatedModel
|
||||||
|
pair.model2 = rel.tableName
|
||||||
|
}
|
||||||
|
|
||||||
|
pairMap[pair] = append(pairMap[pair], rel)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for pairs with arrays on both sides (implicit M2M)
|
||||||
|
for pair, rels := range pairMap {
|
||||||
|
if len(rels) >= 2 {
|
||||||
|
// This is an implicit many-to-many relationship
|
||||||
|
r.createImplicitJoinTable(pair.model1, pair.model2, tableMap, schema)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// createImplicitJoinTable creates a virtual join table for implicit M2M relations
|
||||||
|
func (r *Reader) createImplicitJoinTable(model1, model2 string, tableMap map[string]*models.Table, schema *models.Schema) {
|
||||||
|
// Prisma naming convention: _Model1ToModel2 (alphabetically sorted)
|
||||||
|
joinTableName := fmt.Sprintf("_%sTo%s", model1, model2)
|
||||||
|
|
||||||
|
// Check if join table already exists
|
||||||
|
if _, exists := tableMap[joinTableName]; exists {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create join table
|
||||||
|
joinTable := models.InitTable(joinTableName, "public")
|
||||||
|
|
||||||
|
// Get primary keys from both tables
|
||||||
|
pk1 := r.getPrimaryKeyColumn(tableMap[model1])
|
||||||
|
pk2 := r.getPrimaryKeyColumn(tableMap[model2])
|
||||||
|
|
||||||
|
if pk1 == nil || pk2 == nil {
|
||||||
|
return // Can't create join table without PKs
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create FK columns in join table
|
||||||
|
fkCol1Name := fmt.Sprintf("%sId", model1)
|
||||||
|
fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
|
||||||
|
fkCol1.Type = pk1.Type
|
||||||
|
fkCol1.NotNull = true
|
||||||
|
joinTable.Columns[fkCol1Name] = fkCol1
|
||||||
|
|
||||||
|
fkCol2Name := fmt.Sprintf("%sId", model2)
|
||||||
|
fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
|
||||||
|
fkCol2.Type = pk2.Type
|
||||||
|
fkCol2.NotNull = true
|
||||||
|
joinTable.Columns[fkCol2Name] = fkCol2
|
||||||
|
|
||||||
|
// Create composite primary key
|
||||||
|
pkConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("pk_%s", joinTableName),
|
||||||
|
models.PrimaryKeyConstraint,
|
||||||
|
)
|
||||||
|
pkConstraint.Schema = "public"
|
||||||
|
pkConstraint.Table = joinTableName
|
||||||
|
pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
|
||||||
|
joinTable.Constraints[pkConstraint.Name] = pkConstraint
|
||||||
|
|
||||||
|
// Mark columns as PK
|
||||||
|
fkCol1.IsPrimaryKey = true
|
||||||
|
fkCol2.IsPrimaryKey = true
|
||||||
|
|
||||||
|
// Create FK constraints
|
||||||
|
fk1 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, model1),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk1.Schema = "public"
|
||||||
|
fk1.Table = joinTableName
|
||||||
|
fk1.Columns = []string{fkCol1Name}
|
||||||
|
fk1.ReferencedSchema = "public"
|
||||||
|
fk1.ReferencedTable = model1
|
||||||
|
fk1.ReferencedColumns = []string{pk1.Name}
|
||||||
|
fk1.OnDelete = "Cascade"
|
||||||
|
joinTable.Constraints[fk1.Name] = fk1
|
||||||
|
|
||||||
|
fk2 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, model2),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk2.Schema = "public"
|
||||||
|
fk2.Table = joinTableName
|
||||||
|
fk2.Columns = []string{fkCol2Name}
|
||||||
|
fk2.ReferencedSchema = "public"
|
||||||
|
fk2.ReferencedTable = model2
|
||||||
|
fk2.ReferencedColumns = []string{pk2.Name}
|
||||||
|
fk2.OnDelete = "Cascade"
|
||||||
|
joinTable.Constraints[fk2.Name] = fk2
|
||||||
|
|
||||||
|
// Add join table to schema
|
||||||
|
schema.Tables = append(schema.Tables, joinTable)
|
||||||
|
tableMap[joinTableName] = joinTable
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPrimaryKeyColumn returns the primary key column of a table
|
||||||
|
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
|
||||||
|
if table == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
return col
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
160
pkg/readers/sqldir/README.md
Normal file
160
pkg/readers/sqldir/README.md
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
# SQL Directory Reader
|
||||||
|
|
||||||
|
The SQL Directory Reader (`sqldir`) reads SQL scripts from a directory structure and populates the `Scripts` field of a `Schema`. It supports recursive directory scanning and extracts priority, sequence, and name information from filenames.
|
||||||
|
|
||||||
|
## File Naming Convention
|
||||||
|
|
||||||
|
Scripts must follow this naming pattern (supports both underscores and hyphens as separators):
|
||||||
|
|
||||||
|
```
|
||||||
|
{priority}_{sequence}_{name}.{sql|pgsql}
|
||||||
|
{priority}-{sequence}-{name}.{sql|pgsql}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Components
|
||||||
|
|
||||||
|
- **priority**: Integer (0-9999) - Defines execution order (lower executes first)
|
||||||
|
- **sequence**: Integer (0-9999) - Defines order within the same priority level
|
||||||
|
- **separator**: Underscore `_` or hyphen `-` (can be mixed)
|
||||||
|
- **name**: Descriptive name (alphanumeric, underscores, hyphens allowed)
|
||||||
|
- **extension**: `.sql` or `.pgsql`
|
||||||
|
|
||||||
|
### Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
migrations/
|
||||||
|
├── 1_001_create_schema.sql # Priority 1, Sequence 1 (underscore format)
|
||||||
|
├── 1-002-create-users-table.sql # Priority 1, Sequence 2 (hyphen format)
|
||||||
|
├── 1_003_create_posts_table.pgsql # Priority 1, Sequence 3 (underscore format)
|
||||||
|
├── 2-001-add-indexes.sql # Priority 2, Sequence 1 (hyphen format)
|
||||||
|
├── 2_002_add_constraints.sql # Priority 2, Sequence 2 (underscore format)
|
||||||
|
├── 10-10-create-newid.pgsql # Priority 10, Sequence 10 (hyphen format)
|
||||||
|
└── subdirectory/
|
||||||
|
└── 3_001_seed_data.sql # Priority 3, Sequence 1 (subdirs supported)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Execution Order**: 1→2→3→4→5→6→7 (sorted by Priority ascending, then Sequence ascending)
|
||||||
|
|
||||||
|
**Both formats can be mixed** in the same directory - the reader handles both seamlessly.
|
||||||
|
|
||||||
|
### Invalid Filenames (Ignored)
|
||||||
|
|
||||||
|
- `migration.sql` - Missing priority/sequence
|
||||||
|
- `1_create_users.sql` - Missing sequence
|
||||||
|
- `create_users.sql` - Missing priority/sequence
|
||||||
|
- `1_001_test.txt` - Wrong extension
|
||||||
|
- `readme.md` - Not a SQL file
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
)
|
||||||
|
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/migrations",
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": "public", // Optional, defaults to "public"
|
||||||
|
"database_name": "myapp", // Optional, defaults to "database"
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read all scripts
|
||||||
|
database, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Access scripts
|
||||||
|
for _, schema := range database.Schemas {
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
fmt.Printf("Script: %s (P:%d S:%d)\n",
|
||||||
|
script.Name, script.Priority, script.Sequence)
|
||||||
|
fmt.Printf("SQL: %s\n", script.SQL)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Read Schema Only
|
||||||
|
|
||||||
|
```go
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d scripts\n", len(schema.Scripts))
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Recursive Directory Scanning**: Automatically scans all subdirectories
|
||||||
|
- **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
|
||||||
|
- **Flexible Naming**: Extract metadata from filename patterns
|
||||||
|
- **Error Handling**: Validates directory existence and file accessibility
|
||||||
|
- **Schema Integration**: Scripts are added to the standard RelSpec `Schema` model
|
||||||
|
|
||||||
|
## Script Model
|
||||||
|
|
||||||
|
Each script is stored as a `models.Script`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Script struct {
|
||||||
|
Name string // Extracted from filename (e.g., "create_users")
|
||||||
|
Description string // Auto-generated description with file path
|
||||||
|
SQL string // Complete SQL content from file
|
||||||
|
Priority int // Execution priority from filename
|
||||||
|
Sequence uint // Execution sequence from filename
|
||||||
|
// ... other fields available but not populated by this reader
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integration with SQL Executor
|
||||||
|
|
||||||
|
The SQL Directory Reader is designed to work seamlessly with the SQL Executor Writer:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Read scripts
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "./migrations",
|
||||||
|
})
|
||||||
|
db, _ := reader.ReadDatabase()
|
||||||
|
|
||||||
|
// Execute scripts
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/mydb",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
writer.WriteDatabase(db) // Executes in Priority→Sequence order
|
||||||
|
```
|
||||||
|
|
||||||
|
See `pkg/writers/sqlexec/README.md` for more details on script execution.
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
The reader will return errors for:
|
||||||
|
- Non-existent directory paths
|
||||||
|
- Inaccessible directories or files
|
||||||
|
- Invalid file permissions
|
||||||
|
- File read failures
|
||||||
|
|
||||||
|
Files that don't match the naming pattern are silently ignored (not treated as errors).
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
Run tests:
|
||||||
|
```bash
|
||||||
|
go test ./pkg/readers/sqldir/
|
||||||
|
```
|
||||||
|
|
||||||
|
Tests include:
|
||||||
|
- Valid file parsing
|
||||||
|
- Recursive directory scanning
|
||||||
|
- Invalid filename handling
|
||||||
|
- Empty directory handling
|
||||||
|
- Error conditions
|
||||||
127
pkg/readers/sqldir/example_test.go
Normal file
127
pkg/readers/sqldir/example_test.go
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
package sqldir_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Example demonstrates how to read SQL scripts from a directory and execute them
|
||||||
|
func Example() {
|
||||||
|
// Step 1: Read SQL scripts from a directory
|
||||||
|
// Directory structure example:
|
||||||
|
// migrations/
|
||||||
|
// 1_001_create_schema.sql
|
||||||
|
// 1_002_create_users_table.sql
|
||||||
|
// 1_003_create_posts_table.pgsql
|
||||||
|
// 2_001_add_indexes.sql
|
||||||
|
// 2_002_seed_data.sql
|
||||||
|
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/migrations",
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": "public",
|
||||||
|
"database_name": "myapp",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read the database schema with scripts
|
||||||
|
database, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to read scripts: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Read %d schemas\n", len(database.Schemas))
|
||||||
|
fmt.Printf("Found %d scripts in schema '%s'\n",
|
||||||
|
len(database.Schemas[0].Scripts),
|
||||||
|
database.Schemas[0].Name)
|
||||||
|
|
||||||
|
// Step 2: Execute the scripts against a PostgreSQL database
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://user:password@localhost:5432/myapp?sslmode=disable",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Execute all scripts in Priority then Sequence order
|
||||||
|
if err := writer.WriteDatabase(database); err != nil {
|
||||||
|
log.Fatalf("Failed to execute scripts: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("All scripts executed successfully!")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Example_withSingleSchema shows how to read and execute scripts for a single schema
|
||||||
|
func Example_withSingleSchema() {
|
||||||
|
// Read scripts
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/migrations",
|
||||||
|
})
|
||||||
|
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to read schema: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute scripts
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/testdb",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := writer.WriteSchema(schema); err != nil {
|
||||||
|
log.Fatalf("Failed to execute scripts: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Schema scripts executed successfully!")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Example_fileNamingConvention shows the expected file naming pattern
|
||||||
|
func Example_fileNamingConvention() {
|
||||||
|
// File naming pattern: {priority}_{sequence}_{name}.sql or .pgsql
|
||||||
|
// OR: {priority}-{sequence}-{name}.sql or .pgsql
|
||||||
|
//
|
||||||
|
// Both underscore (_) and hyphen (-) separators are supported and can be mixed.
|
||||||
|
//
|
||||||
|
// Components:
|
||||||
|
// - priority: Integer (0-9999) - Scripts with lower priority execute first
|
||||||
|
// - sequence: Integer (0-9999) - Within same priority, lower sequence executes first
|
||||||
|
// - separator: Underscore (_) or hyphen (-)
|
||||||
|
// - name: Descriptive name (alphanumeric, underscores, hyphens)
|
||||||
|
// - extension: .sql or .pgsql
|
||||||
|
//
|
||||||
|
// Examples (underscore format):
|
||||||
|
// ✓ 1_001_create_users.sql (Priority=1, Sequence=1)
|
||||||
|
// ✓ 1_002_create_posts.sql (Priority=1, Sequence=2)
|
||||||
|
// ✓ 2_001_add_indexes.pgsql (Priority=2, Sequence=1)
|
||||||
|
// ✓ 10_100_migration.sql (Priority=10, Sequence=100)
|
||||||
|
//
|
||||||
|
// Examples (hyphen format):
|
||||||
|
// ✓ 1-001-create-users.sql (Priority=1, Sequence=1)
|
||||||
|
// ✓ 1-002-create-posts.sql (Priority=1, Sequence=2)
|
||||||
|
// ✓ 2-001-add-indexes.pgsql (Priority=2, Sequence=1)
|
||||||
|
// ✓ 10-10-create-newid.pgsql (Priority=10, Sequence=10)
|
||||||
|
//
|
||||||
|
// Mixed format (both in same directory):
|
||||||
|
// ✓ 1_001_create_users.sql (underscore format)
|
||||||
|
// ✓ 1-002-create-posts.sql (hyphen format)
|
||||||
|
// ✓ 2_001_add_indexes.sql (underscore format)
|
||||||
|
//
|
||||||
|
// Execution order for mixed examples:
|
||||||
|
// 1. 1_001_create_users.sql (Priority 1, Sequence 1)
|
||||||
|
// 2. 1-002-create-posts.sql (Priority 1, Sequence 2)
|
||||||
|
// 3. 2_001_add_indexes.sql (Priority 2, Sequence 1)
|
||||||
|
//
|
||||||
|
// Invalid filenames (will be ignored):
|
||||||
|
// ✗ migration.sql (missing priority/sequence)
|
||||||
|
// ✗ 1_create_users.sql (missing sequence)
|
||||||
|
// ✗ create_users.sql (missing priority/sequence)
|
||||||
|
// ✗ 1_001_create_users.txt (wrong extension)
|
||||||
|
|
||||||
|
fmt.Println("See comments for file naming conventions")
|
||||||
|
}
|
||||||
171
pkg/readers/sqldir/reader.go
Normal file
171
pkg/readers/sqldir/reader.go
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
package sqldir
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reader implements the readers.Reader interface for SQL script directories
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader creates a new SQL directory reader
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDatabase reads all SQL scripts from a directory into a Database
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return nil, fmt.Errorf("directory path is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if directory exists
|
||||||
|
info, err := os.Stat(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to access directory: %w", err)
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return nil, fmt.Errorf("path is not a directory: %s", r.options.FilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read scripts from directory
|
||||||
|
scripts, err := r.readScripts()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read scripts: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get schema name from metadata or use default
|
||||||
|
schemaName := "public"
|
||||||
|
if name, ok := r.options.Metadata["schema_name"].(string); ok && name != "" {
|
||||||
|
schemaName = name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create schema with scripts
|
||||||
|
schema := &models.Schema{
|
||||||
|
Name: schemaName,
|
||||||
|
Scripts: scripts,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get database name from metadata or use default
|
||||||
|
dbName := "database"
|
||||||
|
if name, ok := r.options.Metadata["database_name"].(string); ok && name != "" {
|
||||||
|
dbName = name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create database with schema
|
||||||
|
database := &models.Database{
|
||||||
|
Name: dbName,
|
||||||
|
Schemas: []*models.Schema{schema},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set back-reference
|
||||||
|
schema.RefDatabase = database
|
||||||
|
|
||||||
|
return database, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadSchema reads all SQL scripts from a directory into a Schema
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schema found")
|
||||||
|
}
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTable is not applicable for SQL script directories
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
return nil, fmt.Errorf("ReadTable is not supported for SQL script directories")
|
||||||
|
}
|
||||||
|
|
||||||
|
// readScripts recursively scans the directory for SQL files and parses them into Script models
|
||||||
|
func (r *Reader) readScripts() ([]*models.Script, error) {
|
||||||
|
var scripts []*models.Script
|
||||||
|
|
||||||
|
// Regular expression to parse filename: {priority}{sep}{sequence}{sep}{name}.sql or .pgsql
|
||||||
|
// Separator can be underscore (_) or hyphen (-)
|
||||||
|
// Example: 1_001_create_users.sql -> priority=1, sequence=001, name=create_users
|
||||||
|
// Example: 2_005_add_indexes.pgsql -> priority=2, sequence=005, name=add_indexes
|
||||||
|
// Example: 10-10-create-newid.pgsql -> priority=10, sequence=10, name=create-newid
|
||||||
|
pattern := regexp.MustCompile(`^(\d+)[_-](\d+)[_-](.+)\.(sql|pgsql)$`)
|
||||||
|
|
||||||
|
err := filepath.WalkDir(r.options.FilePath, func(path string, d os.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip directories
|
||||||
|
if d.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get filename
|
||||||
|
filename := d.Name()
|
||||||
|
|
||||||
|
// Match against pattern
|
||||||
|
matches := pattern.FindStringSubmatch(filename)
|
||||||
|
if matches == nil {
|
||||||
|
// Skip files that don't match the pattern
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse priority
|
||||||
|
priority, err := strconv.Atoi(matches[1])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("invalid priority in filename %s: %w", filename, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse sequence
|
||||||
|
sequence, err := strconv.ParseUint(matches[2], 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("invalid sequence in filename %s: %w", filename, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract name
|
||||||
|
name := matches[3]
|
||||||
|
|
||||||
|
// Read SQL content
|
||||||
|
content, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to read file %s: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get relative path from base directory
|
||||||
|
relPath, err := filepath.Rel(r.options.FilePath, path)
|
||||||
|
if err != nil {
|
||||||
|
relPath = path
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Script model
|
||||||
|
script := &models.Script{
|
||||||
|
Name: name,
|
||||||
|
Description: fmt.Sprintf("SQL script from %s", relPath),
|
||||||
|
SQL: string(content),
|
||||||
|
Priority: priority,
|
||||||
|
Sequence: uint(sequence),
|
||||||
|
}
|
||||||
|
|
||||||
|
scripts = append(scripts, script)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return scripts, nil
|
||||||
|
}
|
||||||
375
pkg/readers/sqldir/reader_test.go
Normal file
375
pkg/readers/sqldir/reader_test.go
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
package sqldir
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase(t *testing.T) {
|
||||||
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create test SQL files with both underscore and hyphen separators
|
||||||
|
testFiles := map[string]string{
|
||||||
|
"1_001_create_users.sql": "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT);",
|
||||||
|
"1_002_create_posts.sql": "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT);",
|
||||||
|
"2_001_add_indexes.sql": "CREATE INDEX idx_posts_user_id ON posts(user_id);",
|
||||||
|
"1_003_seed_data.pgsql": "INSERT INTO users (name) VALUES ('Alice'), ('Bob');",
|
||||||
|
"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL PRIMARY KEY);",
|
||||||
|
"2-005-add-column.sql": "ALTER TABLE users ADD COLUMN email TEXT;",
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content := range testFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create subdirectory with additional script
|
||||||
|
subDir := filepath.Join(tempDir, "migrations")
|
||||||
|
if err := os.MkdirAll(subDir, 0755); err != nil {
|
||||||
|
t.Fatalf("Failed to create subdirectory: %v", err)
|
||||||
|
}
|
||||||
|
subFile := filepath.Join(subDir, "3_001_add_column.sql")
|
||||||
|
if err := os.WriteFile(subFile, []byte("ALTER TABLE users ADD COLUMN email TEXT;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create subdirectory file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create reader
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"schema_name": "test_schema",
|
||||||
|
"database_name": "test_db",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read database
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify database
|
||||||
|
if db.Name != "test_db" {
|
||||||
|
t.Errorf("Expected database name 'test_db', got '%s'", db.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) != 1 {
|
||||||
|
t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if schema.Name != "test_schema" {
|
||||||
|
t.Errorf("Expected schema name 'test_schema', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify scripts (should be 7 total: 4 underscore + 2 hyphen + 1 subdirectory)
|
||||||
|
if len(schema.Scripts) != 7 {
|
||||||
|
t.Fatalf("Expected 7 scripts, got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify script details
|
||||||
|
expectedScripts := []struct {
|
||||||
|
name string
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
}{
|
||||||
|
{"create_users", 1, 1},
|
||||||
|
{"create_posts", 1, 2},
|
||||||
|
{"seed_data", 1, 3},
|
||||||
|
{"add_indexes", 2, 1},
|
||||||
|
{"add-column", 2, 5},
|
||||||
|
{"add_column", 3, 1},
|
||||||
|
{"create-newid", 10, 10},
|
||||||
|
}
|
||||||
|
|
||||||
|
scriptMap := make(map[string]*struct {
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
sql string
|
||||||
|
})
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
scriptMap[script.Name] = &struct {
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
sql string
|
||||||
|
}{
|
||||||
|
priority: script.Priority,
|
||||||
|
sequence: script.Sequence,
|
||||||
|
sql: script.SQL,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, expected := range expectedScripts {
|
||||||
|
script, exists := scriptMap[expected.name]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Expected script '%s' not found", expected.name)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if script.priority != expected.priority {
|
||||||
|
t.Errorf("Script '%s': expected priority %d, got %d",
|
||||||
|
expected.name, expected.priority, script.priority)
|
||||||
|
}
|
||||||
|
if script.sequence != expected.sequence {
|
||||||
|
t.Errorf("Script '%s': expected sequence %d, got %d",
|
||||||
|
expected.name, expected.sequence, script.sequence)
|
||||||
|
}
|
||||||
|
if script.sql == "" {
|
||||||
|
t.Errorf("Script '%s': SQL content is empty", expected.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create test SQL file
|
||||||
|
testFile := filepath.Join(tempDir, "1_001_test.sql")
|
||||||
|
if err := os.WriteFile(testFile, []byte("SELECT 1;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create reader
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read schema
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify schema
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected default schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Scripts) != 1 {
|
||||||
|
t.Fatalf("Expected 1 script, got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_InvalidDirectory(t *testing.T) {
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/directory",
|
||||||
|
})
|
||||||
|
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for nonexistent directory, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_EmptyDirectory(t *testing.T) {
|
||||||
|
// Create temporary empty directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas[0].Scripts) != 0 {
|
||||||
|
t.Errorf("Expected 0 scripts in empty directory, got %d", len(db.Schemas[0].Scripts))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_InvalidFilename(t *testing.T) {
|
||||||
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create files with various invalid patterns
|
||||||
|
invalidFiles := []string{
|
||||||
|
"invalid.sql", // No priority/sequence
|
||||||
|
"1_test.sql", // Missing sequence
|
||||||
|
"test_1_2.sql", // Wrong order
|
||||||
|
"a_001_test.sql", // Non-numeric priority
|
||||||
|
"1_abc_test.sql", // Non-numeric sequence
|
||||||
|
"1_001_test.txt", // Wrong extension
|
||||||
|
"1_001_test.sql.backup", // Wrong extension
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, filename := range invalidFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte("SELECT 1;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create one valid file
|
||||||
|
validFile := filepath.Join(tempDir, "1_001_valid.sql")
|
||||||
|
if err := os.WriteFile(validFile, []byte("SELECT 1;"), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create valid file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should only have the valid file
|
||||||
|
if len(db.Schemas[0].Scripts) != 1 {
|
||||||
|
t.Errorf("Expected 1 script (invalid files should be skipped), got %d", len(db.Schemas[0].Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
if db.Schemas[0].Scripts[0].Name != "valid" {
|
||||||
|
t.Errorf("Expected script name 'valid', got '%s'", db.Schemas[0].Scripts[0].Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
reader := NewReader(&readers.ReaderOptions{})
|
||||||
|
|
||||||
|
_, err := reader.ReadTable()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for ReadTable (not supported), got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_HyphenFormat(t *testing.T) {
|
||||||
|
// Create temporary test directory
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-hyphen-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
// Create test files with hyphen separators
|
||||||
|
testFiles := map[string]string{
|
||||||
|
"1-001-create-table.sql": "CREATE TABLE test (id INT);",
|
||||||
|
"1-002-insert-data.pgsql": "INSERT INTO test VALUES (1);",
|
||||||
|
"10-10-create-newid.pgsql": "CREATE TABLE newid (id SERIAL);",
|
||||||
|
"2-005-add-index.sql": "CREATE INDEX idx_test ON test(id);",
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content := range testFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create reader
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Read database
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) != 4 {
|
||||||
|
t.Fatalf("Expected 4 scripts, got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify specific hyphen-formatted scripts
|
||||||
|
expectedScripts := map[string]struct {
|
||||||
|
priority int
|
||||||
|
sequence uint
|
||||||
|
}{
|
||||||
|
"create-table": {1, 1},
|
||||||
|
"insert-data": {1, 2},
|
||||||
|
"add-index": {2, 5},
|
||||||
|
"create-newid": {10, 10},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
expected, exists := expectedScripts[script.Name]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Unexpected script: %s", script.Name)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if script.Priority != expected.priority {
|
||||||
|
t.Errorf("Script '%s': expected priority %d, got %d",
|
||||||
|
script.Name, expected.priority, script.Priority)
|
||||||
|
}
|
||||||
|
if script.Sequence != expected.sequence {
|
||||||
|
t.Errorf("Script '%s': expected sequence %d, got %d",
|
||||||
|
script.Name, expected.sequence, script.Sequence)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_MixedFormat(t *testing.T) {
|
||||||
|
// Test that both underscore and hyphen formats can be mixed
|
||||||
|
tempDir, err := os.MkdirTemp("", "sqldir-test-mixed-*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp directory: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tempDir)
|
||||||
|
|
||||||
|
testFiles := map[string]string{
|
||||||
|
"1_001_underscore.sql": "SELECT 1;",
|
||||||
|
"1-002-hyphen.sql": "SELECT 2;",
|
||||||
|
"2_003_underscore.sql": "SELECT 3;",
|
||||||
|
"2-004-hyphen.sql": "SELECT 4;",
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content := range testFiles {
|
||||||
|
filePath := filepath.Join(tempDir, filename)
|
||||||
|
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to create test file %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: tempDir,
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
if len(schema.Scripts) != 4 {
|
||||||
|
t.Fatalf("Expected 4 scripts (mixed format), got %d", len(schema.Scripts))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify both formats are parsed correctly
|
||||||
|
names := make(map[string]bool)
|
||||||
|
for _, script := range schema.Scripts {
|
||||||
|
names[script.Name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedNames := []string{"underscore", "hyphen", "underscore", "hyphen"}
|
||||||
|
for _, name := range expectedNames {
|
||||||
|
if !names[name] {
|
||||||
|
t.Errorf("Expected script name '%s' not found", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
122
pkg/readers/typeorm/README.md
Normal file
122
pkg/readers/typeorm/README.md
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
# TypeORM Reader
|
||||||
|
|
||||||
|
Reads TypeScript files containing TypeORM entity definitions and extracts database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The TypeORM Reader parses TypeScript source files that define TypeORM entities (classes with TypeORM decorators) and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Parses TypeORM decorators and entity definitions
|
||||||
|
- Extracts table, column, and relationship information
|
||||||
|
- Supports various TypeORM column types and options
|
||||||
|
- Handles constraints, indexes, and relationships
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/entities/",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := typeorm.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read TypeORM entities and convert to JSON
|
||||||
|
relspec --input typeorm --in-file entities/ --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert TypeORM to GORM models
|
||||||
|
relspec --input typeorm --in-file User.ts --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example TypeORM Entity
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
Entity,
|
||||||
|
PrimaryGeneratedColumn,
|
||||||
|
Column,
|
||||||
|
CreateDateColumn,
|
||||||
|
OneToMany,
|
||||||
|
} from 'typeorm';
|
||||||
|
import { Post } from './Post';
|
||||||
|
|
||||||
|
@Entity('users')
|
||||||
|
export class User {
|
||||||
|
@PrimaryGeneratedColumn('increment')
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 50, unique: true })
|
||||||
|
username: string;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 100 })
|
||||||
|
email: string;
|
||||||
|
|
||||||
|
@CreateDateColumn({ name: 'created_at' })
|
||||||
|
createdAt: Date;
|
||||||
|
|
||||||
|
@OneToMany(() => Post, (post) => post.user)
|
||||||
|
posts: Post[];
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity('posts')
|
||||||
|
export class Post {
|
||||||
|
@PrimaryGeneratedColumn('increment')
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ name: 'user_id' })
|
||||||
|
userId: number;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 200 })
|
||||||
|
title: string;
|
||||||
|
|
||||||
|
@Column({ type: 'text' })
|
||||||
|
content: string;
|
||||||
|
|
||||||
|
@ManyToOne(() => User, (user) => user.posts, { onDelete: 'CASCADE' })
|
||||||
|
@JoinColumn({ name: 'user_id' })
|
||||||
|
user: User;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported TypeORM Decorators
|
||||||
|
|
||||||
|
- `@Entity()` - Entity/table definition
|
||||||
|
- `@PrimaryGeneratedColumn()` - Auto-increment primary key
|
||||||
|
- `@PrimaryColumn()` - Primary key
|
||||||
|
- `@Column()` - Column definition
|
||||||
|
- `@CreateDateColumn()` - Auto-set creation timestamp
|
||||||
|
- `@UpdateDateColumn()` - Auto-update timestamp
|
||||||
|
- `@OneToMany()` - One-to-many relationship
|
||||||
|
- `@ManyToOne()` - Many-to-one relationship
|
||||||
|
- `@JoinColumn()` - Foreign key column
|
||||||
|
- `@Index()` - Index definition
|
||||||
|
- `@Unique()` - Unique constraint
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Schema name can be specified in `@Entity()` decorator
|
||||||
|
- Supports both JavaScript and TypeScript entity files
|
||||||
|
- Relationship metadata is extracted from decorators
|
||||||
785
pkg/readers/typeorm/reader.go
Normal file
785
pkg/readers/typeorm/reader.go
Normal file
@@ -0,0 +1,785 @@
|
|||||||
|
package typeorm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reader implements the readers.Reader interface for TypeORM entity files
|
||||||
|
type Reader struct {
|
||||||
|
options *readers.ReaderOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader creates a new TypeORM reader with the given options
|
||||||
|
func NewReader(options *readers.ReaderOptions) *Reader {
|
||||||
|
return &Reader{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDatabase reads and parses TypeORM entity files, returning a Database model
|
||||||
|
func (r *Reader) ReadDatabase() (*models.Database, error) {
|
||||||
|
if r.options.FilePath == "" {
|
||||||
|
return nil, fmt.Errorf("file path is required for TypeORM reader")
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := os.ReadFile(r.options.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.parseTypeORM(string(content))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadSchema reads and parses TypeORM entity files, returning a Schema model
|
||||||
|
func (r *Reader) ReadSchema() (*models.Schema, error) {
|
||||||
|
db, err := r.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
return nil, fmt.Errorf("no schemas found in TypeORM entities")
|
||||||
|
}
|
||||||
|
|
||||||
|
return db.Schemas[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTable reads and parses TypeORM entity files, returning a Table model
|
||||||
|
func (r *Reader) ReadTable() (*models.Table, error) {
|
||||||
|
schema, err := r.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) == 0 {
|
||||||
|
return nil, fmt.Errorf("no tables found in TypeORM entities")
|
||||||
|
}
|
||||||
|
|
||||||
|
return schema.Tables[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// entityInfo stores information about an entity during parsing
|
||||||
|
type entityInfo struct {
|
||||||
|
name string
|
||||||
|
fields []fieldInfo
|
||||||
|
decorators []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// fieldInfo stores information about a field during parsing
|
||||||
|
type fieldInfo struct {
|
||||||
|
name string
|
||||||
|
typeName string
|
||||||
|
decorators []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTypeORM parses TypeORM entity content and returns a Database model
|
||||||
|
func (r *Reader) parseTypeORM(content string) (*models.Database, error) {
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
// Parse entities
|
||||||
|
entities := r.extractEntities(content)
|
||||||
|
|
||||||
|
// Convert entities to tables and views
|
||||||
|
tableMap := make(map[string]*models.Table)
|
||||||
|
for _, entity := range entities {
|
||||||
|
// Check if this is a view
|
||||||
|
isView := false
|
||||||
|
for _, decorator := range entity.decorators {
|
||||||
|
if strings.HasPrefix(decorator, "@ViewEntity") {
|
||||||
|
isView = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if isView {
|
||||||
|
view := r.entityToView(entity)
|
||||||
|
schema.Views = append(schema.Views, view)
|
||||||
|
} else {
|
||||||
|
table := r.entityToTable(entity)
|
||||||
|
schema.Tables = append(schema.Tables, table)
|
||||||
|
tableMap[table.Name] = table
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second pass: resolve relationships
|
||||||
|
r.resolveRelationships(entities, tableMap, schema)
|
||||||
|
|
||||||
|
db.Schemas = append(db.Schemas, schema)
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractEntities extracts entity and view definitions from TypeORM content
|
||||||
|
func (r *Reader) extractEntities(content string) []entityInfo {
|
||||||
|
entities := make([]entityInfo, 0)
|
||||||
|
|
||||||
|
// First, extract decorators properly (handling multi-line)
|
||||||
|
content = r.normalizeDecorators(content)
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(strings.NewReader(content))
|
||||||
|
|
||||||
|
entityRegex := regexp.MustCompile(`^export\s+class\s+(\w+)`)
|
||||||
|
decoratorRegex := regexp.MustCompile(`^\s*@(\w+)(\([^)]*\))?`)
|
||||||
|
fieldRegex := regexp.MustCompile(`^\s*(\w+):\s*([^;]+);`)
|
||||||
|
|
||||||
|
var currentEntity *entityInfo
|
||||||
|
var pendingDecorators []string
|
||||||
|
inClass := false
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := scanner.Text()
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Skip empty lines and comments
|
||||||
|
if trimmed == "" || strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "import ") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for decorator
|
||||||
|
if matches := decoratorRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
decorator := matches[0]
|
||||||
|
pendingDecorators = append(pendingDecorators, decorator)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for entity/view class
|
||||||
|
if matches := entityRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
// Save previous entity if exists
|
||||||
|
if currentEntity != nil {
|
||||||
|
entities = append(entities, *currentEntity)
|
||||||
|
}
|
||||||
|
|
||||||
|
currentEntity = &entityInfo{
|
||||||
|
name: matches[1],
|
||||||
|
fields: make([]fieldInfo, 0),
|
||||||
|
decorators: pendingDecorators,
|
||||||
|
}
|
||||||
|
pendingDecorators = []string{}
|
||||||
|
inClass = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for class end
|
||||||
|
if inClass && trimmed == "}" {
|
||||||
|
if currentEntity != nil {
|
||||||
|
entities = append(entities, *currentEntity)
|
||||||
|
currentEntity = nil
|
||||||
|
}
|
||||||
|
inClass = false
|
||||||
|
pendingDecorators = []string{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for field definition
|
||||||
|
if inClass && currentEntity != nil {
|
||||||
|
if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
|
||||||
|
fieldName := matches[1]
|
||||||
|
fieldType := strings.TrimSpace(matches[2])
|
||||||
|
|
||||||
|
field := fieldInfo{
|
||||||
|
name: fieldName,
|
||||||
|
typeName: fieldType,
|
||||||
|
decorators: pendingDecorators,
|
||||||
|
}
|
||||||
|
currentEntity.fields = append(currentEntity.fields, field)
|
||||||
|
pendingDecorators = []string{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save last entity
|
||||||
|
if currentEntity != nil {
|
||||||
|
entities = append(entities, *currentEntity)
|
||||||
|
}
|
||||||
|
|
||||||
|
return entities
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizeDecorators combines multi-line decorators into single lines
|
||||||
|
func (r *Reader) normalizeDecorators(content string) string {
|
||||||
|
// Replace multi-line decorators with single-line versions
|
||||||
|
// Match @Decorator({ ... }) across multiple lines
|
||||||
|
decoratorRegex := regexp.MustCompile(`@(\w+)\s*\(\s*\{([^}]*)\}\s*\)`)
|
||||||
|
|
||||||
|
return decoratorRegex.ReplaceAllStringFunc(content, func(match string) string {
|
||||||
|
// Remove newlines and extra spaces from decorator
|
||||||
|
match = strings.ReplaceAll(match, "\n", " ")
|
||||||
|
match = strings.ReplaceAll(match, "\r", " ")
|
||||||
|
// Normalize multiple spaces
|
||||||
|
spaceRegex := regexp.MustCompile(`\s+`)
|
||||||
|
match = spaceRegex.ReplaceAllString(match, " ")
|
||||||
|
return match
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// entityToView converts a view entity to a view
|
||||||
|
func (r *Reader) entityToView(entity entityInfo) *models.View {
|
||||||
|
// Parse @ViewEntity decorator options
|
||||||
|
viewName := entity.name
|
||||||
|
schemaName := "public"
|
||||||
|
var expression string
|
||||||
|
|
||||||
|
for _, decorator := range entity.decorators {
|
||||||
|
if strings.HasPrefix(decorator, "@ViewEntity") {
|
||||||
|
// Extract options from @ViewEntity({ ... })
|
||||||
|
options := r.parseViewEntityOptions(decorator)
|
||||||
|
|
||||||
|
// Check for custom view name
|
||||||
|
if name, ok := options["name"]; ok {
|
||||||
|
viewName = name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for schema
|
||||||
|
if schema, ok := options["schema"]; ok {
|
||||||
|
schemaName = schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for expression (SQL definition)
|
||||||
|
if expr, ok := options["expression"]; ok {
|
||||||
|
expression = expr
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
view := models.InitView(viewName, schemaName)
|
||||||
|
view.Definition = expression
|
||||||
|
|
||||||
|
// Add columns from fields (if any are defined in the view class)
|
||||||
|
for _, field := range entity.fields {
|
||||||
|
column := models.InitColumn(field.name, viewName, schemaName)
|
||||||
|
column.Type = r.typeScriptTypeToSQL(field.typeName)
|
||||||
|
view.Columns[column.Name] = column
|
||||||
|
}
|
||||||
|
|
||||||
|
return view
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseViewEntityOptions parses @ViewEntity decorator options
|
||||||
|
func (r *Reader) parseViewEntityOptions(decorator string) map[string]string {
|
||||||
|
options := make(map[string]string)
|
||||||
|
|
||||||
|
// Extract content between parentheses
|
||||||
|
start := strings.Index(decorator, "(")
|
||||||
|
end := strings.LastIndex(decorator, ")")
|
||||||
|
|
||||||
|
if start == -1 || end == -1 || start >= end {
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
content := decorator[start+1 : end]
|
||||||
|
|
||||||
|
// Skip if empty @ViewEntity()
|
||||||
|
if strings.TrimSpace(content) == "" {
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse name: "value"
|
||||||
|
nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
|
||||||
|
if matches := nameRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["name"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse schema: "value"
|
||||||
|
schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
|
||||||
|
if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["schema"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse expression: ` ... ` (can be multi-line, captured as single line after normalization)
|
||||||
|
// Look for expression followed by backtick or quote
|
||||||
|
expressionRegex := regexp.MustCompile(`expression:\s*` + "`" + `([^` + "`" + `]+)` + "`")
|
||||||
|
if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["expression"] = strings.TrimSpace(matches[1])
|
||||||
|
} else {
|
||||||
|
// Try with regular quotes
|
||||||
|
expressionRegex = regexp.MustCompile(`expression:\s*["']([^"']+)["']`)
|
||||||
|
if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["expression"] = strings.TrimSpace(matches[1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// entityToTable converts an entity to a table
|
||||||
|
func (r *Reader) entityToTable(entity entityInfo) *models.Table {
|
||||||
|
// Parse @Entity decorator options
|
||||||
|
tableName := entity.name
|
||||||
|
schemaName := "public"
|
||||||
|
var entityOptions map[string]string
|
||||||
|
|
||||||
|
for _, decorator := range entity.decorators {
|
||||||
|
if strings.HasPrefix(decorator, "@Entity") {
|
||||||
|
// Extract options from @Entity({ ... })
|
||||||
|
entityOptions = r.parseEntityOptions(decorator)
|
||||||
|
|
||||||
|
// Check for custom table name
|
||||||
|
if name, ok := entityOptions["name"]; ok {
|
||||||
|
tableName = name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for schema
|
||||||
|
if schema, ok := entityOptions["schema"]; ok {
|
||||||
|
schemaName = schema
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table := models.InitTable(tableName, schemaName)
|
||||||
|
|
||||||
|
// Store additional metadata from @Entity options
|
||||||
|
if entityOptions != nil {
|
||||||
|
// Store database name in metadata
|
||||||
|
if database, ok := entityOptions["database"]; ok {
|
||||||
|
if table.Metadata == nil {
|
||||||
|
table.Metadata = make(map[string]any)
|
||||||
|
}
|
||||||
|
table.Metadata["database"] = database
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store engine in metadata
|
||||||
|
if engine, ok := entityOptions["engine"]; ok {
|
||||||
|
if table.Metadata == nil {
|
||||||
|
table.Metadata = make(map[string]any)
|
||||||
|
}
|
||||||
|
table.Metadata["engine"] = engine
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store original class name if different from table name
|
||||||
|
if entity.name != tableName {
|
||||||
|
if table.Metadata == nil {
|
||||||
|
table.Metadata = make(map[string]any)
|
||||||
|
}
|
||||||
|
table.Metadata["class_name"] = entity.name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, field := range entity.fields {
|
||||||
|
// Skip relation fields (they'll be handled in relationship resolution)
|
||||||
|
if r.isRelationField(field) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
column := r.fieldToColumn(field, table)
|
||||||
|
if column != nil {
|
||||||
|
table.Columns[column.Name] = column
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseEntityOptions parses @Entity decorator options
|
||||||
|
func (r *Reader) parseEntityOptions(decorator string) map[string]string {
|
||||||
|
options := make(map[string]string)
|
||||||
|
|
||||||
|
// Extract content between parentheses
|
||||||
|
start := strings.Index(decorator, "(")
|
||||||
|
end := strings.LastIndex(decorator, ")")
|
||||||
|
|
||||||
|
if start == -1 || end == -1 || start >= end {
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
content := decorator[start+1 : end]
|
||||||
|
|
||||||
|
// Skip if empty @Entity()
|
||||||
|
if strings.TrimSpace(content) == "" {
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse name: "value" or name: 'value'
|
||||||
|
nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
|
||||||
|
if matches := nameRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["name"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse schema: "value"
|
||||||
|
schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
|
||||||
|
if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["schema"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse database: "value"
|
||||||
|
databaseRegex := regexp.MustCompile(`database:\s*["']([^"']+)["']`)
|
||||||
|
if matches := databaseRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["database"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse engine: "value"
|
||||||
|
engineRegex := regexp.MustCompile(`engine:\s*["']([^"']+)["']`)
|
||||||
|
if matches := engineRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
options["engine"] = matches[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// isRelationField checks if a field is a relation field
|
||||||
|
func (r *Reader) isRelationField(field fieldInfo) bool {
|
||||||
|
for _, decorator := range field.decorators {
|
||||||
|
if strings.Contains(decorator, "@ManyToOne") ||
|
||||||
|
strings.Contains(decorator, "@OneToMany") ||
|
||||||
|
strings.Contains(decorator, "@ManyToMany") ||
|
||||||
|
strings.Contains(decorator, "@OneToOne") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// fieldToColumn converts a field to a column
|
||||||
|
func (r *Reader) fieldToColumn(field fieldInfo, table *models.Table) *models.Column {
|
||||||
|
column := models.InitColumn(field.name, table.Name, table.Schema)
|
||||||
|
|
||||||
|
// Map TypeScript type to SQL type
|
||||||
|
column.Type = r.typeScriptTypeToSQL(field.typeName)
|
||||||
|
|
||||||
|
// Default to NOT NULL
|
||||||
|
column.NotNull = true
|
||||||
|
|
||||||
|
// Parse decorators
|
||||||
|
for _, decorator := range field.decorators {
|
||||||
|
r.parseColumnDecorator(decorator, column, table)
|
||||||
|
}
|
||||||
|
|
||||||
|
return column
|
||||||
|
}
|
||||||
|
|
||||||
|
// typeScriptTypeToSQL converts TypeScript types to SQL types
|
||||||
|
func (r *Reader) typeScriptTypeToSQL(tsType string) string {
|
||||||
|
// Remove array brackets and optional markers
|
||||||
|
tsType = strings.TrimSuffix(tsType, "[]")
|
||||||
|
tsType = strings.TrimSuffix(tsType, " | null")
|
||||||
|
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"string": "text",
|
||||||
|
"number": "integer",
|
||||||
|
"boolean": "boolean",
|
||||||
|
"Date": "timestamp",
|
||||||
|
"any": "jsonb",
|
||||||
|
}
|
||||||
|
|
||||||
|
for tsPattern, sqlType := range typeMap {
|
||||||
|
if strings.Contains(tsType, tsPattern) {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to text
|
||||||
|
return "text"
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnDecorator parses a column decorator
|
||||||
|
func (r *Reader) parseColumnDecorator(decorator string, column *models.Column, table *models.Table) {
|
||||||
|
// @PrimaryGeneratedColumn
|
||||||
|
if strings.HasPrefix(decorator, "@PrimaryGeneratedColumn") {
|
||||||
|
column.IsPrimaryKey = true
|
||||||
|
column.NotNull = true
|
||||||
|
|
||||||
|
if strings.Contains(decorator, "'uuid'") {
|
||||||
|
column.Type = "uuid"
|
||||||
|
column.Default = "gen_random_uuid()"
|
||||||
|
} else if strings.Contains(decorator, "'increment'") || strings.Contains(decorator, "()") {
|
||||||
|
column.AutoIncrement = true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// @Column
|
||||||
|
if strings.HasPrefix(decorator, "@Column") {
|
||||||
|
r.parseColumnOptions(decorator, column, table)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// @CreateDateColumn
|
||||||
|
if strings.HasPrefix(decorator, "@CreateDateColumn") {
|
||||||
|
column.Type = "timestamp"
|
||||||
|
column.Default = "now()"
|
||||||
|
column.NotNull = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// @UpdateDateColumn
|
||||||
|
if strings.HasPrefix(decorator, "@UpdateDateColumn") {
|
||||||
|
column.Type = "timestamp"
|
||||||
|
column.NotNull = true
|
||||||
|
if column.Comment != "" {
|
||||||
|
column.Comment += "; auto-update"
|
||||||
|
} else {
|
||||||
|
column.Comment = "auto-update"
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseColumnOptions parses @Column decorator options
|
||||||
|
func (r *Reader) parseColumnOptions(decorator string, column *models.Column, table *models.Table) {
|
||||||
|
// Extract content between parentheses
|
||||||
|
start := strings.Index(decorator, "(")
|
||||||
|
end := strings.LastIndex(decorator, ")")
|
||||||
|
|
||||||
|
if start == -1 || end == -1 || start >= end {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
content := decorator[start+1 : end]
|
||||||
|
|
||||||
|
// Check for shorthand type: @Column('text')
|
||||||
|
if strings.HasPrefix(content, "'") || strings.HasPrefix(content, "\"") {
|
||||||
|
typeStr := strings.Trim(content, "'\"`")
|
||||||
|
column.Type = typeStr
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse options object
|
||||||
|
if strings.Contains(content, "type:") {
|
||||||
|
typeRegex := regexp.MustCompile(`type:\s*['"]([^'"]+)['"]`)
|
||||||
|
if matches := typeRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
column.Type = matches[1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(content, "nullable: true") || strings.Contains(content, "nullable:true") {
|
||||||
|
column.NotNull = false
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(content, "unique: true") || strings.Contains(content, "unique:true") {
|
||||||
|
uniqueConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("uq_%s", column.Name),
|
||||||
|
models.UniqueConstraint,
|
||||||
|
)
|
||||||
|
uniqueConstraint.Schema = table.Schema
|
||||||
|
uniqueConstraint.Table = table.Name
|
||||||
|
uniqueConstraint.Columns = []string{column.Name}
|
||||||
|
table.Constraints[uniqueConstraint.Name] = uniqueConstraint
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(content, "default:") {
|
||||||
|
defaultRegex := regexp.MustCompile(`default:\s*['"]?([^,}'"]+)['"]?`)
|
||||||
|
if matches := defaultRegex.FindStringSubmatch(content); matches != nil {
|
||||||
|
defaultValue := strings.TrimSpace(matches[1])
|
||||||
|
defaultValue = strings.Trim(defaultValue, "'\"")
|
||||||
|
column.Default = defaultValue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveRelationships resolves TypeORM relationships
|
||||||
|
func (r *Reader) resolveRelationships(entities []entityInfo, tableMap map[string]*models.Table, schema *models.Schema) {
|
||||||
|
// Track M2M relations that need join tables
|
||||||
|
type m2mRelation struct {
|
||||||
|
ownerEntity string
|
||||||
|
targetEntity string
|
||||||
|
ownerField string
|
||||||
|
}
|
||||||
|
m2mRelations := make([]m2mRelation, 0)
|
||||||
|
|
||||||
|
for _, entity := range entities {
|
||||||
|
table := tableMap[entity.name]
|
||||||
|
if table == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, field := range entity.fields {
|
||||||
|
// Handle @ManyToOne relations
|
||||||
|
if r.hasDecorator(field, "@ManyToOne") {
|
||||||
|
r.createManyToOneConstraint(field, entity.name, table, tableMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track @ManyToMany relations with @JoinTable
|
||||||
|
if r.hasDecorator(field, "@ManyToMany") && r.hasDecorator(field, "@JoinTable") {
|
||||||
|
targetEntity := r.extractRelationTarget(field)
|
||||||
|
if targetEntity != "" {
|
||||||
|
m2mRelations = append(m2mRelations, m2mRelation{
|
||||||
|
ownerEntity: entity.name,
|
||||||
|
targetEntity: targetEntity,
|
||||||
|
ownerField: field.name,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create join tables for M2M relations
|
||||||
|
for _, rel := range m2mRelations {
|
||||||
|
r.createManyToManyJoinTable(rel.ownerEntity, rel.targetEntity, tableMap, schema)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasDecorator checks if a field has a specific decorator
|
||||||
|
func (r *Reader) hasDecorator(field fieldInfo, decoratorName string) bool {
|
||||||
|
for _, decorator := range field.decorators {
|
||||||
|
if strings.HasPrefix(decorator, decoratorName) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractRelationTarget extracts the target entity from a relation decorator
|
||||||
|
func (r *Reader) extractRelationTarget(field fieldInfo) string {
|
||||||
|
// Remove array brackets from type
|
||||||
|
targetType := strings.TrimSuffix(field.typeName, "[]")
|
||||||
|
targetType = strings.TrimSpace(targetType)
|
||||||
|
return targetType
|
||||||
|
}
|
||||||
|
|
||||||
|
// createManyToOneConstraint creates a foreign key constraint for @ManyToOne
|
||||||
|
func (r *Reader) createManyToOneConstraint(field fieldInfo, entityName string, table *models.Table, tableMap map[string]*models.Table) {
|
||||||
|
targetEntity := r.extractRelationTarget(field)
|
||||||
|
if targetEntity == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get target table to find its PK
|
||||||
|
targetTable := tableMap[targetEntity]
|
||||||
|
if targetTable == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
targetPK := r.getPrimaryKeyColumn(targetTable)
|
||||||
|
if targetPK == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create FK column
|
||||||
|
fkColumnName := fmt.Sprintf("%sId", field.name)
|
||||||
|
fkColumn := models.InitColumn(fkColumnName, table.Name, table.Schema)
|
||||||
|
fkColumn.Type = targetPK.Type
|
||||||
|
|
||||||
|
// Check if nullable option is set in @ManyToOne decorator
|
||||||
|
isNullable := false
|
||||||
|
for _, decorator := range field.decorators {
|
||||||
|
if strings.Contains(decorator, "nullable: true") || strings.Contains(decorator, "nullable:true") {
|
||||||
|
isNullable = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fkColumn.NotNull = !isNullable
|
||||||
|
|
||||||
|
table.Columns[fkColumnName] = fkColumn
|
||||||
|
|
||||||
|
// Create FK constraint
|
||||||
|
constraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", entityName, field.name),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
constraint.Schema = table.Schema
|
||||||
|
constraint.Table = table.Name
|
||||||
|
constraint.Columns = []string{fkColumnName}
|
||||||
|
constraint.ReferencedSchema = "public"
|
||||||
|
constraint.ReferencedTable = targetEntity
|
||||||
|
constraint.ReferencedColumns = []string{targetPK.Name}
|
||||||
|
constraint.OnDelete = "CASCADE"
|
||||||
|
|
||||||
|
table.Constraints[constraint.Name] = constraint
|
||||||
|
}
|
||||||
|
|
||||||
|
// createManyToManyJoinTable creates a join table for M2M relations
|
||||||
|
func (r *Reader) createManyToManyJoinTable(entity1, entity2 string, tableMap map[string]*models.Table, schema *models.Schema) {
|
||||||
|
// TypeORM naming convention: entity1_entity2_entity1field
|
||||||
|
// We'll simplify to entity1_entity2
|
||||||
|
joinTableName := fmt.Sprintf("%s_%s", strings.ToLower(entity1), strings.ToLower(entity2))
|
||||||
|
|
||||||
|
// Check if join table already exists
|
||||||
|
if _, exists := tableMap[joinTableName]; exists {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get PKs from both tables
|
||||||
|
table1 := tableMap[entity1]
|
||||||
|
table2 := tableMap[entity2]
|
||||||
|
if table1 == nil || table2 == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
pk1 := r.getPrimaryKeyColumn(table1)
|
||||||
|
pk2 := r.getPrimaryKeyColumn(table2)
|
||||||
|
if pk1 == nil || pk2 == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create join table
|
||||||
|
joinTable := models.InitTable(joinTableName, "public")
|
||||||
|
|
||||||
|
// Create FK columns
|
||||||
|
fkCol1Name := fmt.Sprintf("%sId", strings.ToLower(entity1))
|
||||||
|
fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
|
||||||
|
fkCol1.Type = pk1.Type
|
||||||
|
fkCol1.NotNull = true
|
||||||
|
fkCol1.IsPrimaryKey = true
|
||||||
|
joinTable.Columns[fkCol1Name] = fkCol1
|
||||||
|
|
||||||
|
fkCol2Name := fmt.Sprintf("%sId", strings.ToLower(entity2))
|
||||||
|
fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
|
||||||
|
fkCol2.Type = pk2.Type
|
||||||
|
fkCol2.NotNull = true
|
||||||
|
fkCol2.IsPrimaryKey = true
|
||||||
|
joinTable.Columns[fkCol2Name] = fkCol2
|
||||||
|
|
||||||
|
// Create composite PK constraint
|
||||||
|
pkConstraint := models.InitConstraint(
|
||||||
|
fmt.Sprintf("pk_%s", joinTableName),
|
||||||
|
models.PrimaryKeyConstraint,
|
||||||
|
)
|
||||||
|
pkConstraint.Schema = "public"
|
||||||
|
pkConstraint.Table = joinTableName
|
||||||
|
pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
|
||||||
|
joinTable.Constraints[pkConstraint.Name] = pkConstraint
|
||||||
|
|
||||||
|
// Create FK constraints
|
||||||
|
fk1 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, entity1),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk1.Schema = "public"
|
||||||
|
fk1.Table = joinTableName
|
||||||
|
fk1.Columns = []string{fkCol1Name}
|
||||||
|
fk1.ReferencedSchema = "public"
|
||||||
|
fk1.ReferencedTable = entity1
|
||||||
|
fk1.ReferencedColumns = []string{pk1.Name}
|
||||||
|
fk1.OnDelete = "CASCADE"
|
||||||
|
joinTable.Constraints[fk1.Name] = fk1
|
||||||
|
|
||||||
|
fk2 := models.InitConstraint(
|
||||||
|
fmt.Sprintf("fk_%s_%s", joinTableName, entity2),
|
||||||
|
models.ForeignKeyConstraint,
|
||||||
|
)
|
||||||
|
fk2.Schema = "public"
|
||||||
|
fk2.Table = joinTableName
|
||||||
|
fk2.Columns = []string{fkCol2Name}
|
||||||
|
fk2.ReferencedSchema = "public"
|
||||||
|
fk2.ReferencedTable = entity2
|
||||||
|
fk2.ReferencedColumns = []string{pk2.Name}
|
||||||
|
fk2.OnDelete = "CASCADE"
|
||||||
|
joinTable.Constraints[fk2.Name] = fk2
|
||||||
|
|
||||||
|
// Add join table to schema
|
||||||
|
schema.Tables = append(schema.Tables, joinTable)
|
||||||
|
tableMap[joinTableName] = joinTable
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPrimaryKeyColumn returns the primary key column of a table
|
||||||
|
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
|
||||||
|
if table == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
return col
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
159
pkg/readers/yaml/README.md
Normal file
159
pkg/readers/yaml/README.md
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
# YAML Reader
|
||||||
|
|
||||||
|
Reads database schema definitions from YAML files.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The YAML Reader parses YAML files that define database schemas in RelSpec's canonical YAML format and converts them into RelSpec's internal database model representation.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Reads RelSpec's standard YAML schema format
|
||||||
|
- Human-readable alternative to JSON format
|
||||||
|
- Supports complete schema representation including:
|
||||||
|
- Databases and schemas
|
||||||
|
- Tables, columns, and data types
|
||||||
|
- Constraints (PK, FK, unique, check)
|
||||||
|
- Indexes
|
||||||
|
- Relationships
|
||||||
|
- Views and sequences
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &readers.ReaderOptions{
|
||||||
|
FilePath: "/path/to/schema.yaml",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := yaml.NewReader(options)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Found %d schemas\n", len(db.Schemas))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read YAML schema and convert to GORM models
|
||||||
|
relspec --input yaml --in-file schema.yaml --output gorm --out-file models.go
|
||||||
|
|
||||||
|
# Convert YAML to PostgreSQL DDL
|
||||||
|
relspec --input yaml --in-file database.yaml --output pgsql --out-file schema.sql
|
||||||
|
|
||||||
|
# Transform YAML to JSON
|
||||||
|
relspec --input yaml --in-file schema.yaml --output json --out-file schema.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example YAML Schema
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
name: myapp
|
||||||
|
database_type: postgresql
|
||||||
|
schemas:
|
||||||
|
- name: public
|
||||||
|
tables:
|
||||||
|
- name: users
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
id:
|
||||||
|
name: id
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
is_primary_key: true
|
||||||
|
auto_increment: true
|
||||||
|
sequence: 1
|
||||||
|
username:
|
||||||
|
name: username
|
||||||
|
type: varchar
|
||||||
|
length: 50
|
||||||
|
not_null: true
|
||||||
|
sequence: 2
|
||||||
|
email:
|
||||||
|
name: email
|
||||||
|
type: varchar
|
||||||
|
length: 100
|
||||||
|
not_null: true
|
||||||
|
sequence: 3
|
||||||
|
constraints:
|
||||||
|
pk_users:
|
||||||
|
name: pk_users
|
||||||
|
type: PRIMARY KEY
|
||||||
|
columns:
|
||||||
|
- id
|
||||||
|
uq_users_username:
|
||||||
|
name: uq_users_username
|
||||||
|
type: UNIQUE
|
||||||
|
columns:
|
||||||
|
- username
|
||||||
|
indexes:
|
||||||
|
idx_users_email:
|
||||||
|
name: idx_users_email
|
||||||
|
columns:
|
||||||
|
- email
|
||||||
|
unique: false
|
||||||
|
type: btree
|
||||||
|
- name: posts
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
id:
|
||||||
|
name: id
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
is_primary_key: true
|
||||||
|
sequence: 1
|
||||||
|
user_id:
|
||||||
|
name: user_id
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
sequence: 2
|
||||||
|
title:
|
||||||
|
name: title
|
||||||
|
type: varchar
|
||||||
|
length: 200
|
||||||
|
not_null: true
|
||||||
|
sequence: 3
|
||||||
|
constraints:
|
||||||
|
fk_posts_user_id:
|
||||||
|
name: fk_posts_user_id
|
||||||
|
type: FOREIGN KEY
|
||||||
|
columns:
|
||||||
|
- user_id
|
||||||
|
referenced_table: users
|
||||||
|
referenced_schema: public
|
||||||
|
referenced_columns:
|
||||||
|
- id
|
||||||
|
on_delete: CASCADE
|
||||||
|
on_update: NO ACTION
|
||||||
|
```
|
||||||
|
|
||||||
|
## Schema Structure
|
||||||
|
|
||||||
|
The YAML format mirrors RelSpec's internal model structure with human-readable syntax:
|
||||||
|
|
||||||
|
- Database level: `name`, `database_type`, `schemas`
|
||||||
|
- Schema level: `name`, `tables`, `views`, `sequences`
|
||||||
|
- Table level: `name`, `schema`, `columns`, `constraints`, `indexes`, `relationships`
|
||||||
|
- Column level: `name`, `type`, `length`, `not_null`, `default`, etc.
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- YAML format is more human-readable than JSON
|
||||||
|
- Ideal for manual editing and version control
|
||||||
|
- Comments are supported in YAML
|
||||||
|
- Preserves complete schema information
|
||||||
|
- Can be used for configuration and documentation
|
||||||
129
pkg/writers/bun/README.md
Normal file
129
pkg/writers/bun/README.md
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
# Bun Writer
|
||||||
|
|
||||||
|
Generates Go source files with Bun model definitions from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Bun Writer converts RelSpec's internal database model representation into Go source code with Bun struct definitions, complete with proper tags, relationships, and table configuration.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates Bun-compatible Go structs
|
||||||
|
- Creates proper `bun` struct tags
|
||||||
|
- Adds relationship fields
|
||||||
|
- Supports both single-file and multi-file output
|
||||||
|
- Maps SQL types to Go types
|
||||||
|
- Handles nullable fields with sql.Null* types
|
||||||
|
- Generates table aliases
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "models.go",
|
||||||
|
PackageName: "models",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := bun.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate Bun models from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output bun \
|
||||||
|
--out-file models.go \
|
||||||
|
--package models
|
||||||
|
|
||||||
|
# Convert GORM models to Bun
|
||||||
|
relspec --input gorm --in-file gorm_models.go --output bun --out-file bun_models.go
|
||||||
|
|
||||||
|
# Multi-file output
|
||||||
|
relspec --input json --in-file schema.json --output bun --out-file models/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated Code Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type User struct {
|
||||||
|
bun.BaseModel `bun:"table:users,alias:u"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement" json:"id"`
|
||||||
|
Username string `bun:"username,notnull,unique" json:"username"`
|
||||||
|
Email string `bun:"email,notnull" json:"email"`
|
||||||
|
Bio sql.NullString `bun:"bio" json:"bio,omitempty"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull,default:now()" json:"created_at"`
|
||||||
|
|
||||||
|
// Relationships
|
||||||
|
Posts []*Post `bun:"rel:has-many,join:id=user_id" json:"posts,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post struct {
|
||||||
|
bun.BaseModel `bun:"table:posts,alias:p"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk" json:"id"`
|
||||||
|
UserID int64 `bun:"user_id,notnull" json:"user_id"`
|
||||||
|
Title string `bun:"title,notnull" json:"title"`
|
||||||
|
Content sql.NullString `bun:"content" json:"content,omitempty"`
|
||||||
|
|
||||||
|
// Belongs to
|
||||||
|
User *User `bun:"rel:belongs-to,join:user_id=id" json:"user,omitempty"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported Bun Tags
|
||||||
|
|
||||||
|
- `table` - Table name and alias
|
||||||
|
- `column` - Column name (auto-derived if not specified)
|
||||||
|
- `pk` - Primary key
|
||||||
|
- `autoincrement` - Auto-increment
|
||||||
|
- `notnull` - NOT NULL constraint
|
||||||
|
- `unique` - Unique constraint
|
||||||
|
- `default` - Default value
|
||||||
|
- `rel` - Relationship definition
|
||||||
|
- `type` - Explicit SQL type
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
| SQL Type | Go Type | Nullable Type |
|
||||||
|
|----------|---------|---------------|
|
||||||
|
| bigint | int64 | sql.NullInt64 |
|
||||||
|
| integer | int | sql.NullInt32 |
|
||||||
|
| varchar, text | string | sql.NullString |
|
||||||
|
| boolean | bool | sql.NullBool |
|
||||||
|
| timestamp | time.Time | sql.NullTime |
|
||||||
|
| numeric | float64 | sql.NullFloat64 |
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Model names are derived from table names (singularized, PascalCase)
|
||||||
|
- Table aliases are auto-generated from table names
|
||||||
|
- Multi-file mode: one file per table named `sql_{schema}_{table}.go`
|
||||||
|
- Generated code is auto-formatted
|
||||||
|
- JSON tags are automatically added
|
||||||
@@ -41,12 +41,7 @@ func NewWriter(options *writers.WriterOptions) *Writer {
|
|||||||
// WriteDatabase writes a complete database as Bun models
|
// WriteDatabase writes a complete database as Bun models
|
||||||
func (w *Writer) WriteDatabase(db *models.Database) error {
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
// Check if multi-file mode is enabled
|
// Check if multi-file mode is enabled
|
||||||
multiFile := false
|
multiFile := w.shouldUseMultiFile()
|
||||||
if w.options.Metadata != nil {
|
|
||||||
if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
|
|
||||||
multiFile = mf
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if multiFile {
|
if multiFile {
|
||||||
return w.writeMultiFile(db)
|
return w.writeMultiFile(db)
|
||||||
@@ -346,6 +341,41 @@ func (w *Writer) writeOutput(content string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
|
||||||
|
func (w *Writer) shouldUseMultiFile() bool {
|
||||||
|
// Check if multi_file is explicitly set in metadata
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
|
||||||
|
return mf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-detect based on output path
|
||||||
|
if w.options.OutputPath == "" {
|
||||||
|
// No output path means stdout (single file)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with .go (explicit file)
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, ".go") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with directory separator
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path exists and is a directory
|
||||||
|
info, err := os.Stat(w.options.OutputPath)
|
||||||
|
if err == nil && info.IsDir() {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to single file for ambiguous cases
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
// createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
|
// createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
|
||||||
func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
|
func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
|
||||||
return &models.Database{
|
return &models.Database{
|
||||||
|
|||||||
161
pkg/writers/dbml/README.md
Normal file
161
pkg/writers/dbml/README.md
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
# DBML Writer
|
||||||
|
|
||||||
|
Generates Database Markup Language (DBML) files from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DBML Writer converts RelSpec's internal database model representation into DBML syntax, suitable for use with dbdiagram.io and other DBML-compatible tools.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates DBML syntax
|
||||||
|
- Creates table definitions with columns
|
||||||
|
- Defines relationships
|
||||||
|
- Includes indexes
|
||||||
|
- Adds notes and documentation
|
||||||
|
- Supports enums
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.dbml",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := dbml.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate DBML from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output dbml \
|
||||||
|
--out-file schema.dbml
|
||||||
|
|
||||||
|
# Convert GORM models to DBML
|
||||||
|
relspec --input gorm --in-file models.go --output dbml --out-file database.dbml
|
||||||
|
|
||||||
|
# Convert JSON to DBML for visualization
|
||||||
|
relspec --input json --in-file schema.json --output dbml --out-file diagram.dbml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated DBML Example
|
||||||
|
|
||||||
|
```dbml
|
||||||
|
Project MyDatabase {
|
||||||
|
database_type: 'PostgreSQL'
|
||||||
|
}
|
||||||
|
|
||||||
|
Table users {
|
||||||
|
id bigserial [pk, increment]
|
||||||
|
username varchar(50) [not null, unique]
|
||||||
|
email varchar(100) [not null]
|
||||||
|
bio text [null]
|
||||||
|
created_at timestamp [not null, default: `now()`]
|
||||||
|
|
||||||
|
Note: 'Users table'
|
||||||
|
|
||||||
|
indexes {
|
||||||
|
email [name: 'idx_users_email']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Table posts {
|
||||||
|
id bigserial [pk, increment]
|
||||||
|
user_id bigint [not null]
|
||||||
|
title varchar(200) [not null]
|
||||||
|
content text [null]
|
||||||
|
created_at timestamp [default: `now()`]
|
||||||
|
|
||||||
|
indexes {
|
||||||
|
user_id [name: 'idx_posts_user_id']
|
||||||
|
(user_id, created_at) [name: 'idx_posts_user_created']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ref: posts.user_id > users.id [delete: cascade, update: no action]
|
||||||
|
```
|
||||||
|
|
||||||
|
## DBML Features
|
||||||
|
|
||||||
|
### Table Definitions
|
||||||
|
```dbml
|
||||||
|
Table table_name {
|
||||||
|
column_name type [attributes]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Column Attributes
|
||||||
|
- `pk` - Primary key
|
||||||
|
- `increment` - Auto-increment
|
||||||
|
- `not null` - NOT NULL constraint
|
||||||
|
- `null` - Nullable (explicit)
|
||||||
|
- `unique` - Unique constraint
|
||||||
|
- `default: value` - Default value
|
||||||
|
- `note: 'text'` - Column note
|
||||||
|
|
||||||
|
### Relationships
|
||||||
|
```dbml
|
||||||
|
Ref: table1.column > table2.column
|
||||||
|
Ref: table1.column < table2.column
|
||||||
|
Ref: table1.column - table2.column
|
||||||
|
```
|
||||||
|
|
||||||
|
Relationship types:
|
||||||
|
- `>` - Many-to-one
|
||||||
|
- `<` - One-to-many
|
||||||
|
- `-` - One-to-one
|
||||||
|
|
||||||
|
Relationship actions:
|
||||||
|
```dbml
|
||||||
|
Ref: posts.user_id > users.id [delete: cascade, update: restrict]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Indexes
|
||||||
|
```dbml
|
||||||
|
indexes {
|
||||||
|
column_name
|
||||||
|
(column1, column2) [name: 'idx_name', unique]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
| SQL Type | DBML Type |
|
||||||
|
|----------|-----------|
|
||||||
|
| bigint | bigint |
|
||||||
|
| integer | int |
|
||||||
|
| varchar(n) | varchar(n) |
|
||||||
|
| text | text |
|
||||||
|
| boolean | boolean |
|
||||||
|
| timestamp | timestamp |
|
||||||
|
| date | date |
|
||||||
|
| json | json |
|
||||||
|
| uuid | uuid |
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DBML is designed for database visualization
|
||||||
|
- Can be imported into dbdiagram.io
|
||||||
|
- Human-readable format
|
||||||
|
- Schema names can be included in table names
|
||||||
|
- Comments and notes are preserved
|
||||||
|
- Ideal for documentation and sharing designs
|
||||||
111
pkg/writers/dctx/README.md
Normal file
111
pkg/writers/dctx/README.md
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# DCTX Writer
|
||||||
|
|
||||||
|
Generates Clarion database dictionary (DCTX) files from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DCTX Writer converts RelSpec's internal database model representation into Clarion dictionary XML format, used by the Clarion development platform.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates DCTX XML format
|
||||||
|
- Creates file (table) definitions
|
||||||
|
- Defines fields (columns) with Clarion types
|
||||||
|
- Includes keys (indexes)
|
||||||
|
- Handles relationships
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "database.dctx",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := dctx.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate DCTX from PostgreSQL database (for Clarion migration)
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output dctx \
|
||||||
|
--out-file app.dctx
|
||||||
|
|
||||||
|
# Convert GORM models to DCTX
|
||||||
|
relspec --input gorm --in-file models.go --output dctx --out-file legacy.dctx
|
||||||
|
|
||||||
|
# Convert JSON schema to DCTX
|
||||||
|
relspec --input json --in-file schema.json --output dctx --out-file database.dctx
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
Converts standard SQL types to Clarion types:
|
||||||
|
|
||||||
|
| SQL Type | Clarion Type | Notes |
|
||||||
|
|----------|--------------|-------|
|
||||||
|
| VARCHAR(n) | STRING(n) | Fixed-length string |
|
||||||
|
| TEXT | STRING | Variable length |
|
||||||
|
| INTEGER | LONG | 32-bit integer |
|
||||||
|
| BIGINT | DECIMAL(20,0) | Large integer |
|
||||||
|
| SMALLINT | SHORT | 16-bit integer |
|
||||||
|
| NUMERIC(p,s) | DECIMAL(p,s) | Decimal number |
|
||||||
|
| REAL, FLOAT | REAL | Floating point |
|
||||||
|
| BOOLEAN | BYTE | 0/1 value |
|
||||||
|
| DATE | DATE | Date field |
|
||||||
|
| TIME | TIME | Time field |
|
||||||
|
| TIMESTAMP | LONG | Unix timestamp |
|
||||||
|
|
||||||
|
## DCTX Structure
|
||||||
|
|
||||||
|
DCTX files are XML-based with this structure:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0"?>
|
||||||
|
<dictionary>
|
||||||
|
<file name="USERS" driver="TOPSPEED">
|
||||||
|
<record>
|
||||||
|
<field name="ID" type="LONG" />
|
||||||
|
<field name="USERNAME" type="STRING" bytes="50" />
|
||||||
|
<field name="EMAIL" type="STRING" bytes="100" />
|
||||||
|
</record>
|
||||||
|
<key name="KEY_PRIMARY" primary="true">
|
||||||
|
<field name="ID" />
|
||||||
|
</key>
|
||||||
|
</file>
|
||||||
|
</dictionary>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- File definitions (equivalent to tables)
|
||||||
|
- Field definitions with Clarion-specific types
|
||||||
|
- Key definitions (primary and foreign)
|
||||||
|
- Relationships between files
|
||||||
|
- Driver specifications (TOPSPEED, SQL, etc.)
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DCTX is specific to Clarion development
|
||||||
|
- Useful for legacy system integration
|
||||||
|
- Field names are typically uppercase in Clarion
|
||||||
|
- Supports Clarion-specific attributes
|
||||||
|
- Can be imported into Clarion IDE
|
||||||
182
pkg/writers/drawdb/README.md
Normal file
182
pkg/writers/drawdb/README.md
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
# DrawDB Writer
|
||||||
|
|
||||||
|
Generates DrawDB-compatible JSON files from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The DrawDB Writer converts RelSpec's internal database model representation into JSON format compatible with DrawDB, a free online database design tool.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates DrawDB JSON format
|
||||||
|
- Creates table and field definitions
|
||||||
|
- Defines relationships
|
||||||
|
- Includes visual layout information
|
||||||
|
- Preserves constraints and indexes
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "diagram.json",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := drawdb.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate DrawDB diagram from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output drawdb \
|
||||||
|
--out-file diagram.json
|
||||||
|
|
||||||
|
# Convert GORM models to DrawDB for visualization
|
||||||
|
relspec --input gorm --in-file models.go --output drawdb --out-file design.json
|
||||||
|
|
||||||
|
# Convert JSON schema to DrawDB
|
||||||
|
relspec --input json --in-file schema.json --output drawdb --out-file diagram.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated JSON Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"version": "1.0",
|
||||||
|
"database": "PostgreSQL",
|
||||||
|
"tables": [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"name": "users",
|
||||||
|
"x": 100,
|
||||||
|
"y": 100,
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"name": "id",
|
||||||
|
"type": "BIGINT",
|
||||||
|
"primary": true,
|
||||||
|
"autoIncrement": true,
|
||||||
|
"notNull": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "2",
|
||||||
|
"name": "username",
|
||||||
|
"type": "VARCHAR",
|
||||||
|
"size": 50,
|
||||||
|
"notNull": true,
|
||||||
|
"unique": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "3",
|
||||||
|
"name": "email",
|
||||||
|
"type": "VARCHAR",
|
||||||
|
"size": 100,
|
||||||
|
"notNull": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"indexes": [
|
||||||
|
{
|
||||||
|
"name": "idx_users_email",
|
||||||
|
"fields": ["email"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "2",
|
||||||
|
"name": "posts",
|
||||||
|
"x": 400,
|
||||||
|
"y": 100,
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"name": "id",
|
||||||
|
"type": "BIGINT",
|
||||||
|
"primary": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "2",
|
||||||
|
"name": "user_id",
|
||||||
|
"type": "BIGINT",
|
||||||
|
"notNull": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "3",
|
||||||
|
"name": "title",
|
||||||
|
"type": "VARCHAR",
|
||||||
|
"size": 200,
|
||||||
|
"notNull": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"relationships": [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"source": "2",
|
||||||
|
"target": "1",
|
||||||
|
"sourceField": "user_id",
|
||||||
|
"targetField": "id",
|
||||||
|
"type": "many-to-one",
|
||||||
|
"onDelete": "CASCADE"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## DrawDB Features
|
||||||
|
|
||||||
|
### Table Properties
|
||||||
|
- `id` - Unique table identifier
|
||||||
|
- `name` - Table name
|
||||||
|
- `x`, `y` - Position in diagram
|
||||||
|
- `fields` - Array of field definitions
|
||||||
|
- `indexes` - Array of index definitions
|
||||||
|
|
||||||
|
### Field Properties
|
||||||
|
- `id` - Unique field identifier
|
||||||
|
- `name` - Field name
|
||||||
|
- `type` - Data type (BIGINT, VARCHAR, etc.)
|
||||||
|
- `size` - Length for string types
|
||||||
|
- `primary` - Primary key flag
|
||||||
|
- `notNull` - NOT NULL constraint
|
||||||
|
- `unique` - Unique constraint
|
||||||
|
- `autoIncrement` - Auto-increment flag
|
||||||
|
- `default` - Default value
|
||||||
|
|
||||||
|
### Relationship Properties
|
||||||
|
- `id` - Unique relationship identifier
|
||||||
|
- `source` - Source table ID
|
||||||
|
- `target` - Target table ID
|
||||||
|
- `sourceField` - Foreign key field
|
||||||
|
- `targetField` - Referenced field
|
||||||
|
- `type` - Relationship type (one-to-one, one-to-many, many-to-one)
|
||||||
|
- `onDelete` - Delete action
|
||||||
|
- `onUpdate` - Update action
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- DrawDB is available at drawdb.vercel.app
|
||||||
|
- Generated files can be imported for visual editing
|
||||||
|
- Visual positions (x, y) are auto-generated
|
||||||
|
- Ideal for creating ERD diagrams
|
||||||
|
- Supports modern database features
|
||||||
|
- Free and open-source tool
|
||||||
120
pkg/writers/drizzle/README.md
Normal file
120
pkg/writers/drizzle/README.md
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
# Drizzle Writer
|
||||||
|
|
||||||
|
Generates TypeScript/JavaScript files with Drizzle ORM schema definitions from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Drizzle Writer converts RelSpec's internal database model representation into TypeScript source code with Drizzle ORM schema definitions, including tables, columns, relationships, and constraints.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates Drizzle-compatible TypeScript schema
|
||||||
|
- Supports PostgreSQL and MySQL schemas
|
||||||
|
- Creates table definitions with proper column types
|
||||||
|
- Generates relationship definitions
|
||||||
|
- Handles constraints and indexes
|
||||||
|
- Outputs formatted TypeScript code
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.ts",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"database_type": "postgresql", // or "mysql"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := drizzle.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate Drizzle schema from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output drizzle \
|
||||||
|
--out-file schema.ts
|
||||||
|
|
||||||
|
# Convert GORM models to Drizzle
|
||||||
|
relspec --input gorm --in-file models.go --output drizzle --out-file schema.ts
|
||||||
|
|
||||||
|
# Convert JSON schema to Drizzle
|
||||||
|
relspec --input json --in-file schema.json --output drizzle --out-file db/schema.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated Code Example
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { pgTable, serial, varchar, text, timestamp, integer } from 'drizzle-orm/pg-core';
|
||||||
|
import { relations } from 'drizzle-orm';
|
||||||
|
|
||||||
|
export const users = pgTable('users', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
username: varchar('username', { length: 50 }).notNull().unique(),
|
||||||
|
email: varchar('email', { length: 100 }).notNull(),
|
||||||
|
bio: text('bio'),
|
||||||
|
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const posts = pgTable('posts', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
|
||||||
|
title: varchar('title', { length: 200 }).notNull(),
|
||||||
|
content: text('content'),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const usersRelations = relations(users, ({ many }) => ({
|
||||||
|
posts: many(posts),
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const postsRelations = relations(posts, ({ one }) => ({
|
||||||
|
user: one(users, {
|
||||||
|
fields: [posts.userId],
|
||||||
|
references: [users.id],
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported Column Types
|
||||||
|
|
||||||
|
### PostgreSQL
|
||||||
|
- `serial`, `bigserial` - Auto-increment integers
|
||||||
|
- `integer`, `bigint`, `smallint` - Integer types
|
||||||
|
- `varchar`, `text` - String types
|
||||||
|
- `boolean` - Boolean
|
||||||
|
- `timestamp`, `date`, `time` - Date/time types
|
||||||
|
- `json`, `jsonb` - JSON types
|
||||||
|
- `uuid` - UUID type
|
||||||
|
|
||||||
|
### MySQL
|
||||||
|
- `int`, `bigint`, `smallint` - Integer types
|
||||||
|
- `varchar`, `text` - String types
|
||||||
|
- `boolean` - Boolean
|
||||||
|
- `datetime`, `timestamp` - Date/time types
|
||||||
|
- `json` - JSON type
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Table names and column names are preserved as-is
|
||||||
|
- Relationships are generated as separate relation definitions
|
||||||
|
- Constraint actions (CASCADE, etc.) are included in references
|
||||||
|
- Schema names other than 'public' are supported
|
||||||
|
- Output is formatted TypeScript code
|
||||||
221
pkg/writers/drizzle/template_data.go
Normal file
221
pkg/writers/drizzle/template_data.go
Normal file
@@ -0,0 +1,221 @@
|
|||||||
|
package drizzle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TemplateData represents the data passed to the template for code generation
|
||||||
|
type TemplateData struct {
|
||||||
|
Imports []string
|
||||||
|
Enums []*EnumData
|
||||||
|
Tables []*TableData
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnumData represents an enum in the schema
|
||||||
|
type EnumData struct {
|
||||||
|
Name string // Enum name (PascalCase)
|
||||||
|
VarName string // Variable name for the enum (camelCase)
|
||||||
|
Values []string // Enum values
|
||||||
|
ValuesStr string // Comma-separated quoted values for pgEnum()
|
||||||
|
TypeUnion string // TypeScript union type (e.g., "'admin' | 'user' | 'guest'")
|
||||||
|
SchemaName string // Schema name
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableData represents a table in the template
|
||||||
|
type TableData struct {
|
||||||
|
Name string // Table variable name (camelCase, e.g., users)
|
||||||
|
TableName string // Actual database table name (e.g., users)
|
||||||
|
TypeName string // TypeScript type name (PascalCase, e.g., Users)
|
||||||
|
Columns []*ColumnData // Column definitions
|
||||||
|
Indexes []*IndexData // Index definitions
|
||||||
|
Comment string // Table comment
|
||||||
|
SchemaName string // Schema name
|
||||||
|
NeedsSQLTag bool // Whether we need to import 'sql' from drizzle-orm
|
||||||
|
IndexColumnFields []string // Column field names used in indexes (for destructuring)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ColumnData represents a column in a table
|
||||||
|
type ColumnData struct {
|
||||||
|
Name string // Column name in database
|
||||||
|
FieldName string // Field name in TypeScript (camelCase)
|
||||||
|
DrizzleChain string // Complete Drizzle column chain (e.g., "integer('id').primaryKey()")
|
||||||
|
TypeScriptType string // TypeScript type for interface (e.g., "string", "number | null")
|
||||||
|
IsForeignKey bool // Whether this is a foreign key
|
||||||
|
ReferencesLine string // The .references() line if FK
|
||||||
|
Comment string // Column comment
|
||||||
|
}
|
||||||
|
|
||||||
|
// IndexData represents an index definition
|
||||||
|
type IndexData struct {
|
||||||
|
Name string // Index name
|
||||||
|
Columns []string // Column names
|
||||||
|
IsUnique bool // Whether it's a unique index
|
||||||
|
Definition string // Complete index definition line
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTemplateData creates a new TemplateData
|
||||||
|
func NewTemplateData() *TemplateData {
|
||||||
|
return &TemplateData{
|
||||||
|
Imports: make([]string, 0),
|
||||||
|
Enums: make([]*EnumData, 0),
|
||||||
|
Tables: make([]*TableData, 0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddImport adds an import to the template data (deduplicates automatically)
|
||||||
|
func (td *TemplateData) AddImport(importLine string) {
|
||||||
|
// Check if already exists
|
||||||
|
for _, imp := range td.Imports {
|
||||||
|
if imp == importLine {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
td.Imports = append(td.Imports, importLine)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddEnum adds an enum to the template data
|
||||||
|
func (td *TemplateData) AddEnum(enum *EnumData) {
|
||||||
|
td.Enums = append(td.Enums, enum)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTable adds a table to the template data
|
||||||
|
func (td *TemplateData) AddTable(table *TableData) {
|
||||||
|
td.Tables = append(td.Tables, table)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FinalizeImports sorts imports
|
||||||
|
func (td *TemplateData) FinalizeImports() {
|
||||||
|
sort.Strings(td.Imports)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEnumData creates EnumData from a models.Enum
|
||||||
|
func NewEnumData(enum *models.Enum, tm *TypeMapper) *EnumData {
|
||||||
|
// Keep enum name as-is (it should already be PascalCase from the source)
|
||||||
|
enumName := enum.Name
|
||||||
|
// Variable name is camelCase version
|
||||||
|
varName := tm.ToCamelCase(enum.Name)
|
||||||
|
|
||||||
|
// Format values as comma-separated quoted strings for pgEnum()
|
||||||
|
quotedValues := make([]string, len(enum.Values))
|
||||||
|
for i, v := range enum.Values {
|
||||||
|
quotedValues[i] = "'" + v + "'"
|
||||||
|
}
|
||||||
|
valuesStr := ""
|
||||||
|
for i, qv := range quotedValues {
|
||||||
|
if i > 0 {
|
||||||
|
valuesStr += ", "
|
||||||
|
}
|
||||||
|
valuesStr += qv
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build TypeScript union type (e.g., "'admin' | 'user' | 'guest'")
|
||||||
|
typeUnion := ""
|
||||||
|
for i, qv := range quotedValues {
|
||||||
|
if i > 0 {
|
||||||
|
typeUnion += " | "
|
||||||
|
}
|
||||||
|
typeUnion += qv
|
||||||
|
}
|
||||||
|
|
||||||
|
return &EnumData{
|
||||||
|
Name: enumName,
|
||||||
|
VarName: varName,
|
||||||
|
Values: enum.Values,
|
||||||
|
ValuesStr: valuesStr,
|
||||||
|
TypeUnion: typeUnion,
|
||||||
|
SchemaName: enum.Schema,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTableData creates TableData from a models.Table
|
||||||
|
func NewTableData(table *models.Table, tm *TypeMapper) *TableData {
|
||||||
|
tableName := tm.ToCamelCase(table.Name)
|
||||||
|
typeName := tm.ToPascalCase(table.Name)
|
||||||
|
|
||||||
|
return &TableData{
|
||||||
|
Name: tableName,
|
||||||
|
TableName: table.Name,
|
||||||
|
TypeName: typeName,
|
||||||
|
Columns: make([]*ColumnData, 0),
|
||||||
|
Indexes: make([]*IndexData, 0),
|
||||||
|
Comment: formatComment(table.Description, table.Comment),
|
||||||
|
SchemaName: table.Schema,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddColumn adds a column to the table data
|
||||||
|
func (td *TableData) AddColumn(col *ColumnData) {
|
||||||
|
td.Columns = append(td.Columns, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddIndex adds an index to the table data
|
||||||
|
func (td *TableData) AddIndex(idx *IndexData) {
|
||||||
|
td.Indexes = append(td.Indexes, idx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewColumnData creates ColumnData from a models.Column
|
||||||
|
func NewColumnData(col *models.Column, table *models.Table, tm *TypeMapper, isEnum bool) *ColumnData {
|
||||||
|
fieldName := tm.ToCamelCase(col.Name)
|
||||||
|
drizzleChain := tm.BuildColumnChain(col, table, isEnum)
|
||||||
|
|
||||||
|
return &ColumnData{
|
||||||
|
Name: col.Name,
|
||||||
|
FieldName: fieldName,
|
||||||
|
DrizzleChain: drizzleChain,
|
||||||
|
Comment: formatComment(col.Description, col.Comment),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewIndexData creates IndexData from a models.Index
|
||||||
|
func NewIndexData(index *models.Index, tableVar string, tm *TypeMapper) *IndexData {
|
||||||
|
indexName := tm.ToCamelCase(index.Name) + "Idx"
|
||||||
|
|
||||||
|
// Build column references as field names (will be used with destructuring)
|
||||||
|
colRefs := make([]string, len(index.Columns))
|
||||||
|
for i, colName := range index.Columns {
|
||||||
|
// Use just the field name for destructured parameters
|
||||||
|
colRefs[i] = tm.ToCamelCase(colName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the complete definition
|
||||||
|
// Example: index('email_idx').on(email)
|
||||||
|
// or: uniqueIndex('unique_email_idx').on(email)
|
||||||
|
definition := ""
|
||||||
|
if index.Unique {
|
||||||
|
definition = "uniqueIndex('" + index.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
|
||||||
|
} else {
|
||||||
|
definition = "index('" + index.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
|
||||||
|
}
|
||||||
|
|
||||||
|
return &IndexData{
|
||||||
|
Name: indexName,
|
||||||
|
Columns: index.Columns,
|
||||||
|
IsUnique: index.Unique,
|
||||||
|
Definition: definition,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatComment combines description and comment into a single comment string
|
||||||
|
func formatComment(description, comment string) string {
|
||||||
|
if description != "" && comment != "" {
|
||||||
|
return description + " - " + comment
|
||||||
|
}
|
||||||
|
if description != "" {
|
||||||
|
return description
|
||||||
|
}
|
||||||
|
return comment
|
||||||
|
}
|
||||||
|
|
||||||
|
// joinStrings joins a slice of strings with a separator
|
||||||
|
func joinStrings(strs []string, sep string) string {
|
||||||
|
result := ""
|
||||||
|
for i, s := range strs {
|
||||||
|
if i > 0 {
|
||||||
|
result += sep
|
||||||
|
}
|
||||||
|
result += s
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
64
pkg/writers/drizzle/templates.go
Normal file
64
pkg/writers/drizzle/templates.go
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
package drizzle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
// schemaTemplate defines the template for generating Drizzle schemas
|
||||||
|
const schemaTemplate = `// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
{{range .Imports}}{{.}}
|
||||||
|
{{end}}
|
||||||
|
{{if .Enums}}
|
||||||
|
// Enums
|
||||||
|
{{range .Enums}}export const {{.VarName}} = pgEnum('{{.Name}}', [{{.ValuesStr}}]);
|
||||||
|
export type {{.Name}} = {{.TypeUnion}};
|
||||||
|
{{end}}
|
||||||
|
{{end}}
|
||||||
|
{{range .Tables}}// Table: {{.TableName}}{{if .Comment}} - {{.Comment}}{{end}}
|
||||||
|
export interface {{.TypeName}} {
|
||||||
|
{{- range $i, $col := .Columns}}
|
||||||
|
{{$col.FieldName}}: {{$col.TypeScriptType}};{{if $col.Comment}} // {{$col.Comment}}{{end}}
|
||||||
|
{{- end}}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const {{.Name}} = pgTable('{{.TableName}}', {
|
||||||
|
{{- range $i, $col := .Columns}}
|
||||||
|
{{$col.FieldName}}: {{$col.DrizzleChain}},{{if $col.Comment}} // {{$col.Comment}}{{end}}
|
||||||
|
{{- end}}
|
||||||
|
}{{if .Indexes}}{{if .IndexColumnFields}}, ({ {{range $i, $field := .IndexColumnFields}}{{if $i}}, {{end}}{{$field}}{{end}} }) => [{{else}}, (table) => [{{end}}
|
||||||
|
{{- range $i, $idx := .Indexes}}
|
||||||
|
{{$idx.Definition}},
|
||||||
|
{{- end}}
|
||||||
|
]{{end}});
|
||||||
|
|
||||||
|
export type New{{.TypeName}} = typeof {{.Name}}.$inferInsert;
|
||||||
|
{{end}}`
|
||||||
|
|
||||||
|
// Templates holds the parsed templates
|
||||||
|
type Templates struct {
|
||||||
|
schemaTmpl *template.Template
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTemplates creates and parses the templates
|
||||||
|
func NewTemplates() (*Templates, error) {
|
||||||
|
schemaTmpl, err := template.New("schema").Parse(schemaTemplate)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Templates{
|
||||||
|
schemaTmpl: schemaTmpl,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateCode executes the template with the given data
|
||||||
|
func (t *Templates) GenerateCode(data *TemplateData) (string, error) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
err := t.schemaTmpl.Execute(&buf, data)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
318
pkg/writers/drizzle/type_mapper.go
Normal file
318
pkg/writers/drizzle/type_mapper.go
Normal file
@@ -0,0 +1,318 @@
|
|||||||
|
package drizzle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TypeMapper handles SQL to Drizzle type conversions
|
||||||
|
type TypeMapper struct{}
|
||||||
|
|
||||||
|
// NewTypeMapper creates a new TypeMapper instance
|
||||||
|
func NewTypeMapper() *TypeMapper {
|
||||||
|
return &TypeMapper{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SQLTypeToDrizzle converts SQL types to Drizzle column type functions
|
||||||
|
// Returns the Drizzle column constructor (e.g., "integer", "varchar", "text")
|
||||||
|
func (tm *TypeMapper) SQLTypeToDrizzle(sqlType string) string {
|
||||||
|
sqlTypeLower := strings.ToLower(sqlType)
|
||||||
|
|
||||||
|
// PostgreSQL type mapping to Drizzle
|
||||||
|
typeMap := map[string]string{
|
||||||
|
// Integer types
|
||||||
|
"integer": "integer",
|
||||||
|
"int": "integer",
|
||||||
|
"int4": "integer",
|
||||||
|
"smallint": "smallint",
|
||||||
|
"int2": "smallint",
|
||||||
|
"bigint": "bigint",
|
||||||
|
"int8": "bigint",
|
||||||
|
|
||||||
|
// Serial types
|
||||||
|
"serial": "serial",
|
||||||
|
"serial4": "serial",
|
||||||
|
"smallserial": "smallserial",
|
||||||
|
"serial2": "smallserial",
|
||||||
|
"bigserial": "bigserial",
|
||||||
|
"serial8": "bigserial",
|
||||||
|
|
||||||
|
// Numeric types
|
||||||
|
"numeric": "numeric",
|
||||||
|
"decimal": "numeric",
|
||||||
|
"real": "real",
|
||||||
|
"float4": "real",
|
||||||
|
"double precision": "doublePrecision",
|
||||||
|
"float": "doublePrecision",
|
||||||
|
"float8": "doublePrecision",
|
||||||
|
|
||||||
|
// Character types
|
||||||
|
"text": "text",
|
||||||
|
"varchar": "varchar",
|
||||||
|
"character varying": "varchar",
|
||||||
|
"char": "char",
|
||||||
|
"character": "char",
|
||||||
|
|
||||||
|
// Boolean
|
||||||
|
"boolean": "boolean",
|
||||||
|
"bool": "boolean",
|
||||||
|
|
||||||
|
// Binary
|
||||||
|
"bytea": "bytea",
|
||||||
|
|
||||||
|
// JSON types
|
||||||
|
"json": "json",
|
||||||
|
"jsonb": "jsonb",
|
||||||
|
|
||||||
|
// Date/Time types
|
||||||
|
"time": "time",
|
||||||
|
"timetz": "time",
|
||||||
|
"timestamp": "timestamp",
|
||||||
|
"timestamptz": "timestamp",
|
||||||
|
"date": "date",
|
||||||
|
"interval": "interval",
|
||||||
|
|
||||||
|
// UUID
|
||||||
|
"uuid": "uuid",
|
||||||
|
|
||||||
|
// Geometric types
|
||||||
|
"point": "point",
|
||||||
|
"line": "line",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for exact match first
|
||||||
|
if drizzleType, ok := typeMap[sqlTypeLower]; ok {
|
||||||
|
return drizzleType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for partial matches (e.g., "varchar(255)" -> "varchar")
|
||||||
|
for sqlPattern, drizzleType := range typeMap {
|
||||||
|
if strings.HasPrefix(sqlTypeLower, sqlPattern) {
|
||||||
|
return drizzleType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to text for unknown types
|
||||||
|
return "text"
|
||||||
|
}
|
||||||
|
|
||||||
|
// BuildColumnChain builds the complete column definition chain for Drizzle
|
||||||
|
// Example: integer('id').primaryKey().notNull()
|
||||||
|
func (tm *TypeMapper) BuildColumnChain(col *models.Column, table *models.Table, isEnum bool) string {
|
||||||
|
var parts []string
|
||||||
|
|
||||||
|
// Determine Drizzle column type
|
||||||
|
var drizzleType string
|
||||||
|
if isEnum {
|
||||||
|
// For enum types, use the type name directly
|
||||||
|
drizzleType = fmt.Sprintf("pgEnum('%s')", col.Type)
|
||||||
|
} else {
|
||||||
|
drizzleType = tm.SQLTypeToDrizzle(col.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start with column type and name
|
||||||
|
// Note: column name is passed as first argument to the column constructor
|
||||||
|
base := fmt.Sprintf("%s('%s')", drizzleType, col.Name)
|
||||||
|
parts = append(parts, base)
|
||||||
|
|
||||||
|
// Add column modifiers in order
|
||||||
|
modifiers := tm.buildColumnModifiers(col, table)
|
||||||
|
if len(modifiers) > 0 {
|
||||||
|
parts = append(parts, modifiers...)
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(parts, ".")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildColumnModifiers builds an array of method calls for column modifiers
|
||||||
|
func (tm *TypeMapper) buildColumnModifiers(col *models.Column, table *models.Table) []string {
|
||||||
|
var modifiers []string
|
||||||
|
|
||||||
|
// Primary key
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
modifiers = append(modifiers, "primaryKey()")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not null constraint
|
||||||
|
if col.NotNull && !col.IsPrimaryKey {
|
||||||
|
modifiers = append(modifiers, "notNull()")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique constraint (check if there's a single-column unique constraint)
|
||||||
|
if tm.hasUniqueConstraint(col.Name, table) {
|
||||||
|
modifiers = append(modifiers, "unique()")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default value
|
||||||
|
if col.AutoIncrement {
|
||||||
|
// For auto-increment, use generatedAlwaysAsIdentity()
|
||||||
|
modifiers = append(modifiers, "generatedAlwaysAsIdentity()")
|
||||||
|
} else if col.Default != nil {
|
||||||
|
defaultValue := tm.formatDefaultValue(col.Default)
|
||||||
|
if defaultValue != "" {
|
||||||
|
modifiers = append(modifiers, fmt.Sprintf("default(%s)", defaultValue))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return modifiers
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatDefaultValue formats a default value for Drizzle
|
||||||
|
func (tm *TypeMapper) formatDefaultValue(defaultValue any) string {
|
||||||
|
switch v := defaultValue.(type) {
|
||||||
|
case string:
|
||||||
|
if v == "now()" || v == "CURRENT_TIMESTAMP" {
|
||||||
|
return "sql`now()`"
|
||||||
|
} else if v == "gen_random_uuid()" || strings.Contains(strings.ToLower(v), "uuid") {
|
||||||
|
return "sql`gen_random_uuid()`"
|
||||||
|
} else {
|
||||||
|
// Try to parse as number first
|
||||||
|
// Check if it's a numeric string that should be a number
|
||||||
|
if isNumericString(v) {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
// String literal
|
||||||
|
return fmt.Sprintf("'%s'", strings.ReplaceAll(v, "'", "\\'"))
|
||||||
|
}
|
||||||
|
case bool:
|
||||||
|
if v {
|
||||||
|
return "true"
|
||||||
|
}
|
||||||
|
return "false"
|
||||||
|
case int, int64, int32, int16, int8:
|
||||||
|
return fmt.Sprintf("%v", v)
|
||||||
|
case float32, float64:
|
||||||
|
return fmt.Sprintf("%v", v)
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("%v", v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// isNumericString checks if a string represents a number
|
||||||
|
func isNumericString(s string) bool {
|
||||||
|
if s == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// Simple check for numeric strings
|
||||||
|
for i, c := range s {
|
||||||
|
if i == 0 && c == '-' {
|
||||||
|
continue // Allow negative sign at start
|
||||||
|
}
|
||||||
|
if c < '0' || c > '9' {
|
||||||
|
if c != '.' {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasUniqueConstraint checks if a column has a unique constraint
|
||||||
|
func (tm *TypeMapper) hasUniqueConstraint(colName string, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.UniqueConstraint &&
|
||||||
|
len(constraint.Columns) == 1 &&
|
||||||
|
constraint.Columns[0] == colName {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// BuildReferencesChain builds the .references() chain for foreign key columns
|
||||||
|
func (tm *TypeMapper) BuildReferencesChain(fk *models.Constraint, referencedTable string) string {
|
||||||
|
// Example: .references(() => users.id)
|
||||||
|
if len(fk.ReferencedColumns) > 0 {
|
||||||
|
// Use the referenced table variable name (camelCase)
|
||||||
|
refTableVar := tm.ToCamelCase(referencedTable)
|
||||||
|
refColumn := fk.ReferencedColumns[0]
|
||||||
|
return fmt.Sprintf("references(() => %s.%s)", refTableVar, refColumn)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToCamelCase converts snake_case or PascalCase to camelCase
|
||||||
|
func (tm *TypeMapper) ToCamelCase(s string) string {
|
||||||
|
if s == "" {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's snake_case
|
||||||
|
if strings.Contains(s, "_") {
|
||||||
|
parts := strings.Split(s, "_")
|
||||||
|
if len(parts) == 0 {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// First part stays lowercase
|
||||||
|
result := strings.ToLower(parts[0])
|
||||||
|
|
||||||
|
// Capitalize first letter of remaining parts
|
||||||
|
for i := 1; i < len(parts); i++ {
|
||||||
|
if len(parts[i]) > 0 {
|
||||||
|
result += strings.ToUpper(parts[i][:1]) + strings.ToLower(parts[i][1:])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, assume it's PascalCase - just lowercase the first letter
|
||||||
|
return strings.ToLower(s[:1]) + s[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToPascalCase converts snake_case to PascalCase
|
||||||
|
func (tm *TypeMapper) ToPascalCase(s string) string {
|
||||||
|
parts := strings.Split(s, "_")
|
||||||
|
var result string
|
||||||
|
|
||||||
|
for _, part := range parts {
|
||||||
|
if len(part) > 0 {
|
||||||
|
result += strings.ToUpper(part[:1]) + strings.ToLower(part[1:])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// DrizzleTypeToTypeScript converts Drizzle column types to TypeScript types
|
||||||
|
func (tm *TypeMapper) DrizzleTypeToTypeScript(drizzleType string, isEnum bool, enumName string) string {
|
||||||
|
if isEnum {
|
||||||
|
return enumName
|
||||||
|
}
|
||||||
|
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"integer": "number",
|
||||||
|
"bigint": "number",
|
||||||
|
"smallint": "number",
|
||||||
|
"serial": "number",
|
||||||
|
"bigserial": "number",
|
||||||
|
"smallserial": "number",
|
||||||
|
"numeric": "number",
|
||||||
|
"real": "number",
|
||||||
|
"doublePrecision": "number",
|
||||||
|
"text": "string",
|
||||||
|
"varchar": "string",
|
||||||
|
"char": "string",
|
||||||
|
"boolean": "boolean",
|
||||||
|
"bytea": "Buffer",
|
||||||
|
"json": "any",
|
||||||
|
"jsonb": "any",
|
||||||
|
"timestamp": "Date",
|
||||||
|
"date": "Date",
|
||||||
|
"time": "Date",
|
||||||
|
"interval": "string",
|
||||||
|
"uuid": "string",
|
||||||
|
"point": "{ x: number; y: number }",
|
||||||
|
"line": "{ a: number; b: number; c: number }",
|
||||||
|
}
|
||||||
|
|
||||||
|
if tsType, ok := typeMap[drizzleType]; ok {
|
||||||
|
return tsType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to any for unknown types
|
||||||
|
return "any"
|
||||||
|
}
|
||||||
543
pkg/writers/drizzle/writer.go
Normal file
543
pkg/writers/drizzle/writer.go
Normal file
@@ -0,0 +1,543 @@
|
|||||||
|
package drizzle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Writer implements the writers.Writer interface for Drizzle ORM
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
typeMapper *TypeMapper
|
||||||
|
templates *Templates
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWriter creates a new Drizzle writer with the given options
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
w := &Writer{
|
||||||
|
options: options,
|
||||||
|
typeMapper: NewTypeMapper(),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize templates
|
||||||
|
tmpl, err := NewTemplates()
|
||||||
|
if err != nil {
|
||||||
|
// Should not happen with embedded templates
|
||||||
|
panic(fmt.Sprintf("failed to initialize templates: %v", err))
|
||||||
|
}
|
||||||
|
w.templates = tmpl
|
||||||
|
|
||||||
|
return w
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteDatabase writes a complete database as Drizzle schema
|
||||||
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
|
// Check if multi-file mode is enabled
|
||||||
|
multiFile := w.shouldUseMultiFile()
|
||||||
|
|
||||||
|
if multiFile {
|
||||||
|
return w.writeMultiFile(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
return w.writeSingleFile(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteSchema writes a schema as Drizzle schema
|
||||||
|
func (w *Writer) WriteSchema(schema *models.Schema) error {
|
||||||
|
// Create a temporary database with just this schema
|
||||||
|
db := models.InitDatabase(schema.Name)
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTable writes a single table as a Drizzle schema
|
||||||
|
func (w *Writer) WriteTable(table *models.Table) error {
|
||||||
|
// Create a temporary schema and database
|
||||||
|
schema := models.InitSchema(table.Schema)
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase(schema.Name)
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeSingleFile writes all tables to a single file
|
||||||
|
func (w *Writer) writeSingleFile(db *models.Database) error {
|
||||||
|
templateData := NewTemplateData()
|
||||||
|
|
||||||
|
// Build enum map for quick lookup
|
||||||
|
enumMap := w.buildEnumMap(db)
|
||||||
|
|
||||||
|
// Process all schemas
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
// Add enums
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
enumData := NewEnumData(enum, w.typeMapper)
|
||||||
|
templateData.AddEnum(enumData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add tables
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
tableData := w.buildTableData(table, schema, db, enumMap)
|
||||||
|
templateData.AddTable(tableData)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add imports
|
||||||
|
w.addImports(templateData, db)
|
||||||
|
|
||||||
|
// Finalize imports
|
||||||
|
templateData.FinalizeImports()
|
||||||
|
|
||||||
|
// Generate code
|
||||||
|
code, err := w.templates.GenerateCode(templateData)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to generate code: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write output
|
||||||
|
return w.writeOutput(code)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeMultiFile writes each table to a separate file
|
||||||
|
func (w *Writer) writeMultiFile(db *models.Database) error {
|
||||||
|
// Ensure output path is a directory
|
||||||
|
if w.options.OutputPath == "" {
|
||||||
|
return fmt.Errorf("output path is required for multi-file mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create output directory if it doesn't exist
|
||||||
|
if err := os.MkdirAll(w.options.OutputPath, 0755); err != nil {
|
||||||
|
return fmt.Errorf("failed to create output directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build enum map for quick lookup
|
||||||
|
enumMap := w.buildEnumMap(db)
|
||||||
|
|
||||||
|
// Process all schemas
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
// Write enums file if there are any
|
||||||
|
if len(schema.Enums) > 0 {
|
||||||
|
if err := w.writeEnumsFile(schema); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write each table to a separate file
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if err := w.writeTableFile(table, schema, db, enumMap); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeEnumsFile writes all enums to a separate file
|
||||||
|
func (w *Writer) writeEnumsFile(schema *models.Schema) error {
|
||||||
|
templateData := NewTemplateData()
|
||||||
|
|
||||||
|
// Add enums
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
enumData := NewEnumData(enum, w.typeMapper)
|
||||||
|
templateData.AddEnum(enumData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add imports for enums
|
||||||
|
templateData.AddImport("import { pgEnum } from 'drizzle-orm/pg-core';")
|
||||||
|
|
||||||
|
// Generate code
|
||||||
|
code, err := w.templates.GenerateCode(templateData)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to generate enums code: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write to enums.ts file
|
||||||
|
filename := filepath.Join(w.options.OutputPath, "enums.ts")
|
||||||
|
return os.WriteFile(filename, []byte(code), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeTableFile writes a single table to its own file
|
||||||
|
func (w *Writer) writeTableFile(table *models.Table, schema *models.Schema, db *models.Database, enumMap map[string]bool) error {
|
||||||
|
templateData := NewTemplateData()
|
||||||
|
|
||||||
|
// Build table data
|
||||||
|
tableData := w.buildTableData(table, schema, db, enumMap)
|
||||||
|
templateData.AddTable(tableData)
|
||||||
|
|
||||||
|
// Add imports
|
||||||
|
w.addImports(templateData, db)
|
||||||
|
|
||||||
|
// If there are enums, add import from enums file
|
||||||
|
if len(schema.Enums) > 0 && w.tableUsesEnum(table, enumMap) {
|
||||||
|
// Import enum definitions from enums.ts
|
||||||
|
enumNames := w.getTableEnumNames(table, schema, enumMap)
|
||||||
|
if len(enumNames) > 0 {
|
||||||
|
importLine := fmt.Sprintf("import { %s } from './enums';", strings.Join(enumNames, ", "))
|
||||||
|
templateData.AddImport(importLine)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Finalize imports
|
||||||
|
templateData.FinalizeImports()
|
||||||
|
|
||||||
|
// Generate code
|
||||||
|
code, err := w.templates.GenerateCode(templateData)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to generate code for table %s: %w", table.Name, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate filename: {tableName}.ts
|
||||||
|
filename := filepath.Join(w.options.OutputPath, table.Name+".ts")
|
||||||
|
return os.WriteFile(filename, []byte(code), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTableData builds TableData from a models.Table
|
||||||
|
func (w *Writer) buildTableData(table *models.Table, schema *models.Schema, db *models.Database, enumMap map[string]bool) *TableData {
|
||||||
|
tableData := NewTableData(table, w.typeMapper)
|
||||||
|
|
||||||
|
// Add columns
|
||||||
|
for _, colName := range w.getSortedColumnNames(table) {
|
||||||
|
col := table.Columns[colName]
|
||||||
|
|
||||||
|
// Check if this column uses an enum
|
||||||
|
isEnum := enumMap[col.Type]
|
||||||
|
|
||||||
|
columnData := NewColumnData(col, table, w.typeMapper, isEnum)
|
||||||
|
|
||||||
|
// Set TypeScript type
|
||||||
|
drizzleType := w.typeMapper.SQLTypeToDrizzle(col.Type)
|
||||||
|
enumName := ""
|
||||||
|
if isEnum {
|
||||||
|
// For enums, use the enum type name
|
||||||
|
enumName = col.Type
|
||||||
|
}
|
||||||
|
baseType := w.typeMapper.DrizzleTypeToTypeScript(drizzleType, isEnum, enumName)
|
||||||
|
|
||||||
|
// Add null union if column is nullable
|
||||||
|
if !col.NotNull && !col.IsPrimaryKey {
|
||||||
|
columnData.TypeScriptType = baseType + " | null"
|
||||||
|
} else {
|
||||||
|
columnData.TypeScriptType = baseType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this column is a foreign key
|
||||||
|
if fk := w.getForeignKeyForColumn(col.Name, table); fk != nil {
|
||||||
|
columnData.IsForeignKey = true
|
||||||
|
refTableName := fk.ReferencedTable
|
||||||
|
refChain := w.typeMapper.BuildReferencesChain(fk, refTableName)
|
||||||
|
if refChain != "" {
|
||||||
|
columnData.ReferencesLine = "." + refChain
|
||||||
|
// Append to the drizzle chain
|
||||||
|
columnData.DrizzleChain += columnData.ReferencesLine
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tableData.AddColumn(columnData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect all column field names that are used in indexes
|
||||||
|
indexColumnFields := make(map[string]bool)
|
||||||
|
|
||||||
|
// Add indexes (excluding single-column unique indexes, which are handled inline)
|
||||||
|
for _, index := range table.Indexes {
|
||||||
|
// Skip single-column unique indexes (handled by .unique() modifier)
|
||||||
|
if index.Unique && len(index.Columns) == 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track which columns are used in indexes
|
||||||
|
for _, colName := range index.Columns {
|
||||||
|
// Find the field name for this column
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
fieldName := w.typeMapper.ToCamelCase(col.Name)
|
||||||
|
indexColumnFields[fieldName] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
indexData := NewIndexData(index, tableData.Name, w.typeMapper)
|
||||||
|
tableData.AddIndex(indexData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add multi-column unique constraints as unique indexes
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.UniqueConstraint && len(constraint.Columns) > 1 {
|
||||||
|
// Create a unique index for this constraint
|
||||||
|
indexData := &IndexData{
|
||||||
|
Name: w.typeMapper.ToCamelCase(constraint.Name) + "Idx",
|
||||||
|
Columns: constraint.Columns,
|
||||||
|
IsUnique: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track which columns are used in indexes
|
||||||
|
for _, colName := range constraint.Columns {
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
fieldName := w.typeMapper.ToCamelCase(col.Name)
|
||||||
|
indexColumnFields[fieldName] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build column references as field names (for destructuring)
|
||||||
|
colRefs := make([]string, len(constraint.Columns))
|
||||||
|
for i, colName := range constraint.Columns {
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
colRefs[i] = w.typeMapper.ToCamelCase(col.Name)
|
||||||
|
} else {
|
||||||
|
colRefs[i] = w.typeMapper.ToCamelCase(colName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
indexData.Definition = "uniqueIndex('" + constraint.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
|
||||||
|
tableData.AddIndex(indexData)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert index column fields map to sorted slice
|
||||||
|
if len(indexColumnFields) > 0 {
|
||||||
|
fields := make([]string, 0, len(indexColumnFields))
|
||||||
|
for field := range indexColumnFields {
|
||||||
|
fields = append(fields, field)
|
||||||
|
}
|
||||||
|
// Sort for consistent output
|
||||||
|
sortStrings(fields)
|
||||||
|
tableData.IndexColumnFields = fields
|
||||||
|
}
|
||||||
|
|
||||||
|
return tableData
|
||||||
|
}
|
||||||
|
|
||||||
|
// sortStrings sorts a slice of strings in place
|
||||||
|
func sortStrings(strs []string) {
|
||||||
|
for i := 0; i < len(strs); i++ {
|
||||||
|
for j := i + 1; j < len(strs); j++ {
|
||||||
|
if strs[i] > strs[j] {
|
||||||
|
strs[i], strs[j] = strs[j], strs[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// addImports adds the necessary imports to the template data
|
||||||
|
func (w *Writer) addImports(templateData *TemplateData, db *models.Database) {
|
||||||
|
// Determine which Drizzle imports we need
|
||||||
|
needsPgTable := len(templateData.Tables) > 0
|
||||||
|
needsPgEnum := len(templateData.Enums) > 0
|
||||||
|
needsIndex := false
|
||||||
|
needsUniqueIndex := false
|
||||||
|
needsSQL := false
|
||||||
|
|
||||||
|
// Check what we need based on tables
|
||||||
|
for _, table := range templateData.Tables {
|
||||||
|
for _, index := range table.Indexes {
|
||||||
|
if index.IsUnique {
|
||||||
|
needsUniqueIndex = true
|
||||||
|
} else {
|
||||||
|
needsIndex = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if any column uses SQL default values
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if strings.Contains(col.DrizzleChain, "sql`") {
|
||||||
|
needsSQL = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the import statement
|
||||||
|
imports := make([]string, 0)
|
||||||
|
|
||||||
|
if needsPgTable {
|
||||||
|
imports = append(imports, "pgTable")
|
||||||
|
}
|
||||||
|
if needsPgEnum {
|
||||||
|
imports = append(imports, "pgEnum")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add column types - for now, add common ones
|
||||||
|
// TODO: Could be optimized to only include used types
|
||||||
|
columnTypes := []string{
|
||||||
|
"integer", "bigint", "smallint",
|
||||||
|
"serial", "bigserial", "smallserial",
|
||||||
|
"text", "varchar", "char",
|
||||||
|
"boolean", "numeric", "real", "doublePrecision",
|
||||||
|
"timestamp", "date", "time", "interval",
|
||||||
|
"json", "jsonb", "uuid", "bytea",
|
||||||
|
}
|
||||||
|
imports = append(imports, columnTypes...)
|
||||||
|
|
||||||
|
if needsIndex {
|
||||||
|
imports = append(imports, "index")
|
||||||
|
}
|
||||||
|
if needsUniqueIndex {
|
||||||
|
imports = append(imports, "uniqueIndex")
|
||||||
|
}
|
||||||
|
|
||||||
|
importLine := "import { " + strings.Join(imports, ", ") + " } from 'drizzle-orm/pg-core';"
|
||||||
|
templateData.AddImport(importLine)
|
||||||
|
|
||||||
|
// Add SQL import if needed
|
||||||
|
if needsSQL {
|
||||||
|
templateData.AddImport("import { sql } from 'drizzle-orm';")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildEnumMap builds a map of enum type names for quick lookup
|
||||||
|
func (w *Writer) buildEnumMap(db *models.Database) map[string]bool {
|
||||||
|
enumMap := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
enumMap[enum.Name] = true
|
||||||
|
// Also add lowercase version for case-insensitive lookup
|
||||||
|
enumMap[strings.ToLower(enum.Name)] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return enumMap
|
||||||
|
}
|
||||||
|
|
||||||
|
// tableUsesEnum checks if a table uses any enum types
|
||||||
|
func (w *Writer) tableUsesEnum(table *models.Table, enumMap map[string]bool) bool {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if enumMap[col.Type] || enumMap[strings.ToLower(col.Type)] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// getTableEnumNames returns the list of enum variable names used by a table
|
||||||
|
func (w *Writer) getTableEnumNames(table *models.Table, schema *models.Schema, enumMap map[string]bool) []string {
|
||||||
|
enumNames := make([]string, 0)
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if enumMap[col.Type] || enumMap[strings.ToLower(col.Type)] {
|
||||||
|
// Find the enum in schema
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
if strings.EqualFold(enum.Name, col.Type) {
|
||||||
|
varName := w.typeMapper.ToCamelCase(enum.Name)
|
||||||
|
if !seen[varName] {
|
||||||
|
enumNames = append(enumNames, varName)
|
||||||
|
seen[varName] = true
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return enumNames
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSortedColumnNames returns column names sorted by sequence or name
|
||||||
|
func (w *Writer) getSortedColumnNames(table *models.Table) []string {
|
||||||
|
// Convert map to slice
|
||||||
|
columns := make([]*models.Column, 0, len(table.Columns))
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
columns = append(columns, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by sequence, then by primary key, then by name
|
||||||
|
// (Similar to GORM writer)
|
||||||
|
sortColumns := func(i, j int) bool {
|
||||||
|
// Sort by sequence if both have it
|
||||||
|
if columns[i].Sequence > 0 && columns[j].Sequence > 0 {
|
||||||
|
return columns[i].Sequence < columns[j].Sequence
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put primary keys first
|
||||||
|
if columns[i].IsPrimaryKey != columns[j].IsPrimaryKey {
|
||||||
|
return columns[i].IsPrimaryKey
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise sort alphabetically
|
||||||
|
return columns[i].Name < columns[j].Name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a custom sorter
|
||||||
|
for i := 0; i < len(columns); i++ {
|
||||||
|
for j := i + 1; j < len(columns); j++ {
|
||||||
|
if !sortColumns(i, j) {
|
||||||
|
columns[i], columns[j] = columns[j], columns[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract names
|
||||||
|
names := make([]string, len(columns))
|
||||||
|
for i, col := range columns {
|
||||||
|
names[i] = col.Name
|
||||||
|
}
|
||||||
|
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
|
// getForeignKeyForColumn returns the foreign key constraint for a column, if any
|
||||||
|
func (w *Writer) getForeignKeyForColumn(columnName string, table *models.Table) *models.Constraint {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, col := range constraint.Columns {
|
||||||
|
if col == columnName {
|
||||||
|
return constraint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeOutput writes the content to file or stdout
|
||||||
|
func (w *Writer) writeOutput(content string) error {
|
||||||
|
if w.options.OutputPath != "" {
|
||||||
|
return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print to stdout
|
||||||
|
fmt.Print(content)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
|
||||||
|
func (w *Writer) shouldUseMultiFile() bool {
|
||||||
|
// Check if multi_file is explicitly set in metadata
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
|
||||||
|
return mf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-detect based on output path
|
||||||
|
if w.options.OutputPath == "" {
|
||||||
|
// No output path means stdout (single file)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with .ts (explicit file)
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, ".ts") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with directory separator
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path exists and is a directory
|
||||||
|
info, err := os.Stat(w.options.OutputPath)
|
||||||
|
if err == nil && info.IsDir() {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to single file for ambiguous cases
|
||||||
|
return false
|
||||||
|
}
|
||||||
176
pkg/writers/gorm/README.md
Normal file
176
pkg/writers/gorm/README.md
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
# GORM Writer
|
||||||
|
|
||||||
|
Generates Go source files with GORM model definitions from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The GORM Writer converts RelSpec's internal database model representation into Go source code with GORM struct definitions, complete with proper tags, relationships, and methods.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates GORM-compatible Go structs
|
||||||
|
- Creates proper `gorm` struct tags
|
||||||
|
- Generates `TableName()` methods
|
||||||
|
- Adds relationship fields (belongs-to, has-many)
|
||||||
|
- Supports both single-file and multi-file output
|
||||||
|
- Auto-generates helper methods (optional)
|
||||||
|
- Maps SQL types to Go types
|
||||||
|
- Handles nullable fields with custom sql_types
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Assume db is a *models.Database from a reader
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "models.go",
|
||||||
|
PackageName: "models",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := gorm.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate GORM models from PostgreSQL database (single file)
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output gorm \
|
||||||
|
--out-file models.go \
|
||||||
|
--package models
|
||||||
|
|
||||||
|
# Generate GORM models with multi-file output (one file per table)
|
||||||
|
relspec --input json \
|
||||||
|
--in-file schema.json \
|
||||||
|
--output gorm \
|
||||||
|
--out-file models/ \
|
||||||
|
--package models
|
||||||
|
|
||||||
|
# Convert DBML to GORM models
|
||||||
|
relspec --input dbml --in-file schema.dbml --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
## Output Modes
|
||||||
|
|
||||||
|
### Single File Mode
|
||||||
|
|
||||||
|
Generates all models in one file:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
relspec --input pgsql --conn "..." --output gorm --out-file models.go
|
||||||
|
```
|
||||||
|
|
||||||
|
### Multi-File Mode
|
||||||
|
|
||||||
|
Generates one file per table (auto-detected when output is a directory):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
relspec --input pgsql --conn "..." --output gorm --out-file models/
|
||||||
|
```
|
||||||
|
|
||||||
|
Files are named: `sql_{schema}_{table}.go`
|
||||||
|
|
||||||
|
## Generated Code Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
sql_types "git.warky.dev/wdevs/sql_types"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelUser struct {
|
||||||
|
ID int64 `gorm:"column:id;type:bigint;primaryKey;autoIncrement" json:"id"`
|
||||||
|
Username string `gorm:"column:username;type:varchar(50);not null;uniqueIndex" json:"username"`
|
||||||
|
Email string `gorm:"column:email;type:varchar(100);not null" json:"email"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()" json:"created_at"`
|
||||||
|
|
||||||
|
// Relationships
|
||||||
|
Pos []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE" json:"pos,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelUser) TableName() string {
|
||||||
|
return "public.users"
|
||||||
|
}
|
||||||
|
|
||||||
|
type ModelPost struct {
|
||||||
|
ID int64 `gorm:"column:id;type:bigint;primaryKey" json:"id"`
|
||||||
|
UserID int64 `gorm:"column:user_id;type:bigint;not null" json:"user_id"`
|
||||||
|
Title string `gorm:"column:title;type:varchar(200);not null" json:"title"`
|
||||||
|
Content sql_types.SqlString `gorm:"column:content;type:text" json:"content,omitempty"`
|
||||||
|
|
||||||
|
// Belongs to
|
||||||
|
Use *ModelUser `gorm:"foreignKey:UserID;references:ID" json:"use,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelPost) TableName() string {
|
||||||
|
return "public.posts"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Writer Options
|
||||||
|
|
||||||
|
### Metadata Options
|
||||||
|
|
||||||
|
Configure the writer behavior using metadata in `WriterOptions`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "models.go",
|
||||||
|
PackageName: "models",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"multi_file": true, // Enable multi-file mode
|
||||||
|
"populate_refs": true, // Populate RefDatabase/RefSchema
|
||||||
|
"generate_get_id_str": true, // Generate GetIDStr() methods
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
| SQL Type | Go Type | Notes |
|
||||||
|
|----------|---------|-------|
|
||||||
|
| bigint, int8 | int64 | - |
|
||||||
|
| integer, int, int4 | int | - |
|
||||||
|
| smallint, int2 | int16 | - |
|
||||||
|
| varchar, text | string | Not nullable |
|
||||||
|
| varchar, text (nullable) | sql_types.SqlString | Nullable |
|
||||||
|
| boolean, bool | bool | - |
|
||||||
|
| timestamp, timestamptz | time.Time | - |
|
||||||
|
| numeric, decimal | float64 | - |
|
||||||
|
| uuid | string | - |
|
||||||
|
| json, jsonb | string | - |
|
||||||
|
|
||||||
|
## Relationship Generation
|
||||||
|
|
||||||
|
The writer automatically generates relationship fields:
|
||||||
|
|
||||||
|
- **Belongs-to**: Generated for tables with foreign keys
|
||||||
|
- **Has-many**: Generated for tables referenced by foreign keys
|
||||||
|
- Relationship field names use 3-letter prefixes
|
||||||
|
- Includes proper `gorm` tags with `foreignKey` and `references`
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Model names are prefixed with "Model" (e.g., `ModelUser`)
|
||||||
|
- Nullable columns use `sql_types.SqlString`, `sql_types.SqlInt64`, etc.
|
||||||
|
- Generated code is auto-formatted with `go fmt`
|
||||||
|
- JSON tags are automatically added
|
||||||
|
- Supports schema-qualified table names in `TableName()` method
|
||||||
@@ -41,12 +41,7 @@ func NewWriter(options *writers.WriterOptions) *Writer {
|
|||||||
// WriteDatabase writes a complete database as GORM models
|
// WriteDatabase writes a complete database as GORM models
|
||||||
func (w *Writer) WriteDatabase(db *models.Database) error {
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
// Check if multi-file mode is enabled
|
// Check if multi-file mode is enabled
|
||||||
multiFile := false
|
multiFile := w.shouldUseMultiFile()
|
||||||
if w.options.Metadata != nil {
|
|
||||||
if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
|
|
||||||
multiFile = mf
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if multiFile {
|
if multiFile {
|
||||||
return w.writeMultiFile(db)
|
return w.writeMultiFile(db)
|
||||||
@@ -340,6 +335,41 @@ func (w *Writer) writeOutput(content string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
|
||||||
|
func (w *Writer) shouldUseMultiFile() bool {
|
||||||
|
// Check if multi_file is explicitly set in metadata
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
|
||||||
|
return mf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-detect based on output path
|
||||||
|
if w.options.OutputPath == "" {
|
||||||
|
// No output path means stdout (single file)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with .go (explicit file)
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, ".go") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path ends with directory separator
|
||||||
|
if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if path exists and is a directory
|
||||||
|
info, err := os.Stat(w.options.OutputPath)
|
||||||
|
if err == nil && info.IsDir() {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to single file for ambiguous cases
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
// createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
|
// createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
|
||||||
func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
|
func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
|
||||||
return &models.Database{
|
return &models.Database{
|
||||||
|
|||||||
272
pkg/writers/graphql/README.md
Normal file
272
pkg/writers/graphql/README.md
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
# GraphQL Schema Writer
|
||||||
|
|
||||||
|
The GraphQL writer converts RelSpec's internal database model into GraphQL Schema Definition Language (SDL) files.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Table to Type mapping**: Database tables become GraphQL types
|
||||||
|
- **Column to Field mapping**: Table columns become type fields
|
||||||
|
- **Enum support**: Database enums are preserved
|
||||||
|
- **Custom scalar declarations**: Automatically declares DateTime, JSON, Date scalars
|
||||||
|
- **Implicit relationships**: Generates relationship fields from foreign keys
|
||||||
|
- **Many-to-many support**: Handles junction tables intelligently
|
||||||
|
- **Clean output**: Proper formatting, field ordering, and comments
|
||||||
|
|
||||||
|
## Type Mappings
|
||||||
|
|
||||||
|
### SQL to GraphQL
|
||||||
|
|
||||||
|
| SQL Type | GraphQL Type | Notes |
|
||||||
|
|----------|--------------|-------|
|
||||||
|
| bigint, integer, serial (PK) | ID | Primary keys map to ID |
|
||||||
|
| bigint, integer, int | Int | |
|
||||||
|
| text, varchar, char | String | |
|
||||||
|
| uuid (PK) | ID | UUID primary keys also map to ID |
|
||||||
|
| uuid | String | Non-PK UUIDs map to String |
|
||||||
|
| double precision, numeric, float | Float | |
|
||||||
|
| boolean | Boolean | |
|
||||||
|
| timestamp, timestamptz | DateTime | Custom scalar |
|
||||||
|
| jsonb, json | JSON | Custom scalar |
|
||||||
|
| date | Date | Custom scalar |
|
||||||
|
| Enum types | Enum | Preserves enum name |
|
||||||
|
| Arrays (e.g., text[]) | [Type] | Mapped to GraphQL lists |
|
||||||
|
|
||||||
|
## Relationship Handling
|
||||||
|
|
||||||
|
The writer intelligently generates relationship fields based on foreign key constraints:
|
||||||
|
|
||||||
|
### Forward Relationships (FK on this table)
|
||||||
|
```sql
|
||||||
|
-- Post table has authorId FK to User.id
|
||||||
|
CREATE TABLE post (
|
||||||
|
id bigint PRIMARY KEY,
|
||||||
|
title text NOT NULL,
|
||||||
|
author_id bigint NOT NULL REFERENCES user(id)
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
```graphql
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
author: User! # Generated from authorId FK
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Reverse Relationships (FK on other table)
|
||||||
|
```graphql
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
posts: [Post!]! # Reverse relationship (Post has FK to User)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Many-to-Many Relationships
|
||||||
|
|
||||||
|
Junction tables (tables with only PKs and FKs) are automatically detected and hidden:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE post_tag (
|
||||||
|
post_id bigint NOT NULL REFERENCES post(id),
|
||||||
|
tag_id bigint NOT NULL REFERENCES tag(id),
|
||||||
|
PRIMARY KEY (post_id, tag_id)
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
```graphql
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
tags: [Tag!]! # Many-to-many through PostTag junction table
|
||||||
|
}
|
||||||
|
|
||||||
|
type Tag {
|
||||||
|
id: ID!
|
||||||
|
posts: [Post!]! # Reverse many-to-many
|
||||||
|
}
|
||||||
|
|
||||||
|
# Note: PostTag junction table is NOT included in output
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
|
||||||
|
)
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.graphql",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := graphql.NewWriter(opts)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
### With Metadata Options
|
||||||
|
|
||||||
|
```go
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.graphql",
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"includeScalarDeclarations": true, // Include scalar declarations
|
||||||
|
"includeComments": true, // Include field/table comments
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := graphql.NewWriter(opts)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Write to Stdout
|
||||||
|
|
||||||
|
```go
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
OutputPath: "", // Empty path writes to stdout
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := graphql.NewWriter(opts)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
```
|
||||||
|
|
||||||
|
## CLI Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Convert PostgreSQL database to GraphQL
|
||||||
|
relspec convert --from pgsql \
|
||||||
|
--from-conn "postgres://user:pass@localhost:5432/mydb" \
|
||||||
|
--to graphql --to-path schema.graphql
|
||||||
|
|
||||||
|
# Convert GORM models to GraphQL
|
||||||
|
relspec convert --from gorm --from-path ./models \
|
||||||
|
--to graphql --to-path schema.graphql
|
||||||
|
|
||||||
|
# Convert JSON to GraphQL
|
||||||
|
relspec convert --from json --from-path schema.json \
|
||||||
|
--to graphql --to-path schema.graphql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Output Format
|
||||||
|
|
||||||
|
The generated GraphQL schema follows this structure:
|
||||||
|
|
||||||
|
1. **Header comment** (if enabled)
|
||||||
|
2. **Custom scalar declarations** (if any custom scalars are used)
|
||||||
|
3. **Enum definitions** (alphabetically sorted)
|
||||||
|
4. **Type definitions** (with fields ordered: ID first, then scalars alphabetically, then relationships)
|
||||||
|
|
||||||
|
### Example Output
|
||||||
|
|
||||||
|
```graphql
|
||||||
|
# Generated GraphQL Schema
|
||||||
|
# Database: myapp
|
||||||
|
|
||||||
|
scalar DateTime
|
||||||
|
scalar JSON
|
||||||
|
scalar Date
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
ADMIN
|
||||||
|
USER
|
||||||
|
MODERATOR
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
createdAt: DateTime!
|
||||||
|
email: String!
|
||||||
|
name: String!
|
||||||
|
role: Role!
|
||||||
|
|
||||||
|
posts: [Post!]!
|
||||||
|
profile: Profile
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
content: String
|
||||||
|
published: Boolean!
|
||||||
|
publishedAt: Date
|
||||||
|
title: String!
|
||||||
|
|
||||||
|
author: User!
|
||||||
|
tags: [Tag!]!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Tag {
|
||||||
|
id: ID!
|
||||||
|
name: String!
|
||||||
|
|
||||||
|
posts: [Post!]!
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Metadata Options
|
||||||
|
|
||||||
|
| Option | Type | Description | Default |
|
||||||
|
|--------|------|-------------|---------|
|
||||||
|
| `includeScalarDeclarations` | bool | Include `scalar DateTime`, etc. declarations | true |
|
||||||
|
| `includeComments` | bool | Include table/field descriptions as comments | true |
|
||||||
|
| `preservePKType` | bool | Use Int/String for PKs instead of ID | false |
|
||||||
|
|
||||||
|
## Field Naming Conventions
|
||||||
|
|
||||||
|
- **FK columns**: Foreign key columns like `authorId` are removed from the output; instead, a relationship field `author` is generated
|
||||||
|
- **Relationship pluralization**: Reverse one-to-many relationships are pluralized (e.g., `posts`, `tags`)
|
||||||
|
- **CamelCase**: Field names are kept in their original casing from the database
|
||||||
|
|
||||||
|
## Junction Table Detection
|
||||||
|
|
||||||
|
A table is considered a junction table if it:
|
||||||
|
1. Has exactly 2 foreign key constraints
|
||||||
|
2. All columns are either primary keys or foreign keys
|
||||||
|
3. Has a composite primary key on the FK columns
|
||||||
|
|
||||||
|
Junction tables are automatically hidden from the GraphQL output, and many-to-many relationship fields are generated on the related types instead.
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- All tables in all schemas are flattened into a single GraphQL schema
|
||||||
|
- No support for GraphQL-specific features like directives, interfaces, or unions
|
||||||
|
- Nullable vs non-nullable is determined solely by the `NOT NULL` constraint
|
||||||
|
|
||||||
|
## Example Conversion
|
||||||
|
|
||||||
|
**Input** (Database Schema):
|
||||||
|
```sql
|
||||||
|
CREATE TABLE user (
|
||||||
|
id bigint PRIMARY KEY,
|
||||||
|
email text NOT NULL,
|
||||||
|
created_at timestamp NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE post (
|
||||||
|
id bigint PRIMARY KEY,
|
||||||
|
title text NOT NULL,
|
||||||
|
author_id bigint NOT NULL REFERENCES user(id)
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Output** (GraphQL Schema):
|
||||||
|
```graphql
|
||||||
|
scalar DateTime
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
createdAt: DateTime!
|
||||||
|
email: String!
|
||||||
|
|
||||||
|
posts: [Post!]!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
|
||||||
|
author: User!
|
||||||
|
}
|
||||||
|
```
|
||||||
178
pkg/writers/graphql/relationships.go
Normal file
178
pkg/writers/graphql/relationships.go
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (w *Writer) generateRelationFields(table *models.Table, db *models.Database, schema *models.Schema) []string {
|
||||||
|
var fields []string
|
||||||
|
|
||||||
|
// 1. Forward relationships (this table has FK)
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type != models.ForeignKeyConstraint {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the related table
|
||||||
|
relatedTable := w.findTable(db, constraint.ReferencedSchema, constraint.ReferencedTable)
|
||||||
|
if relatedTable == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate field name (remove "Id" suffix from FK column if present)
|
||||||
|
fieldName := w.relationFieldName(constraint.Columns[0])
|
||||||
|
|
||||||
|
// Determine nullability from FK column
|
||||||
|
nullable := true
|
||||||
|
for _, colName := range constraint.Columns {
|
||||||
|
if col, exists := table.Columns[colName]; exists {
|
||||||
|
if col.NotNull {
|
||||||
|
nullable = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format: fieldName: RelatedType! or fieldName: RelatedType
|
||||||
|
gqlType := relatedTable.Name
|
||||||
|
if !nullable {
|
||||||
|
gqlType += "!"
|
||||||
|
}
|
||||||
|
|
||||||
|
fields = append(fields, fmt.Sprintf(" %s: %s", fieldName, gqlType))
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Reverse relationships (other tables reference this table)
|
||||||
|
for _, otherSchema := range db.Schemas {
|
||||||
|
for _, otherTable := range otherSchema.Tables {
|
||||||
|
if otherTable.Name == table.Name && otherSchema.Name == schema.Name {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip join tables for many-to-many
|
||||||
|
if w.isJoinTable(otherTable) {
|
||||||
|
// Check if this is a many-to-many through this join table
|
||||||
|
if m2mField := w.getManyToManyField(table, otherTable, db); m2mField != "" {
|
||||||
|
fields = append(fields, m2mField)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, constraint := range otherTable.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint &&
|
||||||
|
constraint.ReferencedTable == table.Name &&
|
||||||
|
constraint.ReferencedSchema == schema.Name {
|
||||||
|
// Add reverse relationship field (array)
|
||||||
|
fieldName := w.pluralize(w.camelCase(otherTable.Name))
|
||||||
|
fields = append(fields, fmt.Sprintf(" %s: [%s!]!", fieldName, otherTable.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) getManyToManyField(table *models.Table, joinTable *models.Table, db *models.Database) string {
|
||||||
|
// Find the two FK constraints in the join table
|
||||||
|
var fk1, fk2 *models.Constraint
|
||||||
|
for _, constraint := range joinTable.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
if fk1 == nil {
|
||||||
|
fk1 = constraint
|
||||||
|
} else {
|
||||||
|
fk2 = constraint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fk1 == nil || fk2 == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which FK points to our table and which to the other table
|
||||||
|
var targetConstraint *models.Constraint
|
||||||
|
if fk1.ReferencedTable == table.Name {
|
||||||
|
targetConstraint = fk2
|
||||||
|
} else if fk2.ReferencedTable == table.Name {
|
||||||
|
targetConstraint = fk1
|
||||||
|
} else {
|
||||||
|
return "" // This join table doesn't involve our table
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the target table
|
||||||
|
targetTable := w.findTable(db, targetConstraint.ReferencedSchema, targetConstraint.ReferencedTable)
|
||||||
|
if targetTable == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate many-to-many field
|
||||||
|
fieldName := w.pluralize(w.camelCase(targetTable.Name))
|
||||||
|
return fmt.Sprintf(" %s: [%s!]!", fieldName, targetTable.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) findTable(db *models.Database, schemaName, tableName string) *models.Table {
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
if schema.Name != schemaName {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == tableName {
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) relationFieldName(fkColumnName string) string {
|
||||||
|
// Remove "Id" or "_id" suffix
|
||||||
|
name := fkColumnName
|
||||||
|
if strings.HasSuffix(name, "Id") {
|
||||||
|
name = name[:len(name)-2]
|
||||||
|
} else if strings.HasSuffix(name, "_id") {
|
||||||
|
name = name[:len(name)-3]
|
||||||
|
}
|
||||||
|
|
||||||
|
return w.camelCase(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) camelCase(s string) string {
|
||||||
|
// If already camelCase or PascalCase, convert to camelCase
|
||||||
|
if s == "" {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert first character to lowercase
|
||||||
|
return strings.ToLower(string(s[0])) + s[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) pluralize(s string) string {
|
||||||
|
// Simple pluralization rules
|
||||||
|
if s == "" {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Already plural
|
||||||
|
if strings.HasSuffix(s, "s") {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Words ending in 'y' → 'ies'
|
||||||
|
if strings.HasSuffix(s, "y") {
|
||||||
|
return s[:len(s)-1] + "ies"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Words ending in 's', 'x', 'z', 'ch', 'sh' → add 'es'
|
||||||
|
if strings.HasSuffix(s, "s") || strings.HasSuffix(s, "x") ||
|
||||||
|
strings.HasSuffix(s, "z") || strings.HasSuffix(s, "ch") ||
|
||||||
|
strings.HasSuffix(s, "sh") {
|
||||||
|
return s + "es"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: add 's'
|
||||||
|
return s + "s"
|
||||||
|
}
|
||||||
148
pkg/writers/graphql/type_mapping.go
Normal file
148
pkg/writers/graphql/type_mapping.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (w *Writer) sqlTypeToGraphQL(sqlType string, column *models.Column, table *models.Table, schema *models.Schema) string {
|
||||||
|
// Check if this is a primary key → ID type
|
||||||
|
if column.IsPrimaryKey {
|
||||||
|
// Check metadata for explicit type preference
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if preserveType, ok := w.options.Metadata["preservePKType"].(bool); ok && preserveType {
|
||||||
|
// Use Int or String based on SQL type
|
||||||
|
if w.isIntegerType(sqlType) {
|
||||||
|
return "Int"
|
||||||
|
}
|
||||||
|
return "String"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "ID"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map SQL types to custom scalars
|
||||||
|
if scalar := w.sqlTypeToCustomScalar(sqlType); scalar != "" {
|
||||||
|
return scalar
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's an enum
|
||||||
|
if w.isEnumType(sqlType, schema) {
|
||||||
|
return sqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard type mappings
|
||||||
|
baseType := strings.Split(sqlType, "(")[0] // Remove length/precision
|
||||||
|
baseType = strings.TrimSpace(baseType)
|
||||||
|
|
||||||
|
// Handle array types
|
||||||
|
if strings.HasSuffix(baseType, "[]") {
|
||||||
|
elemType := strings.TrimSuffix(baseType, "[]")
|
||||||
|
gqlType := w.mapBaseTypeToGraphQL(elemType)
|
||||||
|
return "[" + gqlType + "]"
|
||||||
|
}
|
||||||
|
|
||||||
|
return w.mapBaseTypeToGraphQL(baseType)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) mapBaseTypeToGraphQL(baseType string) string {
|
||||||
|
typeMap := map[string]string{
|
||||||
|
// Text types
|
||||||
|
"text": "String",
|
||||||
|
"varchar": "String",
|
||||||
|
"char": "String",
|
||||||
|
"character": "String",
|
||||||
|
"bpchar": "String",
|
||||||
|
"name": "String",
|
||||||
|
|
||||||
|
// UUID
|
||||||
|
"uuid": "ID",
|
||||||
|
|
||||||
|
// Integer types
|
||||||
|
"integer": "Int",
|
||||||
|
"int": "Int",
|
||||||
|
"int2": "Int",
|
||||||
|
"int4": "Int",
|
||||||
|
"int8": "Int",
|
||||||
|
"bigint": "Int",
|
||||||
|
"smallint": "Int",
|
||||||
|
"serial": "Int",
|
||||||
|
"bigserial": "Int",
|
||||||
|
"smallserial": "Int",
|
||||||
|
|
||||||
|
// Float types
|
||||||
|
"double precision": "Float",
|
||||||
|
"float": "Float",
|
||||||
|
"float4": "Float",
|
||||||
|
"float8": "Float",
|
||||||
|
"real": "Float",
|
||||||
|
"numeric": "Float",
|
||||||
|
"decimal": "Float",
|
||||||
|
"money": "Float",
|
||||||
|
|
||||||
|
// Boolean
|
||||||
|
"boolean": "Boolean",
|
||||||
|
"bool": "Boolean",
|
||||||
|
}
|
||||||
|
|
||||||
|
if gqlType, ok := typeMap[baseType]; ok {
|
||||||
|
return gqlType
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: capitalize first letter
|
||||||
|
if len(baseType) > 0 {
|
||||||
|
return strings.ToUpper(string(baseType[0])) + baseType[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
return "String"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) sqlTypeToCustomScalar(sqlType string) string {
|
||||||
|
scalarMap := map[string]string{
|
||||||
|
"timestamp": "DateTime",
|
||||||
|
"timestamptz": "DateTime",
|
||||||
|
"timestamp with time zone": "DateTime",
|
||||||
|
"jsonb": "JSON",
|
||||||
|
"json": "JSON",
|
||||||
|
"date": "Date",
|
||||||
|
}
|
||||||
|
|
||||||
|
baseType := strings.Split(sqlType, "(")[0]
|
||||||
|
baseType = strings.TrimSpace(baseType)
|
||||||
|
|
||||||
|
if scalar, ok := scalarMap[baseType]; ok {
|
||||||
|
return scalar
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) isIntegerType(sqlType string) bool {
|
||||||
|
intTypes := map[string]bool{
|
||||||
|
"integer": true,
|
||||||
|
"int": true,
|
||||||
|
"int2": true,
|
||||||
|
"int4": true,
|
||||||
|
"int8": true,
|
||||||
|
"bigint": true,
|
||||||
|
"smallint": true,
|
||||||
|
"serial": true,
|
||||||
|
"bigserial": true,
|
||||||
|
"smallserial": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
baseType := strings.Split(sqlType, "(")[0]
|
||||||
|
baseType = strings.TrimSpace(baseType)
|
||||||
|
|
||||||
|
return intTypes[baseType]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) isEnumType(sqlType string, schema *models.Schema) bool {
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
if enum.Name == sqlType {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
272
pkg/writers/graphql/writer.go
Normal file
272
pkg/writers/graphql/writer.go
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
return &Writer{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
|
content := w.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
if w.options.OutputPath != "" {
|
||||||
|
return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Print(content)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) WriteSchema(schema *models.Schema) error {
|
||||||
|
db := models.InitDatabase(schema.Name)
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) WriteTable(table *models.Table) error {
|
||||||
|
schema := models.InitSchema(table.Schema)
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
db := models.InitDatabase(schema.Name)
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) databaseToGraphQL(db *models.Database) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Header comment
|
||||||
|
if w.shouldIncludeComments() {
|
||||||
|
sb.WriteString("# Generated GraphQL Schema\n")
|
||||||
|
if db.Name != "" {
|
||||||
|
sb.WriteString(fmt.Sprintf("# Database: %s\n", db.Name))
|
||||||
|
}
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Custom scalar declarations
|
||||||
|
if w.shouldIncludeScalarDeclarations() {
|
||||||
|
scalars := w.collectCustomScalars(db)
|
||||||
|
if len(scalars) > 0 {
|
||||||
|
for _, scalar := range scalars {
|
||||||
|
sb.WriteString(fmt.Sprintf("scalar %s\n", scalar))
|
||||||
|
}
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enum definitions
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
sb.WriteString(w.enumToGraphQL(enum))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Type definitions
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Skip join tables (tables with only PK+FK columns)
|
||||||
|
if w.isJoinTable(table) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(w.tableToGraphQL(table, db, schema))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) shouldIncludeComments() bool {
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if include, ok := w.options.Metadata["includeComments"].(bool); ok {
|
||||||
|
return include
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true // Default to true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) shouldIncludeScalarDeclarations() bool {
|
||||||
|
if w.options.Metadata != nil {
|
||||||
|
if include, ok := w.options.Metadata["includeScalarDeclarations"].(bool); ok {
|
||||||
|
return include
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true // Default to true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) collectCustomScalars(db *models.Database) []string {
|
||||||
|
scalarsNeeded := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if scalar := w.sqlTypeToCustomScalar(col.Type); scalar != "" {
|
||||||
|
scalarsNeeded[scalar] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert to sorted slice
|
||||||
|
scalars := make([]string, 0, len(scalarsNeeded))
|
||||||
|
for scalar := range scalarsNeeded {
|
||||||
|
scalars = append(scalars, scalar)
|
||||||
|
}
|
||||||
|
sort.Strings(scalars)
|
||||||
|
|
||||||
|
return scalars
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) isJoinTable(table *models.Table) bool {
|
||||||
|
// A join table typically has:
|
||||||
|
// 1. Exactly 2 FK constraints
|
||||||
|
// 2. Composite primary key on those FK columns
|
||||||
|
// 3. No other columns
|
||||||
|
|
||||||
|
fkCount := 0
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
fkCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCount != 2 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if all columns are either PKs or FKs
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
isFKColumn := false
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, fkCol := range constraint.Columns {
|
||||||
|
if fkCol == col.Name {
|
||||||
|
isFKColumn = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !isFKColumn && !col.IsPrimaryKey {
|
||||||
|
// Found a column that's neither PK nor FK
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) enumToGraphQL(enum *models.Enum) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
|
||||||
|
for _, value := range enum.Values {
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s\n", value))
|
||||||
|
}
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) tableToGraphQL(table *models.Table, db *models.Database, schema *models.Schema) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Type name
|
||||||
|
typeName := table.Name
|
||||||
|
|
||||||
|
// Description comment
|
||||||
|
if w.shouldIncludeComments() && (table.Description != "" || table.Comment != "") {
|
||||||
|
desc := table.Description
|
||||||
|
if desc == "" {
|
||||||
|
desc = table.Comment
|
||||||
|
}
|
||||||
|
sb.WriteString(fmt.Sprintf("# %s\n", desc))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf("type %s {\n", typeName))
|
||||||
|
|
||||||
|
// Collect and categorize fields
|
||||||
|
var idFields, scalarFields, relationFields []string
|
||||||
|
|
||||||
|
for _, column := range table.Columns {
|
||||||
|
// Skip FK columns (they become relation fields)
|
||||||
|
if w.isForeignKeyColumn(column, table) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
gqlType := w.sqlTypeToGraphQL(column.Type, column, table, schema)
|
||||||
|
if gqlType == "" {
|
||||||
|
continue // Skip if type couldn't be mapped
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine nullability
|
||||||
|
if column.NotNull {
|
||||||
|
gqlType += "!"
|
||||||
|
}
|
||||||
|
|
||||||
|
field := fmt.Sprintf(" %s: %s", column.Name, gqlType)
|
||||||
|
|
||||||
|
if column.IsPrimaryKey {
|
||||||
|
idFields = append(idFields, field)
|
||||||
|
} else {
|
||||||
|
scalarFields = append(scalarFields, field)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add relation fields
|
||||||
|
relationFields = w.generateRelationFields(table, db, schema)
|
||||||
|
|
||||||
|
// Write fields in order: ID, scalars (sorted), relations (sorted)
|
||||||
|
for _, field := range idFields {
|
||||||
|
sb.WriteString(field + "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Strings(scalarFields)
|
||||||
|
for _, field := range scalarFields {
|
||||||
|
sb.WriteString(field + "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(relationFields) > 0 {
|
||||||
|
if len(scalarFields) > 0 || len(idFields) > 0 {
|
||||||
|
sb.WriteString("\n") // Blank line before relations
|
||||||
|
}
|
||||||
|
sort.Strings(relationFields)
|
||||||
|
for _, field := range relationFields {
|
||||||
|
sb.WriteString(field + "\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *Writer) isForeignKeyColumn(column *models.Column, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, fkCol := range constraint.Columns {
|
||||||
|
if fkCol == column.Name {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
412
pkg/writers/graphql/writer_test.go
Normal file
412
pkg/writers/graphql/writer_test.go
Normal file
@@ -0,0 +1,412 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestWriter_WriteTable_Simple(t *testing.T) {
|
||||||
|
table := models.InitTable("User", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "User", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
nameCol := models.InitColumn("name", "User", "public")
|
||||||
|
nameCol.Type = "text"
|
||||||
|
nameCol.NotNull = true
|
||||||
|
table.Columns["name"] = nameCol
|
||||||
|
|
||||||
|
emailCol := models.InitColumn("email", "User", "public")
|
||||||
|
emailCol.Type = "text"
|
||||||
|
emailCol.NotNull = false
|
||||||
|
table.Columns["email"] = emailCol
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
OutputPath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify output contains type definition
|
||||||
|
if !strings.Contains(output, "type User {") {
|
||||||
|
t.Error("Expected 'type User {' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify fields
|
||||||
|
if !strings.Contains(output, "id: ID!") {
|
||||||
|
t.Error("Expected 'id: ID!' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "name: String!") {
|
||||||
|
t.Error("Expected 'name: String!' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "email: String") {
|
||||||
|
t.Error("Expected 'email: String' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure email is not followed by ! (nullable)
|
||||||
|
if strings.Contains(output, "email: String!") {
|
||||||
|
t.Error("Did not expect 'email: String!' (should be nullable)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_WithEnum(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
// Create enum
|
||||||
|
roleEnum := &models.Enum{
|
||||||
|
Name: "Role",
|
||||||
|
Schema: "public",
|
||||||
|
Values: []string{"ADMIN", "USER", "GUEST"},
|
||||||
|
}
|
||||||
|
schema.Enums = []*models.Enum{roleEnum}
|
||||||
|
|
||||||
|
// Create table with enum field
|
||||||
|
table := models.InitTable("User", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "User", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
roleCol := models.InitColumn("role", "User", "public")
|
||||||
|
roleCol.Type = "Role"
|
||||||
|
roleCol.NotNull = true
|
||||||
|
table.Columns["role"] = roleCol
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify enum definition
|
||||||
|
if !strings.Contains(output, "enum Role {") {
|
||||||
|
t.Error("Expected 'enum Role {' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "ADMIN") {
|
||||||
|
t.Error("Expected 'ADMIN' enum value in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify enum usage in type
|
||||||
|
if !strings.Contains(output, "role: Role!") {
|
||||||
|
t.Error("Expected 'role: Role!' in output")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_WithRelations(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
// Create User table
|
||||||
|
userTable := models.InitTable("User", "public")
|
||||||
|
userIdCol := models.InitColumn("id", "User", "public")
|
||||||
|
userIdCol.Type = "bigint"
|
||||||
|
userIdCol.IsPrimaryKey = true
|
||||||
|
userIdCol.NotNull = true
|
||||||
|
userTable.Columns["id"] = userIdCol
|
||||||
|
|
||||||
|
userNameCol := models.InitColumn("name", "User", "public")
|
||||||
|
userNameCol.Type = "text"
|
||||||
|
userNameCol.NotNull = true
|
||||||
|
userTable.Columns["name"] = userNameCol
|
||||||
|
|
||||||
|
// Create Post table with FK to User
|
||||||
|
postTable := models.InitTable("Post", "public")
|
||||||
|
|
||||||
|
postIdCol := models.InitColumn("id", "Post", "public")
|
||||||
|
postIdCol.Type = "bigint"
|
||||||
|
postIdCol.IsPrimaryKey = true
|
||||||
|
postIdCol.NotNull = true
|
||||||
|
postTable.Columns["id"] = postIdCol
|
||||||
|
|
||||||
|
titleCol := models.InitColumn("title", "Post", "public")
|
||||||
|
titleCol.Type = "text"
|
||||||
|
titleCol.NotNull = true
|
||||||
|
postTable.Columns["title"] = titleCol
|
||||||
|
|
||||||
|
authorIdCol := models.InitColumn("authorId", "Post", "public")
|
||||||
|
authorIdCol.Type = "bigint"
|
||||||
|
authorIdCol.NotNull = true
|
||||||
|
postTable.Columns["authorId"] = authorIdCol
|
||||||
|
|
||||||
|
// Add FK constraint
|
||||||
|
fkConstraint := models.InitConstraint("fk_post_author", models.ForeignKeyConstraint)
|
||||||
|
fkConstraint.Schema = "public"
|
||||||
|
fkConstraint.Table = "Post"
|
||||||
|
fkConstraint.Columns = []string{"authorId"}
|
||||||
|
fkConstraint.ReferencedSchema = "public"
|
||||||
|
fkConstraint.ReferencedTable = "User"
|
||||||
|
fkConstraint.ReferencedColumns = []string{"id"}
|
||||||
|
postTable.Constraints["fk_post_author"] = fkConstraint
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{userTable, postTable}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify Post has author field (forward relationship)
|
||||||
|
if !strings.Contains(output, "author: User!") {
|
||||||
|
t.Error("Expected 'author: User!' in Post type")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify authorId FK column is NOT in the output
|
||||||
|
if strings.Contains(output, "authorId:") {
|
||||||
|
t.Error("Did not expect 'authorId:' field in output (FK columns should be hidden)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify User has posts field (reverse relationship)
|
||||||
|
if !strings.Contains(output, "posts: [Post!]!") {
|
||||||
|
t.Error("Expected 'posts: [Post!]!' in User type")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_CustomScalars(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
table := models.InitTable("Event", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "Event", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
createdAtCol := models.InitColumn("createdAt", "Event", "public")
|
||||||
|
createdAtCol.Type = "timestamp"
|
||||||
|
createdAtCol.NotNull = true
|
||||||
|
table.Columns["createdAt"] = createdAtCol
|
||||||
|
|
||||||
|
metadataCol := models.InitColumn("metadata", "Event", "public")
|
||||||
|
metadataCol.Type = "jsonb"
|
||||||
|
metadataCol.NotNull = false
|
||||||
|
table.Columns["metadata"] = metadataCol
|
||||||
|
|
||||||
|
dateCol := models.InitColumn("eventDate", "Event", "public")
|
||||||
|
dateCol.Type = "date"
|
||||||
|
dateCol.NotNull = false
|
||||||
|
table.Columns["eventDate"] = dateCol
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify scalar declarations
|
||||||
|
if !strings.Contains(output, "scalar DateTime") {
|
||||||
|
t.Error("Expected 'scalar DateTime' declaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "scalar JSON") {
|
||||||
|
t.Error("Expected 'scalar JSON' declaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "scalar Date") {
|
||||||
|
t.Error("Expected 'scalar Date' declaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify field types
|
||||||
|
if !strings.Contains(output, "createdAt: DateTime!") {
|
||||||
|
t.Error("Expected 'createdAt: DateTime!' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "metadata: JSON") {
|
||||||
|
t.Error("Expected 'metadata: JSON' in output")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(output, "eventDate: Date") {
|
||||||
|
t.Error("Expected 'eventDate: Date' in output")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_ManyToMany(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
// Create Post table
|
||||||
|
postTable := models.InitTable("Post", "public")
|
||||||
|
postIdCol := models.InitColumn("id", "Post", "public")
|
||||||
|
postIdCol.Type = "bigint"
|
||||||
|
postIdCol.IsPrimaryKey = true
|
||||||
|
postIdCol.NotNull = true
|
||||||
|
postTable.Columns["id"] = postIdCol
|
||||||
|
|
||||||
|
titleCol := models.InitColumn("title", "Post", "public")
|
||||||
|
titleCol.Type = "text"
|
||||||
|
titleCol.NotNull = true
|
||||||
|
postTable.Columns["title"] = titleCol
|
||||||
|
|
||||||
|
// Create Tag table
|
||||||
|
tagTable := models.InitTable("Tag", "public")
|
||||||
|
tagIdCol := models.InitColumn("id", "Tag", "public")
|
||||||
|
tagIdCol.Type = "bigint"
|
||||||
|
tagIdCol.IsPrimaryKey = true
|
||||||
|
tagIdCol.NotNull = true
|
||||||
|
tagTable.Columns["id"] = tagIdCol
|
||||||
|
|
||||||
|
nameCol := models.InitColumn("name", "Tag", "public")
|
||||||
|
nameCol.Type = "text"
|
||||||
|
nameCol.NotNull = true
|
||||||
|
tagTable.Columns["name"] = nameCol
|
||||||
|
|
||||||
|
// Create PostTag join table
|
||||||
|
joinTable := models.InitTable("PostTag", "public")
|
||||||
|
|
||||||
|
postIdJoinCol := models.InitColumn("postId", "PostTag", "public")
|
||||||
|
postIdJoinCol.Type = "bigint"
|
||||||
|
postIdJoinCol.NotNull = true
|
||||||
|
postIdJoinCol.IsPrimaryKey = true
|
||||||
|
joinTable.Columns["postId"] = postIdJoinCol
|
||||||
|
|
||||||
|
tagIdJoinCol := models.InitColumn("tagId", "PostTag", "public")
|
||||||
|
tagIdJoinCol.Type = "bigint"
|
||||||
|
tagIdJoinCol.NotNull = true
|
||||||
|
tagIdJoinCol.IsPrimaryKey = true
|
||||||
|
joinTable.Columns["tagId"] = tagIdJoinCol
|
||||||
|
|
||||||
|
// Add composite PK constraint
|
||||||
|
pkConstraint := models.InitConstraint("pk_posttag", models.PrimaryKeyConstraint)
|
||||||
|
pkConstraint.Schema = "public"
|
||||||
|
pkConstraint.Table = "PostTag"
|
||||||
|
pkConstraint.Columns = []string{"postId", "tagId"}
|
||||||
|
joinTable.Constraints["pk_posttag"] = pkConstraint
|
||||||
|
|
||||||
|
// Add FK to Post
|
||||||
|
fk1 := models.InitConstraint("fk_posttag_post", models.ForeignKeyConstraint)
|
||||||
|
fk1.Schema = "public"
|
||||||
|
fk1.Table = "PostTag"
|
||||||
|
fk1.Columns = []string{"postId"}
|
||||||
|
fk1.ReferencedSchema = "public"
|
||||||
|
fk1.ReferencedTable = "Post"
|
||||||
|
fk1.ReferencedColumns = []string{"id"}
|
||||||
|
joinTable.Constraints["fk_posttag_post"] = fk1
|
||||||
|
|
||||||
|
// Add FK to Tag
|
||||||
|
fk2 := models.InitConstraint("fk_posttag_tag", models.ForeignKeyConstraint)
|
||||||
|
fk2.Schema = "public"
|
||||||
|
fk2.Table = "PostTag"
|
||||||
|
fk2.Columns = []string{"tagId"}
|
||||||
|
fk2.ReferencedSchema = "public"
|
||||||
|
fk2.ReferencedTable = "Tag"
|
||||||
|
fk2.ReferencedColumns = []string{"id"}
|
||||||
|
joinTable.Constraints["fk_posttag_tag"] = fk2
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{postTable, tagTable, joinTable}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify join table is NOT in output
|
||||||
|
if strings.Contains(output, "type PostTag") {
|
||||||
|
t.Error("Did not expect 'type PostTag' (join tables should be hidden)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify Post has tags field
|
||||||
|
if !strings.Contains(output, "tags: [Tag!]!") {
|
||||||
|
t.Error("Expected 'tags: [Tag!]!' in Post type")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify Tag has posts field
|
||||||
|
if !strings.Contains(output, "posts: [Post!]!") {
|
||||||
|
t.Error("Expected 'posts: [Post!]!' in Tag type")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_UUIDType(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
table := models.InitTable("User", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "User", "public")
|
||||||
|
idCol.Type = "uuid"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
idCol.NotNull = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// UUID primary keys should still map to ID
|
||||||
|
if !strings.Contains(output, "id: ID!") {
|
||||||
|
t.Error("Expected 'id: ID!' for UUID primary key")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_Metadata_NoScalarDeclarations(t *testing.T) {
|
||||||
|
schema := models.InitSchema("public")
|
||||||
|
|
||||||
|
table := models.InitTable("Event", "public")
|
||||||
|
|
||||||
|
idCol := models.InitColumn("id", "Event", "public")
|
||||||
|
idCol.Type = "bigint"
|
||||||
|
idCol.IsPrimaryKey = true
|
||||||
|
table.Columns["id"] = idCol
|
||||||
|
|
||||||
|
createdAtCol := models.InitColumn("createdAt", "Event", "public")
|
||||||
|
createdAtCol.Type = "timestamp"
|
||||||
|
createdAtCol.NotNull = true
|
||||||
|
table.Columns["createdAt"] = createdAtCol
|
||||||
|
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
db := models.InitDatabase("test")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"includeScalarDeclarations": false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
|
||||||
|
output := writer.databaseToGraphQL(db)
|
||||||
|
|
||||||
|
// Verify no scalar declarations
|
||||||
|
if strings.Contains(output, "scalar DateTime") {
|
||||||
|
t.Error("Did not expect 'scalar DateTime' with includeScalarDeclarations=false")
|
||||||
|
}
|
||||||
|
|
||||||
|
// But field should still use DateTime
|
||||||
|
if !strings.Contains(output, "createdAt: DateTime!") {
|
||||||
|
t.Error("Expected 'createdAt: DateTime!' in output")
|
||||||
|
}
|
||||||
|
}
|
||||||
277
pkg/writers/json/README.md
Normal file
277
pkg/writers/json/README.md
Normal file
@@ -0,0 +1,277 @@
|
|||||||
|
# JSON Writer
|
||||||
|
|
||||||
|
Generates database schema definitions in JSON format.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The JSON Writer converts RelSpec's internal database model representation into JSON format, providing a complete, structured representation of the database schema.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates RelSpec's canonical JSON schema format
|
||||||
|
- Complete schema representation including:
|
||||||
|
- Databases and schemas
|
||||||
|
- Tables, columns, and data types
|
||||||
|
- Constraints (PK, FK, unique, check)
|
||||||
|
- Indexes
|
||||||
|
- Relationships
|
||||||
|
- Views and sequences
|
||||||
|
- Pretty-printed, human-readable output
|
||||||
|
- Suitable for version control
|
||||||
|
- Ideal interchange format
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/json"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.json",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := json.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Export PostgreSQL database to JSON
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output json \
|
||||||
|
--out-file schema.json
|
||||||
|
|
||||||
|
# Convert GORM models to JSON
|
||||||
|
relspec --input gorm --in-file models.go --output json --out-file schema.json
|
||||||
|
|
||||||
|
# Convert DBML to JSON
|
||||||
|
relspec --input dbml --in-file diagram.dbml --output json --out-file schema.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated JSON Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "myapp",
|
||||||
|
"description": "",
|
||||||
|
"database_type": "postgresql",
|
||||||
|
"database_version": "",
|
||||||
|
"source_format": "pgsql",
|
||||||
|
"schemas": [
|
||||||
|
{
|
||||||
|
"name": "public",
|
||||||
|
"description": "",
|
||||||
|
"tables": [
|
||||||
|
{
|
||||||
|
"name": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"description": "",
|
||||||
|
"columns": {
|
||||||
|
"id": {
|
||||||
|
"name": "id",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"type": "bigint",
|
||||||
|
"length": 0,
|
||||||
|
"precision": 0,
|
||||||
|
"scale": 0,
|
||||||
|
"not_null": true,
|
||||||
|
"is_primary_key": true,
|
||||||
|
"auto_increment": true,
|
||||||
|
"default": "",
|
||||||
|
"sequence": 1
|
||||||
|
},
|
||||||
|
"username": {
|
||||||
|
"name": "username",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"type": "varchar",
|
||||||
|
"length": 50,
|
||||||
|
"not_null": true,
|
||||||
|
"is_primary_key": false,
|
||||||
|
"auto_increment": false,
|
||||||
|
"sequence": 2
|
||||||
|
},
|
||||||
|
"email": {
|
||||||
|
"name": "email",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"type": "varchar",
|
||||||
|
"length": 100,
|
||||||
|
"not_null": true,
|
||||||
|
"sequence": 3
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"constraints": {
|
||||||
|
"pk_users": {
|
||||||
|
"name": "pk_users",
|
||||||
|
"type": "PRIMARY KEY",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": ["id"]
|
||||||
|
},
|
||||||
|
"uq_users_username": {
|
||||||
|
"name": "uq_users_username",
|
||||||
|
"type": "UNIQUE",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": ["username"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"indexes": {
|
||||||
|
"idx_users_email": {
|
||||||
|
"name": "idx_users_email",
|
||||||
|
"table": "users",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": ["email"],
|
||||||
|
"unique": false,
|
||||||
|
"type": "btree"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"relationships": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "posts",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": {
|
||||||
|
"id": {
|
||||||
|
"name": "id",
|
||||||
|
"type": "bigint",
|
||||||
|
"not_null": true,
|
||||||
|
"is_primary_key": true,
|
||||||
|
"sequence": 1
|
||||||
|
},
|
||||||
|
"user_id": {
|
||||||
|
"name": "user_id",
|
||||||
|
"type": "bigint",
|
||||||
|
"not_null": true,
|
||||||
|
"sequence": 2
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"name": "title",
|
||||||
|
"type": "varchar",
|
||||||
|
"length": 200,
|
||||||
|
"not_null": true,
|
||||||
|
"sequence": 3
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"name": "content",
|
||||||
|
"type": "text",
|
||||||
|
"not_null": false,
|
||||||
|
"sequence": 4
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"constraints": {
|
||||||
|
"fk_posts_user_id": {
|
||||||
|
"name": "fk_posts_user_id",
|
||||||
|
"type": "FOREIGN KEY",
|
||||||
|
"table": "posts",
|
||||||
|
"schema": "public",
|
||||||
|
"columns": ["user_id"],
|
||||||
|
"referenced_table": "users",
|
||||||
|
"referenced_schema": "public",
|
||||||
|
"referenced_columns": ["id"],
|
||||||
|
"on_delete": "CASCADE",
|
||||||
|
"on_update": "NO ACTION"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"indexes": {
|
||||||
|
"idx_posts_user_id": {
|
||||||
|
"name": "idx_posts_user_id",
|
||||||
|
"columns": ["user_id"],
|
||||||
|
"unique": false,
|
||||||
|
"type": "btree"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"views": [],
|
||||||
|
"sequences": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Schema Structure
|
||||||
|
|
||||||
|
The JSON format includes:
|
||||||
|
|
||||||
|
### Database Level
|
||||||
|
- `name` - Database name
|
||||||
|
- `description` - Database description
|
||||||
|
- `database_type` - Database system type
|
||||||
|
- `database_version` - Version information
|
||||||
|
- `source_format` - Original source format
|
||||||
|
- `schemas` - Array of schema objects
|
||||||
|
|
||||||
|
### Schema Level
|
||||||
|
- `name` - Schema name
|
||||||
|
- `description` - Schema description
|
||||||
|
- `tables` - Array of table objects
|
||||||
|
- `views` - Array of view objects
|
||||||
|
- `sequences` - Array of sequence objects
|
||||||
|
|
||||||
|
### Table Level
|
||||||
|
- `name` - Table name
|
||||||
|
- `schema` - Schema name
|
||||||
|
- `description` - Table description
|
||||||
|
- `columns` - Map of column objects
|
||||||
|
- `constraints` - Map of constraint objects
|
||||||
|
- `indexes` - Map of index objects
|
||||||
|
- `relationships` - Map of relationship objects
|
||||||
|
|
||||||
|
### Column Level
|
||||||
|
- `name` - Column name
|
||||||
|
- `type` - Data type
|
||||||
|
- `length` - Type length
|
||||||
|
- `precision`, `scale` - Numeric precision
|
||||||
|
- `not_null` - NOT NULL flag
|
||||||
|
- `is_primary_key` - Primary key flag
|
||||||
|
- `auto_increment` - Auto-increment flag
|
||||||
|
- `default` - Default value
|
||||||
|
- `sequence` - Column order
|
||||||
|
|
||||||
|
### Constraint Level
|
||||||
|
- `name` - Constraint name
|
||||||
|
- `type` - Constraint type (PRIMARY KEY, FOREIGN KEY, UNIQUE, CHECK)
|
||||||
|
- `columns` - Constrained columns
|
||||||
|
- `referenced_table`, `referenced_schema` - FK references
|
||||||
|
- `referenced_columns` - Referenced columns
|
||||||
|
- `on_delete`, `on_update` - FK actions
|
||||||
|
|
||||||
|
### Index Level
|
||||||
|
- `name` - Index name
|
||||||
|
- `columns` - Indexed columns
|
||||||
|
- `unique` - Unique flag
|
||||||
|
- `type` - Index type
|
||||||
|
|
||||||
|
## Use Cases
|
||||||
|
|
||||||
|
- **Version Control** - Track schema changes in git
|
||||||
|
- **Documentation** - Human-readable schema documentation
|
||||||
|
- **Interchange** - Standard format for tool integration
|
||||||
|
- **Backup** - Schema backup without database access
|
||||||
|
- **Testing** - Test data for schema validation
|
||||||
|
- **API** - Schema information for APIs
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Output is pretty-printed with 2-space indentation
|
||||||
|
- Preserves all schema metadata
|
||||||
|
- Can be round-tripped (read and write) without loss
|
||||||
|
- Schema-agnostic format
|
||||||
|
- Ideal for automation and tooling
|
||||||
195
pkg/writers/pgsql/README.md
Normal file
195
pkg/writers/pgsql/README.md
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
# PostgreSQL Writer
|
||||||
|
|
||||||
|
Generates PostgreSQL DDL (Data Definition Language) SQL scripts from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The PostgreSQL Writer converts RelSpec's internal database model representation into PostgreSQL-compatible SQL DDL scripts, including CREATE TABLE statements, constraints, indexes, views, and sequences.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates complete PostgreSQL DDL
|
||||||
|
- Creates schemas, tables, columns
|
||||||
|
- Defines constraints (PK, FK, unique, check)
|
||||||
|
- Creates indexes
|
||||||
|
- Generates views and sequences
|
||||||
|
- Supports migration scripts
|
||||||
|
- Includes audit triggers (optional)
|
||||||
|
- Handles PostgreSQL-specific data types
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.sql",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := pgsql.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate PostgreSQL DDL from JSON schema
|
||||||
|
relspec --input json \
|
||||||
|
--in-file schema.json \
|
||||||
|
--output pgsql \
|
||||||
|
--out-file schema.sql
|
||||||
|
|
||||||
|
# Convert GORM models to PostgreSQL DDL
|
||||||
|
relspec --input gorm \
|
||||||
|
--in-file models.go \
|
||||||
|
--output pgsql \
|
||||||
|
--out-file create_tables.sql
|
||||||
|
|
||||||
|
# Export live database schema to SQL
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/source_db" \
|
||||||
|
--output pgsql \
|
||||||
|
--out-file backup_schema.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated SQL Example
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Schema: public
|
||||||
|
|
||||||
|
CREATE SCHEMA IF NOT EXISTS public;
|
||||||
|
|
||||||
|
-- Table: public.users
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS public.users (
|
||||||
|
id BIGSERIAL PRIMARY KEY,
|
||||||
|
username VARCHAR(50) NOT NULL,
|
||||||
|
email VARCHAR(100) NOT NULL,
|
||||||
|
bio TEXT,
|
||||||
|
created_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Constraints for public.users
|
||||||
|
|
||||||
|
ALTER TABLE public.users
|
||||||
|
ADD CONSTRAINT uq_users_username UNIQUE (username);
|
||||||
|
|
||||||
|
-- Indexes for public.users
|
||||||
|
|
||||||
|
CREATE INDEX idx_users_email ON public.users (email);
|
||||||
|
|
||||||
|
-- Table: public.posts
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS public.posts (
|
||||||
|
id BIGSERIAL PRIMARY KEY,
|
||||||
|
user_id BIGINT NOT NULL,
|
||||||
|
title VARCHAR(200) NOT NULL,
|
||||||
|
content TEXT,
|
||||||
|
created_at TIMESTAMP DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Foreign Keys for public.posts
|
||||||
|
|
||||||
|
ALTER TABLE public.posts
|
||||||
|
ADD CONSTRAINT fk_posts_user_id
|
||||||
|
FOREIGN KEY (user_id)
|
||||||
|
REFERENCES public.users (id)
|
||||||
|
ON DELETE CASCADE
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
|
||||||
|
-- Indexes for public.posts
|
||||||
|
|
||||||
|
CREATE INDEX idx_posts_user_id ON public.posts (user_id);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Writer Options
|
||||||
|
|
||||||
|
### Metadata Options
|
||||||
|
|
||||||
|
```go
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.sql",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"include_drop": true, // Include DROP statements
|
||||||
|
"include_audit": true, // Include audit triggers
|
||||||
|
"if_not_exists": true, // Use IF NOT EXISTS
|
||||||
|
"migration_mode": false, // Generate migration script
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
### Full DDL Generation
|
||||||
|
|
||||||
|
Generates complete database structure:
|
||||||
|
- CREATE SCHEMA statements
|
||||||
|
- CREATE TABLE with all columns and types
|
||||||
|
- PRIMARY KEY constraints
|
||||||
|
- FOREIGN KEY constraints with actions
|
||||||
|
- UNIQUE constraints
|
||||||
|
- CHECK constraints
|
||||||
|
- CREATE INDEX statements
|
||||||
|
- CREATE VIEW statements
|
||||||
|
- CREATE SEQUENCE statements
|
||||||
|
|
||||||
|
### Migration Mode
|
||||||
|
|
||||||
|
When `migration_mode` is enabled, generates migration scripts with:
|
||||||
|
- Version tracking
|
||||||
|
- Up/down migrations
|
||||||
|
- Transactional DDL
|
||||||
|
- Rollback support
|
||||||
|
|
||||||
|
### Audit Triggers
|
||||||
|
|
||||||
|
When `include_audit` is enabled, adds:
|
||||||
|
- Created/updated timestamp triggers
|
||||||
|
- Audit logging functionality
|
||||||
|
- Change tracking
|
||||||
|
|
||||||
|
## PostgreSQL-Specific Features
|
||||||
|
|
||||||
|
- Serial types (SERIAL, BIGSERIAL)
|
||||||
|
- Advanced types (UUID, JSONB, ARRAY)
|
||||||
|
- Schema-qualified names
|
||||||
|
- Constraint actions (CASCADE, RESTRICT, SET NULL)
|
||||||
|
- Partial indexes
|
||||||
|
- Function-based indexes
|
||||||
|
- Check constraints with expressions
|
||||||
|
|
||||||
|
## Data Types
|
||||||
|
|
||||||
|
Supports all PostgreSQL data types:
|
||||||
|
- Integer types: SMALLINT, INTEGER, BIGINT, SERIAL, BIGSERIAL
|
||||||
|
- Numeric types: NUMERIC, DECIMAL, REAL, DOUBLE PRECISION
|
||||||
|
- String types: VARCHAR, CHAR, TEXT
|
||||||
|
- Date/Time: DATE, TIME, TIMESTAMP, TIMESTAMPTZ, INTERVAL
|
||||||
|
- Boolean: BOOLEAN
|
||||||
|
- Binary: BYTEA
|
||||||
|
- JSON: JSON, JSONB
|
||||||
|
- UUID: UUID
|
||||||
|
- Network: INET, CIDR, MACADDR
|
||||||
|
- Special: ARRAY, HSTORE
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Generated SQL is formatted and readable
|
||||||
|
- Comments are preserved from source schema
|
||||||
|
- Schema names are fully qualified
|
||||||
|
- Default values are properly quoted
|
||||||
|
- Constraint names follow PostgreSQL conventions
|
||||||
|
- Compatible with PostgreSQL 12+
|
||||||
135
pkg/writers/prisma/README.md
Normal file
135
pkg/writers/prisma/README.md
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
# Prisma Writer
|
||||||
|
|
||||||
|
Generates Prisma schema files from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Prisma Writer converts RelSpec's internal database model representation into Prisma schema language (`.prisma` files), complete with models, fields, relationships, and attributes.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates Prisma schema syntax
|
||||||
|
- Creates model definitions with proper field types
|
||||||
|
- Adds Prisma attributes (@id, @unique, @default, etc.)
|
||||||
|
- Generates relationship fields
|
||||||
|
- Includes datasource and generator configurations
|
||||||
|
- Maps table/column names with @map and @@map
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.prisma",
|
||||||
|
Metadata: map[string]interface{}{
|
||||||
|
"datasource_provider": "postgresql",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := prisma.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate Prisma schema from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output prisma \
|
||||||
|
--out-file schema.prisma
|
||||||
|
|
||||||
|
# Convert GORM models to Prisma
|
||||||
|
relspec --input gorm --in-file models.go --output prisma --out-file schema.prisma
|
||||||
|
|
||||||
|
# Convert JSON to Prisma schema
|
||||||
|
relspec --input json --in-file database.json --output prisma --out-file prisma/schema.prisma
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated Code Example
|
||||||
|
|
||||||
|
```prisma
|
||||||
|
datasource db {
|
||||||
|
provider = "postgresql"
|
||||||
|
url = env("DATABASE_URL")
|
||||||
|
}
|
||||||
|
|
||||||
|
generator client {
|
||||||
|
provider = "prisma-client-js"
|
||||||
|
}
|
||||||
|
|
||||||
|
model User {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
username String @unique @db.VarChar(50)
|
||||||
|
email String @db.VarChar(100)
|
||||||
|
bio String? @db.Text
|
||||||
|
createdAt DateTime @default(now()) @map("created_at")
|
||||||
|
|
||||||
|
posts Post[]
|
||||||
|
|
||||||
|
@@map("users")
|
||||||
|
}
|
||||||
|
|
||||||
|
model Post {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
userId Int @map("user_id")
|
||||||
|
title String @db.VarChar(200)
|
||||||
|
content String? @db.Text
|
||||||
|
|
||||||
|
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@map("posts")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported Prisma Attributes
|
||||||
|
|
||||||
|
### Field Attributes
|
||||||
|
- `@id` - Primary key
|
||||||
|
- `@unique` - Unique constraint
|
||||||
|
- `@default()` - Default value
|
||||||
|
- `@map()` - Column name mapping
|
||||||
|
- `@db.*` - Database-specific types
|
||||||
|
- `@relation()` - Relationship definition
|
||||||
|
|
||||||
|
### Model Attributes
|
||||||
|
- `@@map()` - Table name mapping
|
||||||
|
- `@@unique()` - Composite unique constraints
|
||||||
|
- `@@index()` - Index definitions
|
||||||
|
- `@@id()` - Composite primary keys
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
| SQL Type | Prisma Type | Database Type |
|
||||||
|
|----------|-------------|---------------|
|
||||||
|
| bigint | Int | @db.BigInt |
|
||||||
|
| integer | Int | - |
|
||||||
|
| varchar(n) | String | @db.VarChar(n) |
|
||||||
|
| text | String | @db.Text |
|
||||||
|
| boolean | Boolean | - |
|
||||||
|
| timestamp | DateTime | @db.Timestamp |
|
||||||
|
| uuid | String | @db.Uuid |
|
||||||
|
| json | Json | - |
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Model names are PascalCase (e.g., `User`, `Post`)
|
||||||
|
- Field names are camelCase with `@map` for snake_case columns
|
||||||
|
- Table names use `@@map` when different from model name
|
||||||
|
- Nullable fields are marked with `?`
|
||||||
|
- Relationship fields are automatically generated
|
||||||
|
- Datasource provider defaults to `postgresql`
|
||||||
551
pkg/writers/prisma/writer.go
Normal file
551
pkg/writers/prisma/writer.go
Normal file
@@ -0,0 +1,551 @@
|
|||||||
|
package prisma
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Writer implements the writers.Writer interface for Prisma schema format
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWriter creates a new Prisma writer with the given options
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
return &Writer{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteDatabase writes a Database model to Prisma schema format
|
||||||
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
|
content := w.databaseToPrisma(db)
|
||||||
|
|
||||||
|
if w.options.OutputPath != "" {
|
||||||
|
return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Print(content)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteSchema writes a Schema model to Prisma schema format
|
||||||
|
func (w *Writer) WriteSchema(schema *models.Schema) error {
|
||||||
|
// Create temporary database for schema
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTable writes a Table model to Prisma schema format
|
||||||
|
func (w *Writer) WriteTable(table *models.Table) error {
|
||||||
|
// Create temporary schema and database for table
|
||||||
|
schema := models.InitSchema(table.Schema)
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
return w.WriteSchema(schema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// databaseToPrisma converts a Database to Prisma schema format string
|
||||||
|
func (w *Writer) databaseToPrisma(db *models.Database) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Write datasource block
|
||||||
|
sb.WriteString(w.generateDatasource(db))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Write generator block
|
||||||
|
sb.WriteString(w.generateGenerator())
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Process all schemas (typically just one in Prisma)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
// Write enums
|
||||||
|
if len(schema.Enums) > 0 {
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
sb.WriteString(w.enumToPrisma(enum))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Identify join tables for implicit M2M
|
||||||
|
joinTables := w.identifyJoinTables(schema)
|
||||||
|
|
||||||
|
// Write models (excluding join tables)
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if joinTables[table.Name] {
|
||||||
|
continue // Skip join tables
|
||||||
|
}
|
||||||
|
sb.WriteString(w.tableToPrisma(table, schema, joinTables))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateDatasource generates the datasource block
|
||||||
|
func (w *Writer) generateDatasource(db *models.Database) string {
|
||||||
|
provider := "postgresql"
|
||||||
|
|
||||||
|
// Map database type to Prisma provider
|
||||||
|
switch db.DatabaseType {
|
||||||
|
case models.PostgresqlDatabaseType:
|
||||||
|
provider = "postgresql"
|
||||||
|
case models.MSSQLDatabaseType:
|
||||||
|
provider = "sqlserver"
|
||||||
|
case models.SqlLiteDatabaseType:
|
||||||
|
provider = "sqlite"
|
||||||
|
case "mysql":
|
||||||
|
provider = "mysql"
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf(`datasource db {
|
||||||
|
provider = "%s"
|
||||||
|
url = env("DATABASE_URL")
|
||||||
|
}
|
||||||
|
`, provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateGenerator generates the generator block
|
||||||
|
func (w *Writer) generateGenerator() string {
|
||||||
|
return `generator client {
|
||||||
|
provider = "prisma-client-js"
|
||||||
|
}
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
// enumToPrisma converts an Enum to Prisma enum block
|
||||||
|
func (w *Writer) enumToPrisma(enum *models.Enum) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
|
||||||
|
for _, value := range enum.Values {
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s\n", value))
|
||||||
|
}
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// identifyJoinTables identifies tables that are join tables for M2M relations
|
||||||
|
func (w *Writer) identifyJoinTables(schema *models.Schema) map[string]bool {
|
||||||
|
joinTables := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check if this is a join table:
|
||||||
|
// 1. Starts with _ (Prisma convention)
|
||||||
|
// 2. Has exactly 2 FK constraints
|
||||||
|
// 3. Has composite PK with those 2 columns
|
||||||
|
// 4. Has no other columns except the FK columns
|
||||||
|
|
||||||
|
if !strings.HasPrefix(table.Name, "_") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fks := table.GetForeignKeys()
|
||||||
|
if len(fks) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if columns are only the FK columns
|
||||||
|
if len(table.Columns) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if both FK columns are part of PK
|
||||||
|
pkCols := 0
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
pkCols++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkCols == 2 {
|
||||||
|
joinTables[table.Name] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return joinTables
|
||||||
|
}
|
||||||
|
|
||||||
|
// tableToPrisma converts a Table to Prisma model block
|
||||||
|
func (w *Writer) tableToPrisma(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf("model %s {\n", table.Name))
|
||||||
|
|
||||||
|
// Collect columns to write
|
||||||
|
columns := make([]*models.Column, 0, len(table.Columns))
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
columns = append(columns, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort columns for consistent output
|
||||||
|
sort.Slice(columns, func(i, j int) bool {
|
||||||
|
return columns[i].Name < columns[j].Name
|
||||||
|
})
|
||||||
|
|
||||||
|
// Write scalar fields
|
||||||
|
for _, col := range columns {
|
||||||
|
// Skip if this column is part of a relation that will be output as array field
|
||||||
|
if w.isRelationColumn(col, table) {
|
||||||
|
// We'll output this with the relation field
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(w.columnToField(col, table, schema))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write relation fields
|
||||||
|
sb.WriteString(w.generateRelationFields(table, schema, joinTables))
|
||||||
|
|
||||||
|
// Write block attributes (@@id, @@unique, @@index)
|
||||||
|
sb.WriteString(w.generateBlockAttributes(table))
|
||||||
|
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// columnToField converts a Column to a Prisma field definition
|
||||||
|
func (w *Writer) columnToField(col *models.Column, table *models.Table, schema *models.Schema) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Field name
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s", col.Name))
|
||||||
|
|
||||||
|
// Field type
|
||||||
|
prismaType := w.sqlTypeToPrisma(col.Type, schema)
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s", prismaType))
|
||||||
|
|
||||||
|
// Optional modifier
|
||||||
|
if !col.NotNull && !col.IsPrimaryKey {
|
||||||
|
sb.WriteString("?")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Field attributes
|
||||||
|
attributes := w.generateFieldAttributes(col, table)
|
||||||
|
if attributes != "" {
|
||||||
|
sb.WriteString(" ")
|
||||||
|
sb.WriteString(attributes)
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlTypeToPrisma converts SQL types to Prisma types
|
||||||
|
func (w *Writer) sqlTypeToPrisma(sqlType string, schema *models.Schema) string {
|
||||||
|
// Check if it's an enum
|
||||||
|
for _, enum := range schema.Enums {
|
||||||
|
if strings.EqualFold(sqlType, enum.Name) {
|
||||||
|
return enum.Name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard type mapping
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"text": "String",
|
||||||
|
"varchar": "String",
|
||||||
|
"character varying": "String",
|
||||||
|
"char": "String",
|
||||||
|
"boolean": "Boolean",
|
||||||
|
"bool": "Boolean",
|
||||||
|
"integer": "Int",
|
||||||
|
"int": "Int",
|
||||||
|
"int4": "Int",
|
||||||
|
"bigint": "BigInt",
|
||||||
|
"int8": "BigInt",
|
||||||
|
"double precision": "Float",
|
||||||
|
"float": "Float",
|
||||||
|
"float8": "Float",
|
||||||
|
"decimal": "Decimal",
|
||||||
|
"numeric": "Decimal",
|
||||||
|
"timestamp": "DateTime",
|
||||||
|
"timestamptz": "DateTime",
|
||||||
|
"date": "DateTime",
|
||||||
|
"jsonb": "Json",
|
||||||
|
"json": "Json",
|
||||||
|
"bytea": "Bytes",
|
||||||
|
}
|
||||||
|
|
||||||
|
for sqlPattern, prismaType := range typeMap {
|
||||||
|
if strings.Contains(strings.ToLower(sqlType), sqlPattern) {
|
||||||
|
return prismaType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to String for unknown types
|
||||||
|
return "String"
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateFieldAttributes generates field attributes like @id, @unique, @default
|
||||||
|
func (w *Writer) generateFieldAttributes(col *models.Column, table *models.Table) string {
|
||||||
|
attrs := make([]string, 0)
|
||||||
|
|
||||||
|
// @id
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
// Check if this is part of a composite key
|
||||||
|
pkCount := 0
|
||||||
|
for _, c := range table.Columns {
|
||||||
|
if c.IsPrimaryKey {
|
||||||
|
pkCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if pkCount == 1 {
|
||||||
|
attrs = append(attrs, "@id")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// @unique
|
||||||
|
if w.hasUniqueConstraint(col.Name, table) {
|
||||||
|
attrs = append(attrs, "@unique")
|
||||||
|
}
|
||||||
|
|
||||||
|
// @default
|
||||||
|
if col.AutoIncrement {
|
||||||
|
attrs = append(attrs, "@default(autoincrement())")
|
||||||
|
} else if col.Default != nil {
|
||||||
|
defaultAttr := w.formatDefaultValue(col.Default)
|
||||||
|
if defaultAttr != "" {
|
||||||
|
attrs = append(attrs, fmt.Sprintf("@default(%s)", defaultAttr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// @updatedAt (check comment)
|
||||||
|
if strings.Contains(col.Comment, "@updatedAt") {
|
||||||
|
attrs = append(attrs, "@updatedAt")
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(attrs, " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatDefaultValue formats a default value for Prisma
|
||||||
|
func (w *Writer) formatDefaultValue(defaultValue any) string {
|
||||||
|
switch v := defaultValue.(type) {
|
||||||
|
case string:
|
||||||
|
if v == "now()" {
|
||||||
|
return "now()"
|
||||||
|
} else if v == "gen_random_uuid()" {
|
||||||
|
return "uuid()"
|
||||||
|
} else if strings.Contains(strings.ToLower(v), "uuid") {
|
||||||
|
return "uuid()"
|
||||||
|
} else {
|
||||||
|
// String literal
|
||||||
|
return fmt.Sprintf(`"%s"`, v)
|
||||||
|
}
|
||||||
|
case bool:
|
||||||
|
if v {
|
||||||
|
return "true"
|
||||||
|
}
|
||||||
|
return "false"
|
||||||
|
case int, int64, int32:
|
||||||
|
return fmt.Sprintf("%v", v)
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("%v", v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasUniqueConstraint checks if a column has a unique constraint
|
||||||
|
func (w *Writer) hasUniqueConstraint(colName string, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.UniqueConstraint &&
|
||||||
|
len(constraint.Columns) == 1 &&
|
||||||
|
constraint.Columns[0] == colName {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// isRelationColumn checks if a column is a FK column
|
||||||
|
func (w *Writer) isRelationColumn(col *models.Column, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, fkCol := range constraint.Columns {
|
||||||
|
if fkCol == col.Name {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateRelationFields generates relation fields and their FK columns
|
||||||
|
func (w *Writer) generateRelationFields(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Get all FK constraints
|
||||||
|
fks := table.GetForeignKeys()
|
||||||
|
|
||||||
|
for _, fk := range fks {
|
||||||
|
// Generate the FK scalar field
|
||||||
|
for _, fkCol := range fk.Columns {
|
||||||
|
if col, exists := table.Columns[fkCol]; exists {
|
||||||
|
sb.WriteString(w.columnToField(col, table, schema))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate the relation field
|
||||||
|
relationType := fk.ReferencedTable
|
||||||
|
isOptional := false
|
||||||
|
|
||||||
|
// Check if FK column is nullable
|
||||||
|
for _, fkCol := range fk.Columns {
|
||||||
|
if col, exists := table.Columns[fkCol]; exists {
|
||||||
|
if !col.NotNull {
|
||||||
|
isOptional = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
relationName := relationType
|
||||||
|
if strings.HasSuffix(strings.ToLower(relationName), "s") {
|
||||||
|
relationName = relationName[:len(relationName)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s %s", strings.ToLower(relationName), relationType))
|
||||||
|
|
||||||
|
if isOptional {
|
||||||
|
sb.WriteString("?")
|
||||||
|
}
|
||||||
|
|
||||||
|
// @relation attribute
|
||||||
|
relationAttr := w.generateRelationAttribute(fk)
|
||||||
|
if relationAttr != "" {
|
||||||
|
sb.WriteString(" ")
|
||||||
|
sb.WriteString(relationAttr)
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate inverse relations (arrays) for tables that reference this one
|
||||||
|
sb.WriteString(w.generateInverseRelations(table, schema, joinTables))
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateRelationAttribute generates the @relation(...) attribute
|
||||||
|
func (w *Writer) generateRelationAttribute(fk *models.Constraint) string {
|
||||||
|
parts := make([]string, 0)
|
||||||
|
|
||||||
|
// fields
|
||||||
|
fieldsStr := strings.Join(fk.Columns, ", ")
|
||||||
|
parts = append(parts, fmt.Sprintf("fields: [%s]", fieldsStr))
|
||||||
|
|
||||||
|
// references
|
||||||
|
referencesStr := strings.Join(fk.ReferencedColumns, ", ")
|
||||||
|
parts = append(parts, fmt.Sprintf("references: [%s]", referencesStr))
|
||||||
|
|
||||||
|
// onDelete
|
||||||
|
if fk.OnDelete != "" {
|
||||||
|
parts = append(parts, fmt.Sprintf("onDelete: %s", fk.OnDelete))
|
||||||
|
}
|
||||||
|
|
||||||
|
// onUpdate
|
||||||
|
if fk.OnUpdate != "" {
|
||||||
|
parts = append(parts, fmt.Sprintf("onUpdate: %s", fk.OnUpdate))
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("@relation(%s)", strings.Join(parts, ", "))
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateInverseRelations generates array fields for reverse relationships
|
||||||
|
func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Find all tables that have FKs pointing to this table
|
||||||
|
for _, otherTable := range schema.Tables {
|
||||||
|
if otherTable.Name == table.Name {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a join table
|
||||||
|
if joinTables[otherTable.Name] {
|
||||||
|
// Handle implicit M2M
|
||||||
|
if w.isJoinTableFor(otherTable, table.Name) {
|
||||||
|
// Find the other side of the M2M
|
||||||
|
for _, fk := range otherTable.GetForeignKeys() {
|
||||||
|
if fk.ReferencedTable != table.Name {
|
||||||
|
// This is the other side
|
||||||
|
otherSide := fk.ReferencedTable
|
||||||
|
sb.WriteString(fmt.Sprintf(" %ss %s[]\n",
|
||||||
|
strings.ToLower(otherSide), otherSide))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Regular one-to-many inverse relation
|
||||||
|
for _, fk := range otherTable.GetForeignKeys() {
|
||||||
|
if fk.ReferencedTable == table.Name {
|
||||||
|
// This table is referenced by otherTable
|
||||||
|
pluralName := otherTable.Name
|
||||||
|
if !strings.HasSuffix(pluralName, "s") {
|
||||||
|
pluralName += "s"
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s %s[]\n",
|
||||||
|
strings.ToLower(pluralName), otherTable.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// isJoinTableFor checks if a table is a join table involving the specified model
|
||||||
|
func (w *Writer) isJoinTableFor(joinTable *models.Table, modelName string) bool {
|
||||||
|
for _, fk := range joinTable.GetForeignKeys() {
|
||||||
|
if fk.ReferencedTable == modelName {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateBlockAttributes generates block-level attributes like @@id, @@unique, @@index
|
||||||
|
func (w *Writer) generateBlockAttributes(table *models.Table) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// @@id for composite primary key
|
||||||
|
pkCols := make([]string, 0)
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
pkCols = append(pkCols, col.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(pkCols) > 1 {
|
||||||
|
sort.Strings(pkCols)
|
||||||
|
sb.WriteString(fmt.Sprintf(" @@id([%s])\n", strings.Join(pkCols, ", ")))
|
||||||
|
}
|
||||||
|
|
||||||
|
// @@unique for multi-column unique constraints
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.UniqueConstraint && len(constraint.Columns) > 1 {
|
||||||
|
sb.WriteString(fmt.Sprintf(" @@unique([%s])\n", strings.Join(constraint.Columns, ", ")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// @@index for indexes
|
||||||
|
for _, index := range table.Indexes {
|
||||||
|
if !index.Unique { // Unique indexes are handled by @@unique
|
||||||
|
sb.WriteString(fmt.Sprintf(" @@index([%s])\n", strings.Join(index.Columns, ", ")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
226
pkg/writers/sqlexec/README.md
Normal file
226
pkg/writers/sqlexec/README.md
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
# SQL Executor Writer
|
||||||
|
|
||||||
|
The SQL Executor Writer (`sqlexec`) executes SQL scripts from `models.Script` objects against a PostgreSQL database. Scripts are executed in order based on Priority (ascending) and Sequence (ascending).
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Ordered Execution**: Scripts execute in Priority→Sequence order
|
||||||
|
- **PostgreSQL Support**: Uses `pgx/v5` driver for robust PostgreSQL connectivity
|
||||||
|
- **Stop on Error**: Execution halts immediately on first error (default behavior)
|
||||||
|
- **Progress Reporting**: Prints execution status to stdout
|
||||||
|
- **Multiple Schemas**: Can execute scripts from multiple schemas in a database
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://user:password@localhost:5432/dbname?sslmode=disable",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Execute all scripts from database
|
||||||
|
err := writer.WriteDatabase(database)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Execution failed: %v", err)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Execute Single Schema
|
||||||
|
|
||||||
|
```go
|
||||||
|
err := writer.WriteSchema(schema)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Schema execution failed: %v", err)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Complete Example with SQL Directory Reader
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqldir"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/sqlexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Read SQL scripts from directory
|
||||||
|
reader := sqldir.NewReader(&readers.ReaderOptions{
|
||||||
|
FilePath: "./migrations",
|
||||||
|
})
|
||||||
|
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute scripts against PostgreSQL
|
||||||
|
writer := sqlexec.NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/myapp",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := writer.WriteDatabase(db); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Required Metadata
|
||||||
|
|
||||||
|
- **connection_string**: PostgreSQL connection string (required)
|
||||||
|
|
||||||
|
### Connection String Format
|
||||||
|
|
||||||
|
```
|
||||||
|
postgres://[user[:password]@][host][:port][/dbname][?param1=value1&...]
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
```
|
||||||
|
postgres://localhost/mydb
|
||||||
|
postgres://user:pass@localhost:5432/mydb?sslmode=disable
|
||||||
|
postgres://user@localhost/mydb?sslmode=require
|
||||||
|
postgresql://user:pass@prod-db.example.com:5432/production
|
||||||
|
```
|
||||||
|
|
||||||
|
## Execution Order
|
||||||
|
|
||||||
|
Scripts are sorted and executed based on:
|
||||||
|
|
||||||
|
1. **Priority** (ascending): Lower priority values execute first
|
||||||
|
2. **Sequence** (ascending): Within same priority, lower sequence values execute first
|
||||||
|
|
||||||
|
### Example Execution Order
|
||||||
|
|
||||||
|
Given these scripts:
|
||||||
|
```
|
||||||
|
Script A: Priority=2, Sequence=1
|
||||||
|
Script B: Priority=1, Sequence=3
|
||||||
|
Script C: Priority=1, Sequence=1
|
||||||
|
Script D: Priority=1, Sequence=2
|
||||||
|
Script E: Priority=3, Sequence=1
|
||||||
|
```
|
||||||
|
|
||||||
|
Execution order: **C → D → B → A → E**
|
||||||
|
|
||||||
|
## Output
|
||||||
|
|
||||||
|
The writer prints progress to stdout:
|
||||||
|
|
||||||
|
```
|
||||||
|
Executing script: create_users (Priority=1, Sequence=1)
|
||||||
|
✓ Successfully executed: create_users
|
||||||
|
Executing script: create_posts (Priority=1, Sequence=2)
|
||||||
|
✓ Successfully executed: create_posts
|
||||||
|
Executing script: add_indexes (Priority=2, Sequence=1)
|
||||||
|
✓ Successfully executed: add_indexes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
### Connection Errors
|
||||||
|
|
||||||
|
If the database connection fails, execution stops immediately:
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: failed to connect to database: connection refused
|
||||||
|
```
|
||||||
|
|
||||||
|
### Script Execution Errors
|
||||||
|
|
||||||
|
If a script fails, execution stops and returns the error with context:
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: failed to execute script add_indexes (Priority=2, Sequence=1):
|
||||||
|
syntax error at or near "IDNEX"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Behavior**: Stop on first error (scripts executed before the error remain committed)
|
||||||
|
|
||||||
|
### Empty Script Handling
|
||||||
|
|
||||||
|
Scripts with empty SQL content are skipped silently.
|
||||||
|
|
||||||
|
## Database Support
|
||||||
|
|
||||||
|
Currently supports:
|
||||||
|
- ✅ PostgreSQL (via pgx/v5)
|
||||||
|
|
||||||
|
Future support planned for:
|
||||||
|
- MySQL/MariaDB
|
||||||
|
- SQLite
|
||||||
|
- Generic SQL via database/sql
|
||||||
|
|
||||||
|
## Transaction Behavior
|
||||||
|
|
||||||
|
**Current**: Each script executes in its own implicit transaction (PostgreSQL default behavior)
|
||||||
|
|
||||||
|
**Future Enhancement**: Option to wrap all scripts in a single transaction for atomic execution with rollback on error.
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
- Scripts execute sequentially (not in parallel)
|
||||||
|
- Each script creates a database round-trip
|
||||||
|
- For large migrations, consider:
|
||||||
|
- Combining related statements into fewer scripts
|
||||||
|
- Using PostgreSQL's COPY command for bulk data
|
||||||
|
- Running during low-traffic periods
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
Run tests:
|
||||||
|
```bash
|
||||||
|
go test ./pkg/writers/sqlexec/
|
||||||
|
```
|
||||||
|
|
||||||
|
Current tests include:
|
||||||
|
- Validation and error handling
|
||||||
|
- Script sorting logic
|
||||||
|
- Configuration validation
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
|
||||||
|
For integration testing with a real database:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start PostgreSQL (example with Docker)
|
||||||
|
docker run -d --name postgres-test \
|
||||||
|
-e POSTGRES_PASSWORD=test \
|
||||||
|
-e POSTGRES_DB=testdb \
|
||||||
|
-p 5432:5432 \
|
||||||
|
postgres:16
|
||||||
|
|
||||||
|
# Run your integration tests
|
||||||
|
go test -tags=integration ./pkg/writers/sqlexec/
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
docker stop postgres-test
|
||||||
|
docker rm postgres-test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
- `WriteTable()` is not supported (returns error)
|
||||||
|
- Requires PostgreSQL connection (no offline mode)
|
||||||
|
- No built-in transaction wrapping (yet)
|
||||||
|
- No rollback script support (yet, though `models.Script.Rollback` field exists)
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
- **SQL Directory Reader**: `pkg/readers/sqldir/` - Read scripts from filesystem
|
||||||
|
- **Script Model**: `pkg/models/models.go` - Script structure definition
|
||||||
|
- **pgx Documentation**: https://github.com/jackc/pgx - PostgreSQL driver docs
|
||||||
125
pkg/writers/sqlexec/writer.go
Normal file
125
pkg/writers/sqlexec/writer.go
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
package sqlexec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Writer implements the writers.Writer interface for executing SQL scripts
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWriter creates a new SQL executor writer
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
return &Writer{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteDatabase executes all scripts from all schemas in the database
|
||||||
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
|
if db == nil {
|
||||||
|
return fmt.Errorf("database is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get connection string from metadata
|
||||||
|
connString, ok := w.options.Metadata["connection_string"].(string)
|
||||||
|
if !ok || connString == "" {
|
||||||
|
return fmt.Errorf("connection_string is required in writer metadata")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Connect to database
|
||||||
|
ctx := context.Background()
|
||||||
|
conn, err := pgx.Connect(ctx, connString)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to connect to database: %w", err)
|
||||||
|
}
|
||||||
|
defer conn.Close(ctx)
|
||||||
|
|
||||||
|
// Execute scripts from all schemas
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
|
||||||
|
return fmt.Errorf("failed to execute scripts from schema %s: %w", schema.Name, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteSchema executes all scripts from a single schema
|
||||||
|
func (w *Writer) WriteSchema(schema *models.Schema) error {
|
||||||
|
if schema == nil {
|
||||||
|
return fmt.Errorf("schema is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get connection string from metadata
|
||||||
|
connString, ok := w.options.Metadata["connection_string"].(string)
|
||||||
|
if !ok || connString == "" {
|
||||||
|
return fmt.Errorf("connection_string is required in writer metadata")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Connect to database
|
||||||
|
ctx := context.Background()
|
||||||
|
conn, err := pgx.Connect(ctx, connString)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to connect to database: %w", err)
|
||||||
|
}
|
||||||
|
defer conn.Close(ctx)
|
||||||
|
|
||||||
|
// Execute scripts
|
||||||
|
if err := w.executeScripts(ctx, conn, schema.Scripts); err != nil {
|
||||||
|
return fmt.Errorf("failed to execute scripts: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTable is not applicable for SQL script execution
|
||||||
|
func (w *Writer) WriteTable(table *models.Table) error {
|
||||||
|
return fmt.Errorf("WriteTable is not supported for SQL script execution")
|
||||||
|
}
|
||||||
|
|
||||||
|
// executeScripts executes scripts in Priority then Sequence order
|
||||||
|
func (w *Writer) executeScripts(ctx context.Context, conn *pgx.Conn, scripts []*models.Script) error {
|
||||||
|
if len(scripts) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort scripts by Priority (ascending) then Sequence (ascending)
|
||||||
|
sortedScripts := make([]*models.Script, len(scripts))
|
||||||
|
copy(sortedScripts, scripts)
|
||||||
|
sort.Slice(sortedScripts, func(i, j int) bool {
|
||||||
|
if sortedScripts[i].Priority != sortedScripts[j].Priority {
|
||||||
|
return sortedScripts[i].Priority < sortedScripts[j].Priority
|
||||||
|
}
|
||||||
|
return sortedScripts[i].Sequence < sortedScripts[j].Sequence
|
||||||
|
})
|
||||||
|
|
||||||
|
// Execute each script in order
|
||||||
|
for _, script := range sortedScripts {
|
||||||
|
if script.SQL == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Executing script: %s (Priority=%d, Sequence=%d)\n",
|
||||||
|
script.Name, script.Priority, script.Sequence)
|
||||||
|
|
||||||
|
// Execute the SQL script
|
||||||
|
_, err := conn.Exec(ctx, script.SQL)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to execute script %s (Priority=%d, Sequence=%d): %w",
|
||||||
|
script.Name, script.Priority, script.Sequence, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("✓ Successfully executed: %s\n", script.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
201
pkg/writers/sqlexec/writer_test.go
Normal file
201
pkg/writers/sqlexec/writer_test.go
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
package sqlexec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewWriter(t *testing.T) {
|
||||||
|
opts := &writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/test",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := NewWriter(opts)
|
||||||
|
if writer == nil {
|
||||||
|
t.Fatal("Expected non-nil writer")
|
||||||
|
}
|
||||||
|
if writer.options != opts {
|
||||||
|
t.Error("Writer options not set correctly")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_NilDatabase(t *testing.T) {
|
||||||
|
writer := NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/test",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
err := writer.WriteDatabase(nil)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for nil database, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteDatabase_MissingConnectionString(t *testing.T) {
|
||||||
|
writer := NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{},
|
||||||
|
})
|
||||||
|
|
||||||
|
db := &models.Database{
|
||||||
|
Name: "test",
|
||||||
|
Schemas: []*models.Schema{
|
||||||
|
{
|
||||||
|
Name: "public",
|
||||||
|
Scripts: []*models.Script{
|
||||||
|
{Name: "test", SQL: "SELECT 1;"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for missing connection_string, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteSchema_NilSchema(t *testing.T) {
|
||||||
|
writer := NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://localhost/test",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
err := writer.WriteSchema(nil)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for nil schema, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteSchema_MissingConnectionString(t *testing.T) {
|
||||||
|
writer := NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{},
|
||||||
|
})
|
||||||
|
|
||||||
|
schema := &models.Schema{
|
||||||
|
Name: "public",
|
||||||
|
Scripts: []*models.Script{
|
||||||
|
{Name: "test", SQL: "SELECT 1;"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := writer.WriteSchema(schema)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for missing connection_string, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteTable(t *testing.T) {
|
||||||
|
writer := NewWriter(&writers.WriterOptions{})
|
||||||
|
|
||||||
|
err := writer.WriteTable(&models.Table{})
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for WriteTable (not supported), got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestScriptSorting verifies that scripts are sorted correctly by Priority then Sequence
|
||||||
|
func TestScriptSorting(t *testing.T) {
|
||||||
|
scripts := []*models.Script{
|
||||||
|
{Name: "script1", Priority: 2, Sequence: 1, SQL: "SELECT 1;"},
|
||||||
|
{Name: "script2", Priority: 1, Sequence: 3, SQL: "SELECT 2;"},
|
||||||
|
{Name: "script3", Priority: 1, Sequence: 1, SQL: "SELECT 3;"},
|
||||||
|
{Name: "script4", Priority: 1, Sequence: 2, SQL: "SELECT 4;"},
|
||||||
|
{Name: "script5", Priority: 3, Sequence: 1, SQL: "SELECT 5;"},
|
||||||
|
{Name: "script6", Priority: 2, Sequence: 2, SQL: "SELECT 6;"},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a copy and sort it using the same logic as executeScripts
|
||||||
|
sortedScripts := make([]*models.Script, len(scripts))
|
||||||
|
copy(sortedScripts, scripts)
|
||||||
|
|
||||||
|
// Use the same sorting logic from executeScripts
|
||||||
|
for i := 0; i < len(sortedScripts)-1; i++ {
|
||||||
|
for j := i + 1; j < len(sortedScripts); j++ {
|
||||||
|
if sortedScripts[i].Priority > sortedScripts[j].Priority ||
|
||||||
|
(sortedScripts[i].Priority == sortedScripts[j].Priority &&
|
||||||
|
sortedScripts[i].Sequence > sortedScripts[j].Sequence) {
|
||||||
|
sortedScripts[i], sortedScripts[j] = sortedScripts[j], sortedScripts[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expected order after sorting
|
||||||
|
expectedOrder := []string{
|
||||||
|
"script3", // Priority 1, Sequence 1
|
||||||
|
"script4", // Priority 1, Sequence 2
|
||||||
|
"script2", // Priority 1, Sequence 3
|
||||||
|
"script1", // Priority 2, Sequence 1
|
||||||
|
"script6", // Priority 2, Sequence 2
|
||||||
|
"script5", // Priority 3, Sequence 1
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, expected := range expectedOrder {
|
||||||
|
if sortedScripts[i].Name != expected {
|
||||||
|
t.Errorf("Position %d: expected %s, got %s", i, expected, sortedScripts[i].Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify priorities are ascending
|
||||||
|
for i := 0; i < len(sortedScripts)-1; i++ {
|
||||||
|
if sortedScripts[i].Priority > sortedScripts[i+1].Priority {
|
||||||
|
t.Errorf("Priority not ascending at position %d: %d > %d",
|
||||||
|
i, sortedScripts[i].Priority, sortedScripts[i+1].Priority)
|
||||||
|
}
|
||||||
|
// Within same priority, sequences should be ascending
|
||||||
|
if sortedScripts[i].Priority == sortedScripts[i+1].Priority &&
|
||||||
|
sortedScripts[i].Sequence > sortedScripts[i+1].Sequence {
|
||||||
|
t.Errorf("Sequence not ascending at position %d with same priority %d: %d > %d",
|
||||||
|
i, sortedScripts[i].Priority, sortedScripts[i].Sequence, sortedScripts[i+1].Sequence)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWriter_WriteSchema_EmptyScripts(t *testing.T) {
|
||||||
|
// This test verifies that writing an empty script list doesn't cause errors
|
||||||
|
// even without a database connection (should return early)
|
||||||
|
writer := NewWriter(&writers.WriterOptions{
|
||||||
|
Metadata: map[string]any{
|
||||||
|
"connection_string": "postgres://invalid/test",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
schema := &models.Schema{
|
||||||
|
Name: "public",
|
||||||
|
Scripts: []*models.Script{},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: This will try to connect even with empty scripts
|
||||||
|
// In a real scenario, the executeScripts function returns early for empty scripts
|
||||||
|
// but the connection is made before that. This test documents the behavior.
|
||||||
|
err := writer.WriteSchema(schema)
|
||||||
|
// We expect a connection error since we're using an invalid connection string
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected connection error, got nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE: Integration tests for actual database execution should be added separately
|
||||||
|
// Those tests would require:
|
||||||
|
// 1. A running PostgreSQL instance
|
||||||
|
// 2. Test database setup/teardown
|
||||||
|
// 3. Verification of actual script execution
|
||||||
|
// 4. Testing error handling during execution
|
||||||
|
// 5. Testing transaction behavior if added
|
||||||
|
//
|
||||||
|
// Example integration test structure:
|
||||||
|
// func TestWriter_Integration_ExecuteScripts(t *testing.T) {
|
||||||
|
// if testing.Short() {
|
||||||
|
// t.Skip("Skipping integration test")
|
||||||
|
// }
|
||||||
|
// // Setup test database
|
||||||
|
// // Create test scripts
|
||||||
|
// // Execute scripts
|
||||||
|
// // Verify results
|
||||||
|
// // Cleanup
|
||||||
|
// }
|
||||||
169
pkg/writers/typeorm/README.md
Normal file
169
pkg/writers/typeorm/README.md
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
# TypeORM Writer
|
||||||
|
|
||||||
|
Generates TypeScript files with TypeORM entity definitions from database schema information.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The TypeORM Writer converts RelSpec's internal database model representation into TypeScript source code with TypeORM entity classes, including proper decorators, relationships, and column configurations.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates TypeORM-compatible TypeScript entities
|
||||||
|
- Creates proper decorator usage (@Entity, @Column, etc.)
|
||||||
|
- Adds relationship decorators (@OneToMany, @ManyToOne, @JoinColumn)
|
||||||
|
- Handles column types and options
|
||||||
|
- Supports constraints and indexes
|
||||||
|
- Outputs formatted TypeScript code
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "entities/",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := typeorm.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate TypeORM entities from PostgreSQL database
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output typeorm \
|
||||||
|
--out-file entities/
|
||||||
|
|
||||||
|
# Convert GORM models to TypeORM
|
||||||
|
relspec --input gorm --in-file models.go --output typeorm --out-file src/entities/
|
||||||
|
|
||||||
|
# Convert JSON to TypeORM entities
|
||||||
|
relspec --input json --in-file schema.json --output typeorm --out-file entities/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated Code Example
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
Entity,
|
||||||
|
PrimaryGeneratedColumn,
|
||||||
|
Column,
|
||||||
|
CreateDateColumn,
|
||||||
|
OneToMany,
|
||||||
|
ManyToOne,
|
||||||
|
JoinColumn,
|
||||||
|
Index,
|
||||||
|
} from 'typeorm';
|
||||||
|
import { Post } from './Post';
|
||||||
|
|
||||||
|
@Entity('users')
|
||||||
|
export class User {
|
||||||
|
@PrimaryGeneratedColumn('increment')
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 50, unique: true })
|
||||||
|
@Index()
|
||||||
|
username: string;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 100 })
|
||||||
|
email: string;
|
||||||
|
|
||||||
|
@Column({ type: 'text', nullable: true })
|
||||||
|
bio: string | null;
|
||||||
|
|
||||||
|
@CreateDateColumn({ name: 'created_at' })
|
||||||
|
createdAt: Date;
|
||||||
|
|
||||||
|
@OneToMany(() => Post, (post) => post.user)
|
||||||
|
posts: Post[];
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity('posts')
|
||||||
|
export class Post {
|
||||||
|
@PrimaryGeneratedColumn('increment')
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ name: 'user_id' })
|
||||||
|
userId: number;
|
||||||
|
|
||||||
|
@Column({ type: 'varchar', length: 200 })
|
||||||
|
title: string;
|
||||||
|
|
||||||
|
@Column({ type: 'text', nullable: true })
|
||||||
|
content: string | null;
|
||||||
|
|
||||||
|
@ManyToOne(() => User, (user) => user.posts, { onDelete: 'CASCADE' })
|
||||||
|
@JoinColumn({ name: 'user_id' })
|
||||||
|
user: User;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supported TypeORM Decorators
|
||||||
|
|
||||||
|
### Entity Decorators
|
||||||
|
- `@Entity()` - Define entity/table
|
||||||
|
- `@PrimaryGeneratedColumn()` - Auto-increment primary key
|
||||||
|
- `@PrimaryColumn()` - Primary key
|
||||||
|
- `@Column()` - Column definition
|
||||||
|
- `@CreateDateColumn()` - Auto-set creation timestamp
|
||||||
|
- `@UpdateDateColumn()` - Auto-update timestamp
|
||||||
|
|
||||||
|
### Relationship Decorators
|
||||||
|
- `@OneToMany()` - One-to-many relationship
|
||||||
|
- `@ManyToOne()` - Many-to-one relationship
|
||||||
|
- `@JoinColumn()` - Foreign key column specification
|
||||||
|
|
||||||
|
### Constraint Decorators
|
||||||
|
- `@Index()` - Create index
|
||||||
|
- `@Unique()` - Unique constraint
|
||||||
|
|
||||||
|
## Column Options
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Column({
|
||||||
|
type: 'varchar', // Column type
|
||||||
|
length: 255, // Length for varchar/char
|
||||||
|
nullable: true, // Allow NULL
|
||||||
|
unique: true, // Unique constraint
|
||||||
|
default: 'value', // Default value
|
||||||
|
name: 'column_name', // Database column name
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Type Mapping
|
||||||
|
|
||||||
|
| SQL Type | TypeScript Type | TypeORM Type |
|
||||||
|
|----------|-----------------|--------------|
|
||||||
|
| bigint | number | 'bigint' |
|
||||||
|
| integer | number | 'int' |
|
||||||
|
| varchar | string | 'varchar' |
|
||||||
|
| text | string | 'text' |
|
||||||
|
| boolean | boolean | 'boolean' |
|
||||||
|
| timestamp | Date | 'timestamp' |
|
||||||
|
| json | object | 'json' |
|
||||||
|
| uuid | string | 'uuid' |
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Entity class names are PascalCase
|
||||||
|
- One file per entity (named after the entity)
|
||||||
|
- Relationship imports are auto-generated
|
||||||
|
- Nullable columns use TypeScript union with `null`
|
||||||
|
- Foreign key actions (CASCADE, etc.) are included
|
||||||
|
- Schema names can be specified in `@Entity()` decorator
|
||||||
631
pkg/writers/typeorm/writer.go
Normal file
631
pkg/writers/typeorm/writer.go
Normal file
@@ -0,0 +1,631 @@
|
|||||||
|
package typeorm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Writer implements the writers.Writer interface for TypeORM entity format
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWriter creates a new TypeORM writer with the given options
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
return &Writer{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteDatabase writes a Database model to TypeORM entity format
|
||||||
|
func (w *Writer) WriteDatabase(db *models.Database) error {
|
||||||
|
content := w.databaseToTypeORM(db)
|
||||||
|
|
||||||
|
if w.options.OutputPath != "" {
|
||||||
|
return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Print(content)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteSchema writes a Schema model to TypeORM entity format
|
||||||
|
func (w *Writer) WriteSchema(schema *models.Schema) error {
|
||||||
|
db := models.InitDatabase("database")
|
||||||
|
db.Schemas = []*models.Schema{schema}
|
||||||
|
|
||||||
|
return w.WriteDatabase(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTable writes a Table model to TypeORM entity format
|
||||||
|
func (w *Writer) WriteTable(table *models.Table) error {
|
||||||
|
schema := models.InitSchema(table.Schema)
|
||||||
|
schema.Tables = []*models.Table{table}
|
||||||
|
|
||||||
|
return w.WriteSchema(schema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// databaseToTypeORM converts a Database to TypeORM entity format string
|
||||||
|
func (w *Writer) databaseToTypeORM(db *models.Database) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Generate imports
|
||||||
|
sb.WriteString(w.generateImports(db))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
// Process all schemas
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
// Identify join tables
|
||||||
|
joinTables := w.identifyJoinTables(schema)
|
||||||
|
|
||||||
|
// Write entities (excluding join tables)
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if joinTables[table.Name] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
sb.WriteString(w.tableToEntity(table, schema, joinTables))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write view entities
|
||||||
|
for _, view := range schema.Views {
|
||||||
|
sb.WriteString(w.viewToEntity(view))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateImports generates the TypeORM import statement
|
||||||
|
func (w *Writer) generateImports(db *models.Database) string {
|
||||||
|
imports := make([]string, 0)
|
||||||
|
|
||||||
|
// Always include basic decorators
|
||||||
|
imports = append(imports, "Entity", "PrimaryGeneratedColumn", "Column")
|
||||||
|
|
||||||
|
// Check if we need relation decorators
|
||||||
|
needsManyToOne := false
|
||||||
|
needsOneToMany := false
|
||||||
|
needsManyToMany := false
|
||||||
|
needsJoinTable := false
|
||||||
|
needsCreateDate := false
|
||||||
|
needsUpdateDate := false
|
||||||
|
needsViewEntity := false
|
||||||
|
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
// Check for views
|
||||||
|
if len(schema.Views) > 0 {
|
||||||
|
needsViewEntity = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check for timestamp columns
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.Default == "now()" {
|
||||||
|
needsCreateDate = true
|
||||||
|
}
|
||||||
|
if strings.Contains(col.Comment, "auto-update") {
|
||||||
|
needsUpdateDate = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for relations
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
needsManyToOne = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OneToMany is the inverse of ManyToOne
|
||||||
|
if needsManyToOne {
|
||||||
|
needsOneToMany = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for M2M (join tables indicate M2M relations)
|
||||||
|
joinTables := make(map[string]bool)
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
jt := w.identifyJoinTables(schema)
|
||||||
|
for name := range jt {
|
||||||
|
joinTables[name] = true
|
||||||
|
needsManyToMany = true
|
||||||
|
needsJoinTable = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if needsManyToOne {
|
||||||
|
imports = append(imports, "ManyToOne")
|
||||||
|
}
|
||||||
|
if needsOneToMany {
|
||||||
|
imports = append(imports, "OneToMany")
|
||||||
|
}
|
||||||
|
if needsManyToMany {
|
||||||
|
imports = append(imports, "ManyToMany")
|
||||||
|
}
|
||||||
|
if needsJoinTable {
|
||||||
|
imports = append(imports, "JoinTable")
|
||||||
|
}
|
||||||
|
if needsCreateDate {
|
||||||
|
imports = append(imports, "CreateDateColumn")
|
||||||
|
}
|
||||||
|
if needsUpdateDate {
|
||||||
|
imports = append(imports, "UpdateDateColumn")
|
||||||
|
}
|
||||||
|
if needsViewEntity {
|
||||||
|
imports = append(imports, "ViewEntity")
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("import { %s } from 'typeorm';\n", strings.Join(imports, ", "))
|
||||||
|
}
|
||||||
|
|
||||||
|
// identifyJoinTables identifies tables that are join tables for M2M relations
|
||||||
|
func (w *Writer) identifyJoinTables(schema *models.Schema) map[string]bool {
|
||||||
|
joinTables := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
// Check if this is a join table:
|
||||||
|
// 1. Has exactly 2 FK constraints
|
||||||
|
// 2. Has composite PK with those 2 columns
|
||||||
|
// 3. Has no other columns except the FK columns
|
||||||
|
|
||||||
|
fks := table.GetForeignKeys()
|
||||||
|
if len(fks) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if columns are only the FK columns
|
||||||
|
if len(table.Columns) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if both FK columns are part of PK
|
||||||
|
pkCols := 0
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
pkCols++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkCols == 2 {
|
||||||
|
joinTables[table.Name] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return joinTables
|
||||||
|
}
|
||||||
|
|
||||||
|
// tableToEntity converts a Table to a TypeORM entity class
|
||||||
|
func (w *Writer) tableToEntity(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Generate @Entity decorator with options
|
||||||
|
entityOptions := w.buildEntityOptions(table)
|
||||||
|
sb.WriteString(fmt.Sprintf("@Entity({\n%s\n})\n", entityOptions))
|
||||||
|
|
||||||
|
// Get class name (from metadata if different from table name)
|
||||||
|
className := table.Name
|
||||||
|
if table.Metadata != nil {
|
||||||
|
if classNameVal, ok := table.Metadata["class_name"]; ok {
|
||||||
|
if classNameStr, ok := classNameVal.(string); ok {
|
||||||
|
className = classNameStr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf("export class %s {\n", className))
|
||||||
|
|
||||||
|
// Collect and sort columns
|
||||||
|
columns := make([]*models.Column, 0, len(table.Columns))
|
||||||
|
for _, col := range table.Columns {
|
||||||
|
// Skip FK columns (they'll be represented as relations)
|
||||||
|
if w.isForeignKeyColumn(col, table) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
columns = append(columns, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(columns, func(i, j int) bool {
|
||||||
|
// Put PK first, then alphabetical
|
||||||
|
if columns[i].IsPrimaryKey && !columns[j].IsPrimaryKey {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if !columns[i].IsPrimaryKey && columns[j].IsPrimaryKey {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return columns[i].Name < columns[j].Name
|
||||||
|
})
|
||||||
|
|
||||||
|
// Write scalar fields
|
||||||
|
for _, col := range columns {
|
||||||
|
sb.WriteString(w.columnToField(col, table))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write relation fields
|
||||||
|
sb.WriteString(w.generateRelationFields(table, schema, joinTables))
|
||||||
|
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// viewToEntity converts a View to a TypeORM @ViewEntity class
|
||||||
|
func (w *Writer) viewToEntity(view *models.View) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Generate @ViewEntity decorator with expression
|
||||||
|
sb.WriteString("@ViewEntity({\n")
|
||||||
|
if view.Definition != "" {
|
||||||
|
// Format the SQL expression with proper indentation
|
||||||
|
sb.WriteString(" expression: `\n")
|
||||||
|
sb.WriteString(" ")
|
||||||
|
sb.WriteString(view.Definition)
|
||||||
|
sb.WriteString("\n `,\n")
|
||||||
|
}
|
||||||
|
sb.WriteString("})\n")
|
||||||
|
|
||||||
|
// Generate class
|
||||||
|
sb.WriteString(fmt.Sprintf("export class %s {\n", view.Name))
|
||||||
|
|
||||||
|
// Generate field definitions (without decorators for view fields)
|
||||||
|
columns := make([]*models.Column, 0, len(view.Columns))
|
||||||
|
for _, col := range view.Columns {
|
||||||
|
columns = append(columns, col)
|
||||||
|
}
|
||||||
|
sort.Slice(columns, func(i, j int) bool {
|
||||||
|
return columns[i].Name < columns[j].Name
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, col := range columns {
|
||||||
|
tsType := w.sqlTypeToTypeScript(col.Type)
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s: %s;\n", col.Name, tsType))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString("}\n")
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// columnToField converts a Column to a TypeORM field
|
||||||
|
func (w *Writer) columnToField(col *models.Column, table *models.Table) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Generate decorator
|
||||||
|
if col.IsPrimaryKey {
|
||||||
|
if col.AutoIncrement {
|
||||||
|
sb.WriteString(" @PrimaryGeneratedColumn('increment')\n")
|
||||||
|
} else if col.Type == "uuid" || strings.Contains(fmt.Sprint(col.Default), "uuid") {
|
||||||
|
sb.WriteString(" @PrimaryGeneratedColumn('uuid')\n")
|
||||||
|
} else {
|
||||||
|
sb.WriteString(" @PrimaryGeneratedColumn()\n")
|
||||||
|
}
|
||||||
|
} else if col.Default == "now()" {
|
||||||
|
sb.WriteString(" @CreateDateColumn()\n")
|
||||||
|
} else if strings.Contains(col.Comment, "auto-update") {
|
||||||
|
sb.WriteString(" @UpdateDateColumn()\n")
|
||||||
|
} else {
|
||||||
|
// Regular @Column decorator
|
||||||
|
options := w.buildColumnOptions(col, table)
|
||||||
|
if options != "" {
|
||||||
|
sb.WriteString(fmt.Sprintf(" @Column({ %s })\n", options))
|
||||||
|
} else {
|
||||||
|
sb.WriteString(" @Column()\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate field declaration
|
||||||
|
tsType := w.sqlTypeToTypeScript(col.Type)
|
||||||
|
nullable := ""
|
||||||
|
if !col.NotNull {
|
||||||
|
nullable = " | null"
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s: %s%s;", col.Name, tsType, nullable))
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildColumnOptions builds the options object for @Column decorator
|
||||||
|
func (w *Writer) buildColumnOptions(col *models.Column, table *models.Table) string {
|
||||||
|
options := make([]string, 0)
|
||||||
|
|
||||||
|
// Type (if not default)
|
||||||
|
if w.needsExplicitType(col.Type) {
|
||||||
|
options = append(options, fmt.Sprintf("type: '%s'", col.Type))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Nullable
|
||||||
|
if !col.NotNull {
|
||||||
|
options = append(options, "nullable: true")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique
|
||||||
|
if w.hasUniqueConstraint(col.Name, table) {
|
||||||
|
options = append(options, "unique: true")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default
|
||||||
|
if col.Default != nil && col.Default != "now()" {
|
||||||
|
defaultStr := fmt.Sprint(col.Default)
|
||||||
|
if defaultStr != "" {
|
||||||
|
options = append(options, fmt.Sprintf("default: '%s'", defaultStr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(options, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// needsExplicitType checks if a SQL type needs explicit type declaration
|
||||||
|
func (w *Writer) needsExplicitType(sqlType string) bool {
|
||||||
|
// Types that don't map cleanly to TypeScript types need explicit declaration
|
||||||
|
explicitTypes := []string{"text", "uuid", "jsonb", "bigint"}
|
||||||
|
for _, t := range explicitTypes {
|
||||||
|
if strings.Contains(sqlType, t) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasUniqueConstraint checks if a column has a unique constraint
|
||||||
|
func (w *Writer) hasUniqueConstraint(colName string, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.UniqueConstraint &&
|
||||||
|
len(constraint.Columns) == 1 &&
|
||||||
|
constraint.Columns[0] == colName {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlTypeToTypeScript converts SQL types to TypeScript types
|
||||||
|
func (w *Writer) sqlTypeToTypeScript(sqlType string) string {
|
||||||
|
typeMap := map[string]string{
|
||||||
|
"text": "string",
|
||||||
|
"varchar": "string",
|
||||||
|
"character varying": "string",
|
||||||
|
"char": "string",
|
||||||
|
"uuid": "string",
|
||||||
|
"boolean": "boolean",
|
||||||
|
"bool": "boolean",
|
||||||
|
"integer": "number",
|
||||||
|
"int": "number",
|
||||||
|
"bigint": "number",
|
||||||
|
"double precision": "number",
|
||||||
|
"float": "number",
|
||||||
|
"decimal": "number",
|
||||||
|
"numeric": "number",
|
||||||
|
"timestamp": "Date",
|
||||||
|
"timestamptz": "Date",
|
||||||
|
"date": "Date",
|
||||||
|
"jsonb": "any",
|
||||||
|
"json": "any",
|
||||||
|
}
|
||||||
|
|
||||||
|
for sqlPattern, tsType := range typeMap {
|
||||||
|
if strings.Contains(strings.ToLower(sqlType), sqlPattern) {
|
||||||
|
return tsType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "any"
|
||||||
|
}
|
||||||
|
|
||||||
|
// isForeignKeyColumn checks if a column is a FK column
|
||||||
|
func (w *Writer) isForeignKeyColumn(col *models.Column, table *models.Table) bool {
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint {
|
||||||
|
for _, fkCol := range constraint.Columns {
|
||||||
|
if fkCol == col.Name {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateRelationFields generates relation fields for a table
|
||||||
|
func (w *Writer) generateRelationFields(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
|
||||||
|
// Get all FK constraints
|
||||||
|
fks := table.GetForeignKeys()
|
||||||
|
|
||||||
|
// Generate @ManyToOne fields
|
||||||
|
for _, fk := range fks {
|
||||||
|
relatedTable := fk.ReferencedTable
|
||||||
|
fieldName := strings.ToLower(relatedTable)
|
||||||
|
|
||||||
|
// Determine if nullable
|
||||||
|
isNullable := false
|
||||||
|
for _, fkCol := range fk.Columns {
|
||||||
|
if col, exists := table.Columns[fkCol]; exists {
|
||||||
|
if !col.NotNull {
|
||||||
|
isNullable = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nullable := ""
|
||||||
|
if isNullable {
|
||||||
|
nullable = " | null"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find inverse field name if possible
|
||||||
|
inverseField := w.findInverseFieldName(table.Name, relatedTable, schema)
|
||||||
|
|
||||||
|
if inverseField != "" {
|
||||||
|
sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, %s => %s.%s)\n",
|
||||||
|
relatedTable, strings.ToLower(relatedTable), strings.ToLower(relatedTable), inverseField))
|
||||||
|
} else {
|
||||||
|
if isNullable {
|
||||||
|
sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, { nullable: true })\n", relatedTable))
|
||||||
|
} else {
|
||||||
|
sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s)\n", relatedTable))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(fmt.Sprintf(" %s: %s%s;\n", fieldName, relatedTable, nullable))
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate @OneToMany fields (inverse of FKs pointing to this table)
|
||||||
|
w.generateInverseRelations(table, schema, joinTables, &sb)
|
||||||
|
|
||||||
|
// Generate @ManyToMany fields
|
||||||
|
w.generateManyToManyRelations(table, schema, joinTables, &sb)
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// findInverseFieldName finds the inverse field name for a relation
|
||||||
|
func (w *Writer) findInverseFieldName(fromTable, toTable string, schema *models.Schema) string {
|
||||||
|
// Look for tables that have FKs pointing back to fromTable
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name != toTable {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type == models.ForeignKeyConstraint && constraint.ReferencedTable == fromTable {
|
||||||
|
// Found an inverse relation
|
||||||
|
// Use pluralized form of fromTable
|
||||||
|
return w.pluralize(strings.ToLower(fromTable))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateInverseRelations generates @OneToMany fields
|
||||||
|
func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool, sb *strings.Builder) {
|
||||||
|
for _, otherTable := range schema.Tables {
|
||||||
|
if otherTable.Name == table.Name || joinTables[otherTable.Name] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fk := range otherTable.GetForeignKeys() {
|
||||||
|
if fk.ReferencedTable == table.Name {
|
||||||
|
// This table is referenced by otherTable
|
||||||
|
fieldName := w.pluralize(strings.ToLower(otherTable.Name))
|
||||||
|
inverseName := strings.ToLower(table.Name)
|
||||||
|
|
||||||
|
fmt.Fprintf(sb, " @OneToMany(() => %s, %s => %s.%s)\n",
|
||||||
|
otherTable.Name, strings.ToLower(otherTable.Name), strings.ToLower(otherTable.Name), inverseName)
|
||||||
|
fmt.Fprintf(sb, " %s: %s[];\n", fieldName, otherTable.Name)
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// generateManyToManyRelations generates @ManyToMany fields
|
||||||
|
func (w *Writer) generateManyToManyRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool, sb *strings.Builder) {
|
||||||
|
for joinTableName := range joinTables {
|
||||||
|
joinTable := w.findTable(joinTableName, schema)
|
||||||
|
if joinTable == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fks := joinTable.GetForeignKeys()
|
||||||
|
if len(fks) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this table is part of the M2M relation
|
||||||
|
var thisTableFK *models.Constraint
|
||||||
|
var otherTableFK *models.Constraint
|
||||||
|
|
||||||
|
for i, fk := range fks {
|
||||||
|
if fk.ReferencedTable == table.Name {
|
||||||
|
thisTableFK = fk
|
||||||
|
if i == 0 {
|
||||||
|
otherTableFK = fks[1]
|
||||||
|
} else {
|
||||||
|
otherTableFK = fks[0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if thisTableFK == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which side owns the relation (has @JoinTable)
|
||||||
|
// We'll make the first entity alphabetically the owner
|
||||||
|
isOwner := table.Name < otherTableFK.ReferencedTable
|
||||||
|
|
||||||
|
otherTable := otherTableFK.ReferencedTable
|
||||||
|
fieldName := w.pluralize(strings.ToLower(otherTable))
|
||||||
|
inverseName := w.pluralize(strings.ToLower(table.Name))
|
||||||
|
|
||||||
|
if isOwner {
|
||||||
|
fmt.Fprintf(sb, " @ManyToMany(() => %s, %s => %s.%s)\n",
|
||||||
|
otherTable, strings.ToLower(otherTable), strings.ToLower(otherTable), inverseName)
|
||||||
|
sb.WriteString(" @JoinTable()\n")
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(sb, " @ManyToMany(() => %s, %s => %s.%s)\n",
|
||||||
|
otherTable, strings.ToLower(otherTable), strings.ToLower(otherTable), inverseName)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(sb, " %s: %s[];\n", fieldName, otherTable)
|
||||||
|
sb.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// findTable finds a table by name in a schema
|
||||||
|
func (w *Writer) findTable(name string, schema *models.Schema) *models.Table {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == name {
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildEntityOptions builds the options object for @Entity decorator
|
||||||
|
func (w *Writer) buildEntityOptions(table *models.Table) string {
|
||||||
|
options := make([]string, 0)
|
||||||
|
|
||||||
|
// Always include table name
|
||||||
|
options = append(options, fmt.Sprintf(" name: \"%s\"", table.Name))
|
||||||
|
|
||||||
|
// Always include schema
|
||||||
|
options = append(options, fmt.Sprintf(" schema: \"%s\"", table.Schema))
|
||||||
|
|
||||||
|
// Database name from metadata
|
||||||
|
if table.Metadata != nil {
|
||||||
|
if database, ok := table.Metadata["database"]; ok {
|
||||||
|
if databaseStr, ok := database.(string); ok {
|
||||||
|
options = append(options, fmt.Sprintf(" database: \"%s\"", databaseStr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Engine from metadata
|
||||||
|
if engine, ok := table.Metadata["engine"]; ok {
|
||||||
|
if engineStr, ok := engine.(string); ok {
|
||||||
|
options = append(options, fmt.Sprintf(" engine: \"%s\"", engineStr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(options, ",\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// pluralize adds 's' to make a word plural (simple version)
|
||||||
|
func (w *Writer) pluralize(word string) string {
|
||||||
|
if strings.HasSuffix(word, "s") {
|
||||||
|
return word
|
||||||
|
}
|
||||||
|
return word + "s"
|
||||||
|
}
|
||||||
212
pkg/writers/yaml/README.md
Normal file
212
pkg/writers/yaml/README.md
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
# YAML Writer
|
||||||
|
|
||||||
|
Generates database schema definitions in YAML format.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The YAML Writer converts RelSpec's internal database model representation into YAML format, providing a human-readable, structured representation of the database schema.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Generates RelSpec's canonical YAML schema format
|
||||||
|
- Human-readable alternative to JSON
|
||||||
|
- Complete schema representation including:
|
||||||
|
- Databases and schemas
|
||||||
|
- Tables, columns, and data types
|
||||||
|
- Constraints (PK, FK, unique, check)
|
||||||
|
- Indexes
|
||||||
|
- Relationships
|
||||||
|
- Views and sequences
|
||||||
|
- Supports comments
|
||||||
|
- Ideal for manual editing and configuration
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
options := &writers.WriterOptions{
|
||||||
|
OutputPath: "schema.yaml",
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := yaml.NewWriter(options)
|
||||||
|
err := writer.WriteDatabase(db)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### CLI Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Export PostgreSQL database to YAML
|
||||||
|
relspec --input pgsql \
|
||||||
|
--conn "postgres://localhost/mydb" \
|
||||||
|
--output yaml \
|
||||||
|
--out-file schema.yaml
|
||||||
|
|
||||||
|
# Convert GORM models to YAML
|
||||||
|
relspec --input gorm --in-file models.go --output yaml --out-file schema.yaml
|
||||||
|
|
||||||
|
# Convert JSON to YAML
|
||||||
|
relspec --input json --in-file schema.json --output yaml --out-file schema.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Generated YAML Example
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
name: myapp
|
||||||
|
database_type: postgresql
|
||||||
|
source_format: pgsql
|
||||||
|
schemas:
|
||||||
|
- name: public
|
||||||
|
tables:
|
||||||
|
- name: users
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
id:
|
||||||
|
name: id
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
is_primary_key: true
|
||||||
|
auto_increment: true
|
||||||
|
sequence: 1
|
||||||
|
username:
|
||||||
|
name: username
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
type: varchar
|
||||||
|
length: 50
|
||||||
|
not_null: true
|
||||||
|
sequence: 2
|
||||||
|
email:
|
||||||
|
name: email
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
type: varchar
|
||||||
|
length: 100
|
||||||
|
not_null: true
|
||||||
|
sequence: 3
|
||||||
|
constraints:
|
||||||
|
pk_users:
|
||||||
|
name: pk_users
|
||||||
|
type: PRIMARY KEY
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
- id
|
||||||
|
uq_users_username:
|
||||||
|
name: uq_users_username
|
||||||
|
type: UNIQUE
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
- username
|
||||||
|
indexes:
|
||||||
|
idx_users_email:
|
||||||
|
name: idx_users_email
|
||||||
|
table: users
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
- email
|
||||||
|
unique: false
|
||||||
|
type: btree
|
||||||
|
|
||||||
|
- name: posts
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
id:
|
||||||
|
name: id
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
is_primary_key: true
|
||||||
|
sequence: 1
|
||||||
|
user_id:
|
||||||
|
name: user_id
|
||||||
|
type: bigint
|
||||||
|
not_null: true
|
||||||
|
sequence: 2
|
||||||
|
title:
|
||||||
|
name: title
|
||||||
|
type: varchar
|
||||||
|
length: 200
|
||||||
|
not_null: true
|
||||||
|
sequence: 3
|
||||||
|
content:
|
||||||
|
name: content
|
||||||
|
type: text
|
||||||
|
not_null: false
|
||||||
|
sequence: 4
|
||||||
|
constraints:
|
||||||
|
fk_posts_user_id:
|
||||||
|
name: fk_posts_user_id
|
||||||
|
type: FOREIGN KEY
|
||||||
|
table: posts
|
||||||
|
schema: public
|
||||||
|
columns:
|
||||||
|
- user_id
|
||||||
|
referenced_table: users
|
||||||
|
referenced_schema: public
|
||||||
|
referenced_columns:
|
||||||
|
- id
|
||||||
|
on_delete: CASCADE
|
||||||
|
on_update: NO ACTION
|
||||||
|
indexes:
|
||||||
|
idx_posts_user_id:
|
||||||
|
name: idx_posts_user_id
|
||||||
|
columns:
|
||||||
|
- user_id
|
||||||
|
unique: false
|
||||||
|
type: btree
|
||||||
|
views: []
|
||||||
|
sequences: []
|
||||||
|
```
|
||||||
|
|
||||||
|
## Schema Structure
|
||||||
|
|
||||||
|
The YAML format mirrors the JSON structure with human-readable syntax:
|
||||||
|
|
||||||
|
- Database level: `name`, `database_type`, `source_format`, `schemas`
|
||||||
|
- Schema level: `name`, `tables`, `views`, `sequences`
|
||||||
|
- Table level: `name`, `schema`, `columns`, `constraints`, `indexes`
|
||||||
|
- Column level: `name`, `type`, `length`, `not_null`, etc.
|
||||||
|
- Constraint level: `name`, `type`, `columns`, foreign key details
|
||||||
|
- Index level: `name`, `columns`, `unique`, `type`
|
||||||
|
|
||||||
|
## Advantages Over JSON
|
||||||
|
|
||||||
|
- More human-readable
|
||||||
|
- Easier to edit manually
|
||||||
|
- Supports comments
|
||||||
|
- Less verbose (no braces/brackets)
|
||||||
|
- Better for configuration files
|
||||||
|
- Natural indentation
|
||||||
|
|
||||||
|
## Use Cases
|
||||||
|
|
||||||
|
- **Configuration** - Schema as configuration
|
||||||
|
- **Documentation** - Human-readable schema docs
|
||||||
|
- **Version Control** - Easier to read diffs
|
||||||
|
- **Manual Editing** - Easier to modify by hand
|
||||||
|
- **Code Generation** - Template-friendly format
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Output is properly indented (2 spaces)
|
||||||
|
- Preserves all schema metadata
|
||||||
|
- Can be round-tripped with YAML reader
|
||||||
|
- Compatible with YAML 1.2
|
||||||
|
- More readable than JSON for large schemas
|
||||||
|
- Ideal for documentation and manual workflows
|
||||||
60
tests/assets/bun/complex.go
Normal file
60
tests/assets/bun/complex.go
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ModelUser represents a user in the system
|
||||||
|
type ModelUser struct {
|
||||||
|
bun.BaseModel `bun:"table:users,alias:u"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement,type:bigint"`
|
||||||
|
Username string `bun:"username,notnull,type:varchar(100),unique:idx_username"`
|
||||||
|
Email string `bun:"email,notnull,type:varchar(255),unique"`
|
||||||
|
Password string `bun:"password,notnull,type:varchar(255)"`
|
||||||
|
FirstName *string `bun:"first_name,type:varchar(100)"`
|
||||||
|
LastName *string `bun:"last_name,type:varchar(100)"`
|
||||||
|
Bio *string `bun:"bio,type:text"`
|
||||||
|
IsActive bool `bun:"is_active,type:boolean"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,type:timestamp"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at,type:timestamp"`
|
||||||
|
|
||||||
|
Posts []*ModelPost `bun:"rel:has-many,join:id=user_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelPost represents a blog post
|
||||||
|
type ModelPost struct {
|
||||||
|
bun.BaseModel `bun:"table:posts,alias:p"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement,type:bigint"`
|
||||||
|
UserID int64 `bun:"user_id,notnull,type:bigint"`
|
||||||
|
Title string `bun:"title,notnull,type:varchar(255)"`
|
||||||
|
Slug string `bun:"slug,notnull,type:varchar(255),unique:idx_slug"`
|
||||||
|
Content string `bun:"content,notnull,type:text"`
|
||||||
|
Excerpt *string `bun:"excerpt,type:text"`
|
||||||
|
Published bool `bun:"published,type:boolean"`
|
||||||
|
ViewCount int64 `bun:"view_count,type:bigint"`
|
||||||
|
PublishedAt *time.Time `bun:"published_at,type:timestamp,nullzero"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,type:timestamp"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at,type:timestamp"`
|
||||||
|
|
||||||
|
User *ModelUser `bun:"rel:belongs-to,join:user_id=id"`
|
||||||
|
Comments []*ModelComment `bun:"rel:has-many,join:id=post_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelComment represents a comment on a post
|
||||||
|
type ModelComment struct {
|
||||||
|
bun.BaseModel `bun:"table:comments,alias:c"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement,type:bigint"`
|
||||||
|
PostID int64 `bun:"post_id,notnull,type:bigint"`
|
||||||
|
UserID *int64 `bun:"user_id,type:bigint"`
|
||||||
|
Content string `bun:"content,notnull,type:text"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,type:timestamp"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at,type:timestamp"`
|
||||||
|
|
||||||
|
Post *ModelPost `bun:"rel:belongs-to,join:post_id=id"`
|
||||||
|
User *ModelUser `bun:"rel:belongs-to,join:user_id=id"`
|
||||||
|
}
|
||||||
18
tests/assets/bun/simple.go
Normal file
18
tests/assets/bun/simple.go
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type User struct {
|
||||||
|
bun.BaseModel `bun:"table:users,alias:u"`
|
||||||
|
|
||||||
|
ID int64 `bun:"id,pk,autoincrement,type:bigint"`
|
||||||
|
Email string `bun:"email,notnull,type:varchar(255),unique"`
|
||||||
|
Name string `bun:"name,type:text"`
|
||||||
|
Age *int `bun:"age,type:integer"`
|
||||||
|
IsActive bool `bun:"is_active,type:boolean"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,type:timestamp,default:now()"`
|
||||||
|
}
|
||||||
156
tests/assets/drizzle/schema-updated.ts
Normal file
156
tests/assets/drizzle/schema-updated.ts
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
import { pgTable, pgEnum, integer, bigint, smallint, serial, bigserial, smallserial, text, varchar, char, boolean, numeric, real, doublePrecision, timestamp, date, time, interval, json, jsonb, uuid, bytea } from 'drizzle-orm/pg-core';
|
||||||
|
import { sql } from 'drizzle-orm';
|
||||||
|
|
||||||
|
|
||||||
|
// Enums
|
||||||
|
export const userRole = pgEnum('UserRole', ['admin', 'user', 'moderator', 'guest']);
|
||||||
|
export const orderStatus = pgEnum('OrderStatus', ['pending', 'processing', 'shipped', 'delivered', 'cancelled']);
|
||||||
|
|
||||||
|
|
||||||
|
// Table: users
|
||||||
|
export const users = pgTable('users', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
email: varchar('email').notNull().unique(),
|
||||||
|
isActive: boolean('is_active').notNull().default(true),
|
||||||
|
lastLoginAt: timestamp('last_login_at'),
|
||||||
|
passwordHash: varchar('password_hash').notNull(),
|
||||||
|
profile: jsonb('profile'),
|
||||||
|
role: pgEnum('UserRole')('role').notNull(),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
username: varchar('username').notNull().unique(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for users
|
||||||
|
export type Users = typeof users.$inferSelect;
|
||||||
|
export type NewUsers = typeof users.$inferInsert;
|
||||||
|
// Table: profiles
|
||||||
|
export const profiles = pgTable('profiles', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
avatarUrl: varchar('avatar_url'),
|
||||||
|
bio: text('bio'),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
dateOfBirth: date('date_of_birth'),
|
||||||
|
firstName: varchar('first_name'),
|
||||||
|
lastName: varchar('last_name'),
|
||||||
|
phoneNumber: varchar('phone_number'),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
userId: integer('user_id').notNull().unique().references(() => users.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for profiles
|
||||||
|
export type Profiles = typeof profiles.$inferSelect;
|
||||||
|
export type NewProfiles = typeof profiles.$inferInsert;
|
||||||
|
// Table: posts
|
||||||
|
export const posts = pgTable('posts', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
authorId: integer('author_id').notNull().references(() => users.id),
|
||||||
|
content: text('content').notNull(),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
excerpt: text('excerpt'),
|
||||||
|
featuredImage: varchar('featured_image'),
|
||||||
|
isPublished: boolean('is_published').notNull().default(false),
|
||||||
|
publishedAt: timestamp('published_at'),
|
||||||
|
slug: varchar('slug').notNull().unique(),
|
||||||
|
title: varchar('title').notNull(),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
viewCount: integer('view_count').notNull().default(0),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for posts
|
||||||
|
export type Posts = typeof posts.$inferSelect;
|
||||||
|
export type NewPosts = typeof posts.$inferInsert;
|
||||||
|
// Table: comments
|
||||||
|
export const comments = pgTable('comments', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
authorId: integer('author_id').notNull().references(() => users.id),
|
||||||
|
content: text('content').notNull(),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
isApproved: boolean('is_approved').notNull().default(false),
|
||||||
|
parentId: integer('parent_id').references(() => comments.id),
|
||||||
|
postId: integer('post_id').notNull().references(() => posts.id),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for comments
|
||||||
|
export type Comments = typeof comments.$inferSelect;
|
||||||
|
export type NewComments = typeof comments.$inferInsert;
|
||||||
|
// Table: categories
|
||||||
|
export const categories = pgTable('categories', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
description: text('description'),
|
||||||
|
name: varchar('name').notNull().unique(),
|
||||||
|
parentId: integer('parent_id').references(() => categories.id),
|
||||||
|
slug: varchar('slug').notNull().unique(),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for categories
|
||||||
|
export type Categories = typeof categories.$inferSelect;
|
||||||
|
export type NewCategories = typeof categories.$inferInsert;
|
||||||
|
// Table: post_categories
|
||||||
|
export const postCategories = pgTable('post_categories', {
|
||||||
|
categoryId: integer('category_id').notNull().references(() => categories.id),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
postId: integer('post_id').notNull().references(() => posts.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for post_categories
|
||||||
|
export type PostCategories = typeof postCategories.$inferSelect;
|
||||||
|
export type NewPostCategories = typeof postCategories.$inferInsert;
|
||||||
|
// Table: tags
|
||||||
|
export const tags = pgTable('tags', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
name: varchar('name').notNull().unique(),
|
||||||
|
slug: varchar('slug').notNull().unique(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for tags
|
||||||
|
export type Tags = typeof tags.$inferSelect;
|
||||||
|
export type NewTags = typeof tags.$inferInsert;
|
||||||
|
// Table: post_tags
|
||||||
|
export const postTags = pgTable('post_tags', {
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
postId: integer('post_id').notNull().references(() => posts.id),
|
||||||
|
tagId: integer('tag_id').notNull().references(() => tags.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for post_tags
|
||||||
|
export type PostTags = typeof postTags.$inferSelect;
|
||||||
|
export type NewPostTags = typeof postTags.$inferInsert;
|
||||||
|
// Table: orders
|
||||||
|
export const orders = pgTable('orders', {
|
||||||
|
id: serial('id').primaryKey(),
|
||||||
|
billingAddress: jsonb('billing_address').notNull(),
|
||||||
|
completedAt: timestamp('completed_at'),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
currency: varchar('currency').notNull().default('USD'),
|
||||||
|
notes: text('notes'),
|
||||||
|
orderNumber: varchar('order_number').notNull().unique(),
|
||||||
|
shippingAddress: jsonb('shipping_address').notNull(),
|
||||||
|
status: pgEnum('OrderStatus')('status').notNull().default('pending'),
|
||||||
|
totalAmount: numeric('total_amount').notNull(),
|
||||||
|
updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
|
||||||
|
userId: integer('user_id').notNull().references(() => users.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for orders
|
||||||
|
export type Orders = typeof orders.$inferSelect;
|
||||||
|
export type NewOrders = typeof orders.$inferInsert;
|
||||||
|
// Table: sessions
|
||||||
|
export const sessions = pgTable('sessions', {
|
||||||
|
id: uuid('id').primaryKey().default(sql`gen_random_uuid()`),
|
||||||
|
createdAt: timestamp('created_at').notNull().default(sql`now()`),
|
||||||
|
expiresAt: timestamp('expires_at').notNull(),
|
||||||
|
ipAddress: varchar('ip_address'),
|
||||||
|
token: varchar('token').notNull().unique(),
|
||||||
|
userAgent: text('user_agent'),
|
||||||
|
userId: integer('user_id').notNull().references(() => users.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Types for sessions
|
||||||
|
export type Sessions = typeof sessions.$inferSelect;
|
||||||
|
export type NewSessions = typeof sessions.$inferInsert;
|
||||||
90
tests/assets/drizzle/schema.ts
Normal file
90
tests/assets/drizzle/schema.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
import { pgTable, pgEnum, integer, bigint, smallint, serial, bigserial, smallserial, text, varchar, char, boolean, numeric, real, doublePrecision, timestamp, date, time, interval, json, jsonb, uuid, bytea } from 'drizzle-orm/pg-core';
|
||||||
|
import { sql } from 'drizzle-orm';
|
||||||
|
|
||||||
|
|
||||||
|
// Enums
|
||||||
|
export const role = pgEnum('Role', ['USER', 'ADMIN']);
|
||||||
|
export type Role = 'USER' | 'ADMIN';
|
||||||
|
|
||||||
|
|
||||||
|
// Table: User
|
||||||
|
export interface User {
|
||||||
|
id: number;
|
||||||
|
email: string;
|
||||||
|
name: string | null;
|
||||||
|
profile: string | null;
|
||||||
|
role: Role;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const user = pgTable('User', {
|
||||||
|
id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
email: text('email').notNull().unique(),
|
||||||
|
name: text('name'),
|
||||||
|
profile: text('profile'),
|
||||||
|
role: pgEnum('Role')('role').notNull().default('USER'),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type NewUser = typeof user.$inferInsert;
|
||||||
|
// Table: Profile
|
||||||
|
export interface Profile {
|
||||||
|
id: number;
|
||||||
|
bio: string;
|
||||||
|
user: string;
|
||||||
|
userId: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const profile = pgTable('Profile', {
|
||||||
|
id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
bio: text('bio').notNull(),
|
||||||
|
user: text('user').notNull(),
|
||||||
|
userId: integer('userId').notNull().unique().references(() => user.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type NewProfile = typeof profile.$inferInsert;
|
||||||
|
// Table: Post
|
||||||
|
export interface Post {
|
||||||
|
id: number;
|
||||||
|
author: string;
|
||||||
|
authorId: number;
|
||||||
|
createdAt: Date;
|
||||||
|
published: boolean;
|
||||||
|
title: string;
|
||||||
|
updatedAt: Date; // @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
export const post = pgTable('Post', {
|
||||||
|
id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
author: text('author').notNull(),
|
||||||
|
authorId: integer('authorId').notNull().references(() => user.id),
|
||||||
|
createdAt: timestamp('createdAt').notNull().default(sql`now()`),
|
||||||
|
published: boolean('published').notNull().default(false),
|
||||||
|
title: text('title').notNull(),
|
||||||
|
updatedAt: timestamp('updatedAt').notNull(), // @updatedAt
|
||||||
|
});
|
||||||
|
|
||||||
|
export type NewPost = typeof post.$inferInsert;
|
||||||
|
// Table: Category
|
||||||
|
export interface Category {
|
||||||
|
id: number;
|
||||||
|
name: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const category = pgTable('Category', {
|
||||||
|
id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
name: text('name').notNull(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type NewCategory = typeof category.$inferInsert;
|
||||||
|
// Table: _CategoryToPost
|
||||||
|
export interface Categorytopost {
|
||||||
|
categoryId: number;
|
||||||
|
postId: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const Categorytopost = pgTable('_CategoryToPost', {
|
||||||
|
categoryId: integer('CategoryId').primaryKey().references(() => category.id),
|
||||||
|
postId: integer('PostId').primaryKey().references(() => post.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type NewCategorytopost = typeof Categorytopost.$inferInsert;
|
||||||
65
tests/assets/gorm/complex.go
Normal file
65
tests/assets/gorm/complex.go
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ModelUser represents a user in the system
|
||||||
|
type ModelUser struct {
|
||||||
|
ID int64 `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
|
||||||
|
Username string `gorm:"column:username;type:varchar(100);not null;uniqueIndex:idx_username"`
|
||||||
|
Email string `gorm:"column:email;type:varchar(255);not null;uniqueIndex"`
|
||||||
|
Password string `gorm:"column:password;type:varchar(255);not null"`
|
||||||
|
FirstName *string `gorm:"column:first_name;type:varchar(100)"`
|
||||||
|
LastName *string `gorm:"column:last_name;type:varchar(100)"`
|
||||||
|
Bio *string `gorm:"column:bio;type:text"`
|
||||||
|
IsActive bool `gorm:"column:is_active;type:boolean;default:true"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
|
||||||
|
UpdatedAt time.Time `gorm:"column:updated_at;type:timestamp;default:now()"`
|
||||||
|
|
||||||
|
Posts []*ModelPost `gorm:"foreignKey:UserID;association_foreignkey:ID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
|
||||||
|
Comments []*ModelComment `gorm:"foreignKey:UserID;association_foreignkey:ID;constraint:OnDelete:SET NULL"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelUser) TableName() string {
|
||||||
|
return "users"
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelPost represents a blog post
|
||||||
|
type ModelPost struct {
|
||||||
|
ID int64 `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
|
||||||
|
UserID int64 `gorm:"column:user_id;type:bigint;not null;index:idx_user_id"`
|
||||||
|
Title string `gorm:"column:title;type:varchar(255);not null"`
|
||||||
|
Slug string `gorm:"column:slug;type:varchar(255);not null;uniqueIndex:idx_slug"`
|
||||||
|
Content string `gorm:"column:content;type:text;not null"`
|
||||||
|
Excerpt *string `gorm:"column:excerpt;type:text"`
|
||||||
|
Published bool `gorm:"column:published;type:boolean;default:false"`
|
||||||
|
ViewCount int64 `gorm:"column:view_count;type:bigint;default:0"`
|
||||||
|
PublishedAt *time.Time `gorm:"column:published_at;type:timestamp"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
|
||||||
|
UpdatedAt time.Time `gorm:"column:updated_at;type:timestamp;default:now()"`
|
||||||
|
|
||||||
|
User *ModelUser `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
|
||||||
|
Comments []*ModelComment `gorm:"foreignKey:PostID;association_foreignkey:ID;constraint:OnDelete:CASCADE"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelPost) TableName() string {
|
||||||
|
return "posts"
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelComment represents a comment on a post
|
||||||
|
type ModelComment struct {
|
||||||
|
ID int64 `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
|
||||||
|
PostID int64 `gorm:"column:post_id;type:bigint;not null;index:idx_post_id"`
|
||||||
|
UserID *int64 `gorm:"column:user_id;type:bigint;index:idx_user_id"`
|
||||||
|
Content string `gorm:"column:content;type:text;not null"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
|
||||||
|
UpdatedAt time.Time `gorm:"column:updated_at;type:timestamp;default:now()"`
|
||||||
|
|
||||||
|
Post *ModelPost `gorm:"foreignKey:PostID;references:ID;constraint:OnDelete:CASCADE"`
|
||||||
|
User *ModelUser `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:SET NULL"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ModelComment) TableName() string {
|
||||||
|
return "comments"
|
||||||
|
}
|
||||||
18
tests/assets/gorm/simple.go
Normal file
18
tests/assets/gorm/simple.go
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type User struct {
|
||||||
|
ID int64 `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
|
||||||
|
Email string `gorm:"column:email;type:varchar(255);not null"`
|
||||||
|
Name string `gorm:"column:name;type:text"`
|
||||||
|
Age *int `gorm:"column:age;type:integer"`
|
||||||
|
IsActive bool `gorm:"column:is_active;type:boolean"`
|
||||||
|
CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (User) TableName() string {
|
||||||
|
return "users"
|
||||||
|
}
|
||||||
46
tests/assets/graphql/complex.graphql
Normal file
46
tests/assets/graphql/complex.graphql
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
# Complex GraphQL schema with multiple features
|
||||||
|
|
||||||
|
scalar DateTime
|
||||||
|
scalar JSON
|
||||||
|
scalar Date
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
USER
|
||||||
|
ADMIN
|
||||||
|
MODERATOR
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
name: String!
|
||||||
|
role: Role!
|
||||||
|
createdAt: DateTime!
|
||||||
|
posts: [Post!]!
|
||||||
|
profile: Profile
|
||||||
|
}
|
||||||
|
|
||||||
|
type Profile {
|
||||||
|
id: ID!
|
||||||
|
bio: String
|
||||||
|
avatar: String
|
||||||
|
metadata: JSON
|
||||||
|
user: User!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
slug: String!
|
||||||
|
content: String
|
||||||
|
published: Boolean!
|
||||||
|
publishedAt: Date
|
||||||
|
author: User!
|
||||||
|
tags: [Tag!]!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Tag {
|
||||||
|
id: ID!
|
||||||
|
name: String!
|
||||||
|
posts: [Post!]!
|
||||||
|
}
|
||||||
13
tests/assets/graphql/custom_scalars.graphql
Normal file
13
tests/assets/graphql/custom_scalars.graphql
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# GraphQL schema with custom scalars
|
||||||
|
|
||||||
|
scalar DateTime
|
||||||
|
scalar JSON
|
||||||
|
scalar Date
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
createdAt: DateTime!
|
||||||
|
metadata: JSON
|
||||||
|
birthDate: Date
|
||||||
|
}
|
||||||
13
tests/assets/graphql/enums.graphql
Normal file
13
tests/assets/graphql/enums.graphql
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# GraphQL schema with enums
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
ADMIN
|
||||||
|
USER
|
||||||
|
GUEST
|
||||||
|
}
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
role: Role!
|
||||||
|
}
|
||||||
16
tests/assets/graphql/relations.graphql
Normal file
16
tests/assets/graphql/relations.graphql
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# GraphQL schema with relationships
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
name: String!
|
||||||
|
posts: [Post!]!
|
||||||
|
}
|
||||||
|
|
||||||
|
type Post {
|
||||||
|
id: ID!
|
||||||
|
title: String!
|
||||||
|
content: String
|
||||||
|
published: Boolean!
|
||||||
|
author: User!
|
||||||
|
}
|
||||||
9
tests/assets/graphql/simple.graphql
Normal file
9
tests/assets/graphql/simple.graphql
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Simple GraphQL schema for testing basic type parsing
|
||||||
|
|
||||||
|
type User {
|
||||||
|
id: ID!
|
||||||
|
email: String!
|
||||||
|
name: String
|
||||||
|
age: Int
|
||||||
|
active: Boolean!
|
||||||
|
}
|
||||||
46
tests/assets/prisma/example.prisma
Normal file
46
tests/assets/prisma/example.prisma
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
datasource db {
|
||||||
|
provider = "postgresql"
|
||||||
|
}
|
||||||
|
|
||||||
|
generator client {
|
||||||
|
provider = "prisma-client"
|
||||||
|
output = "./generated"
|
||||||
|
}
|
||||||
|
|
||||||
|
model User {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
email String @unique
|
||||||
|
name String?
|
||||||
|
role Role @default(USER)
|
||||||
|
posts Post[]
|
||||||
|
profile Profile?
|
||||||
|
}
|
||||||
|
|
||||||
|
model Profile {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
bio String
|
||||||
|
user User @relation(fields: [userId], references: [id])
|
||||||
|
userId Int @unique
|
||||||
|
}
|
||||||
|
|
||||||
|
model Post {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
title String
|
||||||
|
published Boolean @default(false)
|
||||||
|
author User @relation(fields: [authorId], references: [id])
|
||||||
|
authorId Int
|
||||||
|
categories Category[]
|
||||||
|
}
|
||||||
|
|
||||||
|
model Category {
|
||||||
|
id Int @id @default(autoincrement())
|
||||||
|
name String
|
||||||
|
posts Post[]
|
||||||
|
}
|
||||||
|
|
||||||
|
enum Role {
|
||||||
|
USER
|
||||||
|
ADMIN
|
||||||
|
}
|
||||||
115
tests/assets/typeorm/example.ts
Normal file
115
tests/assets/typeorm/example.ts
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
//@ts-nocheck
|
||||||
|
import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, OneToMany, ManyToMany, JoinTable, CreateDateColumn, UpdateDateColumn } from 'typeorm';
|
||||||
|
|
||||||
|
@Entity()
|
||||||
|
export class User {
|
||||||
|
@PrimaryGeneratedColumn('uuid')
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
@Column({ unique: true })
|
||||||
|
email: string;
|
||||||
|
|
||||||
|
@Column()
|
||||||
|
name: string;
|
||||||
|
|
||||||
|
@CreateDateColumn()
|
||||||
|
createdAt: Date;
|
||||||
|
|
||||||
|
@UpdateDateColumn()
|
||||||
|
updatedAt: Date;
|
||||||
|
|
||||||
|
@OneToMany(() => Project, project => project.owner)
|
||||||
|
ownedProjects: Project[];
|
||||||
|
|
||||||
|
@ManyToMany(() => Project, project => project.members)
|
||||||
|
@JoinTable()
|
||||||
|
projects: Project[];
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity()
|
||||||
|
export class Project {
|
||||||
|
@PrimaryGeneratedColumn('uuid')
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
@Column()
|
||||||
|
title: string;
|
||||||
|
|
||||||
|
@Column({ nullable: true })
|
||||||
|
description: string;
|
||||||
|
|
||||||
|
@Column({ default: 'active' })
|
||||||
|
status: string;
|
||||||
|
|
||||||
|
@ManyToOne(() => User, user => user.ownedProjects)
|
||||||
|
owner: User;
|
||||||
|
|
||||||
|
@ManyToMany(() => User, user => user.projects)
|
||||||
|
members: User[];
|
||||||
|
|
||||||
|
@OneToMany(() => Task, task => task.project)
|
||||||
|
tasks: Task[];
|
||||||
|
|
||||||
|
@CreateDateColumn()
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity()
|
||||||
|
export class Task {
|
||||||
|
@PrimaryGeneratedColumn('uuid')
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
@Column()
|
||||||
|
title: string;
|
||||||
|
|
||||||
|
@Column({ type: 'text', nullable: true })
|
||||||
|
description: string;
|
||||||
|
|
||||||
|
@Column({ default: 'todo' })
|
||||||
|
status: string;
|
||||||
|
|
||||||
|
@Column({ nullable: true })
|
||||||
|
dueDate: Date;
|
||||||
|
|
||||||
|
@ManyToOne(() => Project, project => project.tasks)
|
||||||
|
project: Project;
|
||||||
|
|
||||||
|
@ManyToOne(() => User, { nullable: true })
|
||||||
|
assignee: User;
|
||||||
|
|
||||||
|
@OneToMany(() => Comment, comment => comment.task)
|
||||||
|
comments: Comment[];
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity()
|
||||||
|
export class Comment {
|
||||||
|
@PrimaryGeneratedColumn('uuid')
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
@Column('text')
|
||||||
|
content: string;
|
||||||
|
|
||||||
|
@ManyToOne(() => Task, task => task.comments)
|
||||||
|
task: Task;
|
||||||
|
|
||||||
|
@ManyToOne(() => User)
|
||||||
|
author: User;
|
||||||
|
|
||||||
|
@CreateDateColumn()
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Entity()
|
||||||
|
export class Tag {
|
||||||
|
@PrimaryGeneratedColumn('uuid')
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
@Column({ unique: true })
|
||||||
|
name: string;
|
||||||
|
|
||||||
|
@Column()
|
||||||
|
color: string;
|
||||||
|
|
||||||
|
@ManyToMany(() => Task)
|
||||||
|
@JoinTable()
|
||||||
|
tasks: Task[];
|
||||||
|
}
|
||||||
@@ -79,7 +79,7 @@ SELECT
|
|||||||
"
|
"
|
||||||
|
|
||||||
# Set environment variable for tests
|
# Set environment variable for tests
|
||||||
export RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5433/relspec_test"
|
export RELSPEC_TEST_PG_CONN="postgres://relspec:relspec_test_password@localhost:5439/relspec_test"
|
||||||
|
|
||||||
echo -e "\n${YELLOW}Running PostgreSQL reader tests...${NC}"
|
echo -e "\n${YELLOW}Running PostgreSQL reader tests...${NC}"
|
||||||
echo "Connection string: $RELSPEC_TEST_PG_CONN"
|
echo "Connection string: $RELSPEC_TEST_PG_CONN"
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user