Compare commits

16 commits:

- 9f29bc112e
- b55737ab4c
- 2a271b9859
- beb5b4fac8
- e61204cb3c
- d52b9cdc14
- f98b278d72
- 666eab7cec
- 35bc9dfb5c
- aad5db5175
- d9225a7310
- 79effe6921
- 289715ba44
- 8ca2b50f9c
- acedc4d4df
- ef5e9bccd0
.github/workflows/ci.yml (vendored): 34 changed lines

```diff
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        go-version: ['1.23', '1.24', '1.25']
+        go-version: ['1.24', '1.25']
 
     steps:
       - name: Checkout code
@@ -34,8 +34,8 @@ jobs:
       - name: Download dependencies
         run: go mod download
 
-      - name: Run tests
-        run: go test -v -race -coverprofile=coverage.out -covermode=atomic ./...
+      - name: Run unit tests
+        run: make test
 
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v4
@@ -57,11 +57,13 @@ jobs:
         with:
           go-version: '1.25'
 
-      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v6
-        with:
-          version: latest
-          args: --config=.golangci.json
+      - name: Install golangci-lint
+        run: |
+          curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin latest
+          echo "$(go env GOPATH)/bin" >> $GITHUB_PATH
+
+      - name: Run linter
+        run: make lint
 
   build:
     name: Build
@@ -76,8 +78,20 @@ jobs:
         with:
           go-version: '1.25'
 
-      - name: Build
-        run: go build -v ./cmd/relspec
+      - name: Download dependencies
+        run: go mod download
+
+      - name: Build binary
+        run: make build
+
+      - name: Verify binary exists
+        run: |
+          if [ ! -f build/relspec ]; then
+            echo "Error: Binary not found at build/relspec"
+            exit 1
+          fi
+          echo "Build successful: build/relspec"
+          ls -lh build/relspec
 
       - name: Check mod tidiness
         run: |
```
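The lint job now installs golangci-lint directly instead of using the golangci-lint-action, and both the test and lint jobs route through the Makefile. A local reproduction sketch, assuming the `Makefile` targets and `.golangci.json` config from this change:

```bash
# Same install script the workflow now runs (no version pin; pulls "latest")
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \
  | sh -s -- -b "$(go env GOPATH)/bin" latest
export PATH="$(go env GOPATH)/bin:$PATH"

make test   # unit tests, as the test job now runs them
make lint   # golangci-lint via the Makefile, as the lint job now runs it
```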
.github/workflows/integration-tests.yml (vendored, new file): 91 lines

```yaml
name: Integration Tests

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.25'

      - name: Cache Go modules
        uses: actions/cache@v4
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
          restore-keys: |
            ${{ runner.os }}-go-

      - name: Download dependencies
        run: go mod download

      - name: Start PostgreSQL container
        run: |
          docker run -d \
            --name relspec-test-postgres \
            --network host \
            -e POSTGRES_USER=relspec \
            -e POSTGRES_PASSWORD=relspec_test_password \
            -e POSTGRES_DB=relspec_test \
            postgres:16-alpine

      - name: Wait for PostgreSQL to be ready
        run: |
          echo "Waiting for PostgreSQL to start..."
          for i in {1..30}; do
            if docker exec relspec-test-postgres pg_isready -U relspec -d relspec_test > /dev/null 2>&1; then
              echo "PostgreSQL is ready!"
              break
            fi
            echo "Waiting... ($i/30)"
            sleep 1
          done
          sleep 2

      - name: Copy init script into container
        run: |
          docker cp tests/postgres/init.sql relspec-test-postgres:/tmp/init.sql

      - name: Initialize test database
        run: |
          docker exec relspec-test-postgres psql -U relspec -d relspec_test -f /tmp/init.sql

      - name: Verify database setup
        run: |
          echo "Verifying database initialization..."
          docker exec relspec-test-postgres psql -U relspec -d relspec_test -c "
            SELECT
              (SELECT COUNT(*) FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND nspname NOT LIKE 'pg_%') as schemas,
              (SELECT COUNT(*) FROM pg_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as tables,
              (SELECT COUNT(*) FROM pg_views WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as views,
              (SELECT COUNT(*) FROM pg_sequences WHERE schemaname NOT IN ('pg_catalog', 'information_schema')) as sequences;
          "

      - name: Run integration tests
        env:
          RELSPEC_TEST_PG_CONN: postgres://relspec:relspec_test_password@localhost:5432/relspec_test
        run: make test-integration

      - name: Stop PostgreSQL container
        if: always()
        run: |
          docker stop relspec-test-postgres || true
          docker rm relspec-test-postgres || true

      - name: Summary
        if: always()
        run: |
          echo "Integration tests completed."
          echo "PostgreSQL container has been cleaned up."
```
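The same run can be reproduced locally; a sketch assuming Docker is installed and the `tests/postgres/init.sql` fixture from this change is present:

```bash
# Start the same container the workflow starts
docker run -d --name relspec-test-postgres --network host \
  -e POSTGRES_USER=relspec \
  -e POSTGRES_PASSWORD=relspec_test_password \
  -e POSTGRES_DB=relspec_test \
  postgres:16-alpine

# Load the schema fixture, then run the tests against it
docker cp tests/postgres/init.sql relspec-test-postgres:/tmp/init.sql
docker exec relspec-test-postgres psql -U relspec -d relspec_test -f /tmp/init.sql
export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5432/relspec_test'
make test-integration

# Clean up, mirroring the workflow's always() step
docker stop relspec-test-postgres && docker rm relspec-test-postgres
```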
.github/workflows/release.yml (vendored, new file): 116 lines

```yaml
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  build-and-release:
    name: Build and Release
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.25'

      - name: Get version from tag
        id: get_version
        run: |
          echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
          echo "Version: ${GITHUB_REF#refs/tags/}"

      - name: Build binaries for multiple platforms
        run: |
          mkdir -p dist

          # Linux AMD64
          GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Linux ARM64
          GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # macOS AMD64
          GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # macOS ARM64 (Apple Silicon)
          GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Windows AMD64
          GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec

          # Create checksums
          cd dist
          sha256sum * > checksums.txt
          cd ..

      - name: Generate release notes
        id: release_notes
        run: |
          # Get the previous tag
          previous_tag=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")

          if [ -z "$previous_tag" ]; then
            # No previous tag, get all commits
            commits=$(git log --pretty=format:"- %s (%h)" --no-merges)
          else
            # Get commits since the previous tag
            commits=$(git log "${previous_tag}..HEAD" --pretty=format:"- %s (%h)" --no-merges)
          fi

          # Create release notes
          cat > release_notes.md << EOF
          # Release ${{ steps.get_version.outputs.VERSION }}

          ## Changes

          ${commits}

          ## Installation

          Download the appropriate binary for your platform:

          - **Linux (AMD64)**: \`relspec-linux-amd64\`
          - **Linux (ARM64)**: \`relspec-linux-arm64\`
          - **macOS (Intel)**: \`relspec-darwin-amd64\`
          - **macOS (Apple Silicon)**: \`relspec-darwin-arm64\`
          - **Windows (AMD64)**: \`relspec-windows-amd64.exe\`

          Make the binary executable (Linux/macOS):
          \`\`\`bash
          chmod +x relspec-*
          \`\`\`

          Verify the download with the provided checksums.
          EOF

      - name: Create Release
        uses: softprops/action-gh-release@v1
        with:
          body_path: release_notes.md
          files: |
            dist/relspec-linux-amd64
            dist/relspec-linux-arm64
            dist/relspec-darwin-amd64
            dist/relspec-darwin-arm64
            dist/relspec-windows-amd64.exe
            dist/checksums.txt
          draft: false
          prerelease: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Summary
        run: |
          echo "Release ${{ steps.get_version.outputs.VERSION }} created successfully!"
          echo "Binaries built for:"
          echo "  - Linux (amd64, arm64)"
          echo "  - macOS (amd64, arm64)"
          echo "  - Windows (amd64)"
```
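The release is driven entirely by the tag push; a usage sketch, where v1.2.3 is a placeholder version rather than a real tag from this repository:

```bash
# Any tag matching v*.*.* triggers the workflow
git tag -a v1.2.3 -m "Release v1.2.3"
git push origin v1.2.3

# One platform's build, mirroring the workflow's build step, for local verification
GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 \
  -ldflags "-X main.version=v1.2.3" ./cmd/relspec
```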
AI_USE.md (new file): 35 lines

```markdown
# AI Usage Declaration

This Go project utilizes AI tools for the following purposes:

- Generating and improving documentation
- Writing and enhancing tests
- Refactoring and optimizing existing code

AI is **not** used for core design or architecture decisions.
All design decisions are deferred to human discussion.
AI is employed only for enhancements to human-written code.

We are aware of significant AI hallucinations; all AI-generated content is to be reviewed and verified by humans.


        .-""""""-.
      .'          '.
     /   O      O   \
    :           `    :
    |                |
    :    .------.    :
     \  '        '  /
      '.          .'
        '-......-'
       MEGAMIND AI
      [============]

       ___________
      /___________\
     /_____________\
     |  ASSIMILATE |
     |  RESISTANCE |
     |  IS FUTILE  |
     \_____________/
      \___________/
```
Makefile: 81 changed lines

```diff
@@ -1,4 +1,4 @@
-.PHONY: all build test lint coverage clean install help docker-up docker-down docker-test docker-test-integration
+.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration release release-version
 
 # Binary name
 BINARY_NAME=relspec
@@ -22,9 +22,23 @@ build: ## Build the binary
 	$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
 	@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"
 
-test: ## Run tests
-	@echo "Running tests..."
-	$(GOTEST) -v -race -coverprofile=coverage.out ./...
+test: test-unit ## Run all unit tests (alias for test-unit)
+
+test-unit: ## Run unit tests (excludes integration tests)
+	@echo "Running unit tests..."
+	$(GOTEST) -v -race -coverprofile=coverage.out -covermode=atomic $$(go list ./... | grep -v '/tests/integration' | grep -v '/tests/assets' | grep -v '/pkg/readers/pgsql')
+
+test-integration: ## Run integration tests (requires RELSPEC_TEST_PG_CONN environment variable)
+	@echo "Running integration tests..."
+	@if [ -z "$$RELSPEC_TEST_PG_CONN" ]; then \
+		echo "Error: RELSPEC_TEST_PG_CONN environment variable is not set"; \
+		echo "Example: export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5432/relspec_test'"; \
+		exit 1; \
+	fi
+	@echo "Running PostgreSQL reader tests..."
+	$(GOTEST) -v -count=1 ./pkg/readers/pgsql/
+	@echo "Running general integration tests..."
+	$(GOTEST) -v -count=1 ./tests/integration/
 
 coverage: test ## Run tests with coverage report
 	@echo "Generating coverage report..."
@@ -40,6 +54,15 @@ lint: ## Run linter
 		exit 1; \
 	fi
 
+lintfix: ## Run linter
+	@echo "Running linter..."
+	@if command -v golangci-lint > /dev/null; then \
+		golangci-lint run --config=.golangci.json --fix; \
+	else \
+		echo "golangci-lint not installed. Install with: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"; \
+		exit 1; \
+	fi
 
 clean: ## Clean build artifacts
 	@echo "Cleaning..."
 	$(GOCLEAN)
@@ -89,5 +112,55 @@ docker-test-integration: docker-up ## Start DB and run integration tests
 	$(GOTEST) -v ./pkg/readers/pgsql/ -count=1 || (make docker-down && exit 1)
 	@make docker-down
 
+release: ## Create and push a new release tag (auto-increments patch version)
+	@echo "Creating new release..."
+	@latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
+	if [ -z "$$latest_tag" ]; then \
+		version="v1.0.0"; \
+		echo "No existing tags found. Creating first release: $$version"; \
+		commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
+	else \
+		echo "Latest tag: $$latest_tag"; \
+		version_number=$${latest_tag#v}; \
+		IFS='.' read -r major minor patch <<< "$$version_number"; \
+		patch=$$((patch + 1)); \
+		version="v$$major.$$minor.$$patch"; \
+		echo "Creating new release: $$version"; \
+		commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
+	fi; \
+	if [ -z "$$commit_logs" ]; then \
+		tag_message="Release $$version"; \
+	else \
+		tag_message="Release $$version\n\n$$commit_logs"; \
+	fi; \
+	git tag -a "$$version" -m "$$tag_message"; \
+	git push origin "$$version"; \
+	echo "Tag $$version created and pushed to remote repository."
+
+release-version: ## Create and push a release with specific version (use: make release-version VERSION=v1.2.3)
+	@if [ -z "$(VERSION)" ]; then \
+		echo "Error: VERSION is required. Usage: make release-version VERSION=v1.2.3"; \
+		exit 1; \
+	fi
+	@version="$(VERSION)"; \
+	if ! echo "$$version" | grep -q "^v"; then \
+		version="v$$version"; \
+	fi; \
+	echo "Creating release: $$version"; \
+	latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
+	if [ -z "$$latest_tag" ]; then \
+		commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
+	else \
+		commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
+	fi; \
+	if [ -z "$$commit_logs" ]; then \
+		tag_message="Release $$version"; \
+	else \
+		tag_message="Release $$version\n\n$$commit_logs"; \
+	fi; \
+	git tag -a "$$version" -m "$$tag_message"; \
+	git push origin "$$version"; \
+	echo "Tag $$version created and pushed to remote repository."
+
 help: ## Display this help screen
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
```
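Taken together, the new targets split the test suite and automate tagging; a quick usage sketch of the added targets (the connection string matches the one the Makefile itself suggests, and v1.2.3 is a placeholder):

```bash
make test-unit            # unit tests only; integration packages are filtered out
export RELSPEC_TEST_PG_CONN='postgres://relspec:relspec_test_password@localhost:5432/relspec_test'
make test-integration     # fails fast if RELSPEC_TEST_PG_CONN is unset
make lintfix              # golangci-lint with --fix
make release              # auto-increments the patch version, tags, and pushes
make release-version VERSION=v1.2.3   # explicit version
```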
README.md: 76 changed lines

````diff
@@ -6,34 +6,70 @@ RelSpec is a comprehensive database relations management tool that reads, transf
 
 ## Overview
 
-RelSpec provides bidirectional conversion between various database specification formats, allowing you to:
+RelSpec provides bidirectional conversion and comparison between various database specification formats, allowing you to:
 - Inspect live databases and extract their structure
-- Convert between different ORM models (GORM, Bun)
-- Transform legacy schema definitions (Clarion DCTX, XML, JSON)
-- Generate standardized specification files (JSON, YAML)
+- Convert between different ORM models (GORM, Bun, etc.)
+- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
+- Generate standardized specification files (JSON, YAML, etc.)
 
 
 
 ## Features
 
-### Input Formats
-- **XML** - Generic XML schema definitions
-- **JSON** - JSON-based schema specifications
-- **Clarion DCTX** - Clarion database dictionary format
-- **Database Inspection** - Direct database introspection
-- **GORM Models** - Read existing GORM Go structs
-- **Bun Models** - Read existing Bun Go structs
+### Readers (Input Formats)
+
+RelSpec can read database schemas from multiple sources:
+
+#### ORM Models
+- [GORM](pkg/readers/gorm/README.md) - Go GORM model definitions
+- [Bun](pkg/readers/bun/README.md) - Go Bun model definitions
+- [Drizzle](pkg/readers/drizzle/README.md) - TypeScript Drizzle ORM schemas
+- [Prisma](pkg/readers/prisma/README.md) - Prisma schema language
+- [TypeORM](pkg/readers/typeorm/README.md) - TypeScript TypeORM entities
 
-### Output Formats
-- **GORM Models** - Generate GORM-compatible Go structs
-- **Bun Models** - Generate Bun-compatible Go structs
-- **JSON** - Standard JSON schema output
-- **YAML** - Human-readable YAML format
+#### Database Inspection
+- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
+
+#### Schema Formats
+- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
+- [DCTX](pkg/readers/dctx/README.md) - Clarion database dictionary format
+- [DrawDB](pkg/readers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/readers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
+- [JSON](pkg/readers/json/README.md) - RelSpec canonical JSON format
+- [YAML](pkg/readers/yaml/README.md) - RelSpec canonical YAML format
+
+### Writers (Output Formats)
+
+RelSpec can write database schemas to multiple formats:
+
+#### ORM Models
+- [GORM](pkg/writers/gorm/README.md) - Generate GORM-compatible Go structs
+- [Bun](pkg/writers/bun/README.md) - Generate Bun-compatible Go structs
+- [Drizzle](pkg/writers/drizzle/README.md) - Generate Drizzle ORM TypeScript schemas
+- [Prisma](pkg/writers/prisma/README.md) - Generate Prisma schema files
+- [TypeORM](pkg/writers/typeorm/README.md) - Generate TypeORM TypeScript entities
+
+#### Database DDL
+- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
+
+#### Schema Formats
+- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
+- [DCTX](pkg/writers/dctx/README.md) - Clarion database dictionary format
+- [DrawDB](pkg/writers/drawdb/README.md) - DrawDB JSON format
+- [GraphQL](pkg/writers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
+- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
+- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
+
+
+## Use of AI
+[Rules and use of AI](./AI_USE.md)
 
 ## Installation
 
 ```bash
 go get github.com/wdevs/relspecgo
+
+go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
 ```
 
 ## Usage
@@ -66,6 +102,10 @@ relspecgo/
 └── tests/                # Test files
 ```
 
+## Todo
+
+[Todo List of Features](./TODO.md)
+
 ## Development
 
 ### Prerequisites
@@ -88,7 +128,7 @@ go test ./...
 
 Apache License 2.0 - See [LICENSE](LICENSE) for details.
 
-Copyright 2025 wdevs
+Copyright 2025 Warky Devs
 
 ## Contributing
 
````
TODO.md: 108 changed lines

```diff
@@ -1,110 +1,46 @@
 # RelSpec - TODO List
 
 
-## Input Readers
-- [ ] **Database Inspector**
-  - [ ] PostgreSQL driver
+## Input Readers / Writers
+- [✔️] **Database Inspector**
+  - [✔️] PostgreSQL driver
   - [ ] MySQL driver
   - [ ] SQLite driver
   - [ ] MSSQL driver
-  - [ ] Foreign key detection
-  - [ ] Index extraction
-
-- [ ] **XML Reader**
-  - [ ] XML schema parser
-  - [ ] Validation against XSD
-
-- [ ] **JSON Reader**
-  - [ ] JSON schema parser
-  - [ ] Schema validation
-
-- [ ] **Clarion DCTX Reader**
-  - [ ] DCTX file parser
-  - [ ] Legacy format support
-
-- [ ] **GORM Model Reader**
-  - [ ] Go AST parser for GORM tags
-  - [ ] Struct field analysis
-  - [ ] Relation tag extraction
-
-- [ ] **Bun Model Reader**
-  - [ ] Go AST parser for Bun tags
-  - [ ] Struct field analysis
-  - [ ] Relation tag extraction
-
-## Output Writers
-- [ ] **GORM Writer**
-  - [ ] Go struct generation
-  - [ ] GORM tag formatting
-  - [ ] Relation definitions
-  - [ ] gofmt integration
-
-- [ ] **Bun Writer**
-  - [ ] Go struct generation
-  - [ ] Bun tag formatting
-  - [ ] Relation definitions
-  - [ ] gofmt integration
-
-- [ ] **JSON Writer**
-  - [ ] Schema serialization
-  - [ ] Pretty printing
-
-- [ ] **YAML Writer**
-  - [ ] Schema serialization
-  - [ ] Comment preservation
-
-## CLI Application
-- [ ] Command-line interface using cobra
-- [ ] Input format flags
-- [ ] Output format flags
-- [ ] Connection string handling
-- [ ] File I/O operations
-- [ ] Progress indicators
-- [ ] Error handling and reporting
-- [ ] Configuration file support
-
-## Testing
-- [ ] Unit tests for each reader
-- [ ] Unit tests for each writer
-- [ ] Integration tests for conversion pipelines
-- [ ] Test fixtures for all formats
-- [ ] Database test containers
-- [ ] Benchmark tests for large schemas
+  - [✔️] Foreign key detection
+  - [✔️] Index extraction
+- [*] .sql file generation with sequence and priority
+- [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
+- [✔️] Prisma schema support (PSL format) .prisma
+- [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you find bugs, you are welcome to do pull requests or issues)
+- [☠️] Entity Framework (.NET) model .edmx (Fuck no, EDMX files were bloated, verbose XML nightmares—hard to merge, error-prone, and a pain in teams. Microsoft wisely ditched them in EF Core for code-first. Classic overkill from old MS era.)
+- [✔️] TypeORM support
+- [ ] .hbm.xml / schema.xml: Hibernate/Propel mappings (Java/PHP) (💲 Someone can do this, not me)
+- [ ] Django models.py (Python classes), Sequelize migrations (JS) (💲 Someone can do this, not me)
+- [ ] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
+- [✔️] GraphQL schema generation
 
 ## Documentation
 - [ ] API documentation (godoc)
 - [ ] Usage examples for each format combination
-- [ ] Migration guides
-- [ ] Architecture documentation
-- [ ] Contributing guidelines
 
 ## Advanced Features
 - [ ] Dry-run mode for validation
-- [ ] Diff tool for comparing specifications
+- [x] Diff tool for comparing specifications
 - [ ] Migration script generation
 - [ ] Custom type mapping configuration
 - [ ] Batch processing support
 - [ ] Watch mode for auto-regeneration
 
+## Future Considerations
+- [ ] Web UI for visual editing
+- [ ] REST API server mode
+- [ ] Support for NoSQL databases
+
+
 ## Performance
 - [ ] Concurrent processing for multiple tables
 - [ ] Streaming for large databases
 - [ ] Memory optimization
 - [ ] Caching layer for repeated operations
-
-## Quality & Maintenance
-- [ ] Linting with golangci-lint
-- [ ] Code coverage > 80%
-- [ ] Security scanning
-- [ ] Dependency updates automation
-- [ ] Release automation
-
-## Future Considerations
-- [ ] Web UI for visual editing
-- [ ] REST API server mode
-- [ ] Support for NoSQL databases
-- [ ] GraphQL schema generation
-- [ ] Prisma schema support
-- [ ] TypeORM support
```
assets/image/relspec1_512.jpg (new binary file, 107 KiB): binary file not shown.
Convert command (Go source):

```diff
@@ -6,26 +6,35 @@ import (
 	"strings"
 	"time"
 
+	"github.com/spf13/cobra"
+
 	"git.warky.dev/wdevs/relspecgo/pkg/models"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
 	"git.warky.dev/wdevs/relspecgo/pkg/writers"
 	wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
 	wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
 	wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
 	wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
+	wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
 	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
+	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
 	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
 	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
+	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
+	wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
 	wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
-	"github.com/spf13/cobra"
 )
 
 var (
@@ -51,20 +60,28 @@ Input formats:
   - dbml: DBML schema files
   - dctx: DCTX schema files
   - drawdb: DrawDB JSON files
+  - graphql: GraphQL schema files (.graphql, SDL)
   - json: JSON database schema
   - yaml: YAML database schema
   - gorm: GORM model files (Go, file or directory)
   - bun: Bun model files (Go, file or directory)
+  - drizzle: Drizzle ORM schema files (TypeScript, file or directory)
+  - prisma: Prisma schema files (.prisma)
+  - typeorm: TypeORM entity files (TypeScript)
   - pgsql: PostgreSQL database (live connection)
 
 Output formats:
   - dbml: DBML schema files
   - dctx: DCTX schema files
   - drawdb: DrawDB JSON files
+  - graphql: GraphQL schema files (.graphql, SDL)
   - json: JSON database schema
   - yaml: YAML database schema
   - gorm: GORM model files (Go)
   - bun: Bun model files (Go)
+  - drizzle: Drizzle ORM schema files (TypeScript)
+  - prisma: Prisma schema files (.prisma)
+  - typeorm: TypeORM entity files (TypeScript)
   - pgsql: PostgreSQL SQL schema
 
 PostgreSQL Connection String Examples:
@@ -123,18 +140,27 @@ Examples:
 }
 
 func init() {
-	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
+	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
 	convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
 	convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
 
-	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
+	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
 	convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
 	convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
 	convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
 
-	convertCmd.MarkFlagRequired("from")
-	convertCmd.MarkFlagRequired("to")
-	convertCmd.MarkFlagRequired("to-path")
+	err := convertCmd.MarkFlagRequired("from")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
+	}
+	err = convertCmd.MarkFlagRequired("to")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
+	}
+	err = convertCmd.MarkFlagRequired("to-path")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
+	}
 }
 
 func runConvert(cmd *cobra.Command, args []string) error {
@@ -239,6 +265,30 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
 		}
 		reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
 
+	case "drizzle":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for Drizzle format")
+		}
+		reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "prisma":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for Prisma format")
+		}
+		reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "typeorm":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for TypeORM format")
+		}
+		reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
+	case "graphql", "gql":
+		if filePath == "" {
+			return nil, fmt.Errorf("file path is required for GraphQL format")
+		}
+		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
+
 	default:
 		return nil, fmt.Errorf("unsupported source format: %s", dbType)
 	}
@@ -287,9 +337,21 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
 		}
 		writer = wbun.NewWriter(writerOpts)
 
+	case "drizzle":
+		writer = wdrizzle.NewWriter(writerOpts)
+
 	case "pgsql", "postgres", "postgresql", "sql":
 		writer = wpgsql.NewWriter(writerOpts)
 
+	case "prisma":
+		writer = wprisma.NewWriter(writerOpts)
+
+	case "typeorm":
+		writer = wtypeorm.NewWriter(writerOpts)
+
+	case "graphql", "gql":
+		writer = wgraphql.NewWriter(writerOpts)
+
 	default:
 		return fmt.Errorf("unsupported target format: %s", dbType)
 	}
@@ -318,7 +380,7 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
 	}
 
 	// For formats like DCTX that don't support full database writes, require schema filter
-	if strings.ToLower(dbType) == "dctx" {
+	if strings.EqualFold(dbType, "dctx") {
 		if len(db.Schemas) == 0 {
 			return fmt.Errorf("no schemas found in database")
 		}
```
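With the new readers and writers wired in, the convert command accepts the added formats. Hypothetical invocations, assuming the `relspec` binary from the build target; the schema paths are placeholders, while the flags and format names come from the diff above:

```bash
# Prisma schema in, Drizzle TypeScript schema out
relspec convert --from prisma --from-path schema.prisma --to drizzle --to-path ./drizzle

# TypeORM entities in, GraphQL SDL out
relspec convert --from typeorm --from-path ./entities --to graphql --to-path schema.graphql
```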
Diff command (Go source):

```diff
@@ -6,6 +6,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/spf13/cobra"
+
 	"git.warky.dev/wdevs/relspecgo/pkg/diff"
 	"git.warky.dev/wdevs/relspecgo/pkg/models"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers"
@@ -15,7 +17,6 @@ import (
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
-	"github.com/spf13/cobra"
 )
 
 var (
@@ -96,8 +97,14 @@ func init() {
 	diffCmd.Flags().StringVar(&outputFormat, "format", "summary", "Output format (summary, json, html)")
 	diffCmd.Flags().StringVar(&outputPath, "output", "", "Output file path (default: stdout for summary, required for json/html)")
 
-	diffCmd.MarkFlagRequired("from")
-	diffCmd.MarkFlagRequired("to")
+	err := diffCmd.MarkFlagRequired("from")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
+	}
+	err = diffCmd.MarkFlagRequired("to")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
+	}
 }
 
 func runDiff(cmd *cobra.Command, args []string) error {
```
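Based on the flags declared above, a hypothetical invocation sketch; the schema file names are placeholders, and the value syntax for `--from`/`--to` is assumed to be file paths (only the flags' required status is visible in this hunk):

```bash
# Hypothetical: compare two schema files and print a summary to stdout
relspec diff --from old_schema.json --to new_schema.json --format summary

# Hypothetical: write an HTML report (--output is required for json/html)
relspec diff --from old_schema.json --to new_schema.json --format html --output report.html
```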
go.mod: 194 changed lines

```diff
@@ -1,193 +1,31 @@
 module git.warky.dev/wdevs/relspecgo
 
-go 1.25.5
+go 1.24.0
 
 require (
-	4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
-	4d63.com/gochecknoglobals v0.2.2 // indirect
-	github.com/4meepo/tagalign v1.4.2 // indirect
-	github.com/Abirdcfly/dupword v0.1.3 // indirect
-	github.com/Antonboom/errname v1.0.0 // indirect
-	github.com/Antonboom/nilnil v1.0.1 // indirect
-	github.com/Antonboom/testifylint v1.5.2 // indirect
-	github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect
-	github.com/Crocmagnon/fatcontext v0.7.1 // indirect
-	github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
-	github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 // indirect
-	github.com/Masterminds/semver/v3 v3.3.0 // indirect
-	github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
-	github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
-	github.com/alexkohler/nakedret/v2 v2.0.5 // indirect
-	github.com/alexkohler/prealloc v1.0.0 // indirect
-	github.com/alingse/asasalint v0.0.11 // indirect
-	github.com/alingse/nilnesserr v0.1.2 // indirect
-	github.com/ashanbrown/forbidigo v1.6.0 // indirect
-	github.com/ashanbrown/makezero v1.2.0 // indirect
-	github.com/beorn7/perks v1.0.1 // indirect
+	github.com/google/uuid v1.6.0
+	github.com/jackc/pgx/v5 v5.7.6
+	github.com/spf13/cobra v1.10.2
+	github.com/stretchr/testify v1.11.1
+	github.com/uptrace/bun v1.2.16
+	gopkg.in/yaml.v3 v3.0.1
+)
 
-	github.com/bkielbasa/cyclop v1.2.3 // indirect
-	github.com/blizzy78/varnamelen v0.8.0 // indirect
-	github.com/bombsimon/wsl/v4 v4.5.0 // indirect
-	github.com/breml/bidichk v0.3.2 // indirect
-	github.com/breml/errchkjson v0.4.0 // indirect
-	github.com/butuzov/ireturn v0.3.1 // indirect
-	github.com/butuzov/mirror v1.3.0 // indirect
-	github.com/catenacyber/perfsprint v0.8.2 // indirect
-	github.com/ccojocar/zxcvbn-go v1.0.2 // indirect
-	github.com/cespare/xxhash/v2 v2.3.0 // indirect
-	github.com/charithe/durationcheck v0.0.10 // indirect
-	github.com/chavacava/garif v0.1.0 // indirect
-	github.com/ckaznocha/intrange v0.3.0 // indirect
-	github.com/curioswitch/go-reassign v0.3.0 // indirect
-	github.com/daixiang0/gci v0.13.5 // indirect
+require (
 	github.com/davecgh/go-spew v1.1.1 // indirect
-	github.com/denis-tingaikin/go-header v0.5.0 // indirect
-	github.com/ettle/strcase v0.2.0 // indirect
-	github.com/fatih/color v1.18.0 // indirect
-	github.com/fatih/structtag v1.2.0 // indirect
-	github.com/firefart/nonamedreturns v1.0.5 // indirect
-	github.com/fsnotify/fsnotify v1.9.0 // indirect
-	github.com/fzipp/gocyclo v0.6.0 // indirect
-	github.com/ghostiam/protogetter v0.3.9 // indirect
-	github.com/go-critic/go-critic v0.12.0 // indirect
-	github.com/go-toolsmith/astcast v1.1.0 // indirect
-	github.com/go-toolsmith/astcopy v1.1.0 // indirect
-	github.com/go-toolsmith/astequal v1.2.0 // indirect
-	github.com/go-toolsmith/astfmt v1.1.0 // indirect
-	github.com/go-toolsmith/astp v1.1.0 // indirect
-	github.com/go-toolsmith/strparse v1.1.0 // indirect
-	github.com/go-toolsmith/typep v1.1.0 // indirect
-	github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
-	github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
-	github.com/gobwas/glob v0.2.3 // indirect
-	github.com/gofrs/flock v0.12.1 // indirect
-	github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
-	github.com/golangci/go-printf-func-name v0.1.0 // indirect
-	github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
-	github.com/golangci/golangci-lint v1.64.8 // indirect
-	github.com/golangci/misspell v0.6.0 // indirect
-	github.com/golangci/plugin-module-register v0.1.1 // indirect
-	github.com/golangci/revgrep v0.8.0 // indirect
-	github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect
-	github.com/google/go-cmp v0.7.0 // indirect
-	github.com/google/uuid v1.6.0 // indirect
-	github.com/gordonklaus/ineffassign v0.1.0 // indirect
-	github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
-	github.com/gostaticanalysis/comment v1.5.0 // indirect
-	github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
-	github.com/gostaticanalysis/nilerr v0.1.1 // indirect
-	github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
-	github.com/hashicorp/go-version v1.7.0 // indirect
-	github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
-	github.com/hexops/gotextdiff v1.0.3 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jackc/pgpassfile v1.0.0 // indirect
 	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
-	github.com/jackc/pgx/v5 v5.7.6 // indirect
-	github.com/jgautheron/goconst v1.7.1 // indirect
-	github.com/jingyugao/rowserrcheck v1.1.1 // indirect
-	github.com/jjti/go-spancheck v0.6.4 // indirect
-	github.com/julz/importas v0.2.0 // indirect
-	github.com/karamaru-alpha/copyloopvar v1.2.1 // indirect
-	github.com/kisielk/errcheck v1.9.0 // indirect
-	github.com/kkHAIKE/contextcheck v1.1.6 // indirect
-	github.com/kulti/thelper v0.6.3 // indirect
-	github.com/kunwardeep/paralleltest v1.0.10 // indirect
-	github.com/lasiar/canonicalheader v1.1.2 // indirect
-	github.com/ldez/exptostd v0.4.2 // indirect
-	github.com/ldez/gomoddirectives v0.6.1 // indirect
-	github.com/ldez/grignotin v0.9.0 // indirect
-	github.com/ldez/tagliatelle v0.7.1 // indirect
-	github.com/ldez/usetesting v0.4.2 // indirect
-	github.com/leonklingele/grouper v1.1.2 // indirect
-	github.com/macabu/inamedparam v0.1.3 // indirect
-	github.com/maratori/testableexamples v1.0.0 // indirect
-	github.com/maratori/testpackage v1.1.1 // indirect
-	github.com/matoous/godox v1.1.0 // indirect
-	github.com/mattn/go-colorable v0.1.14 // indirect
-	github.com/mattn/go-isatty v0.0.20 // indirect
-	github.com/mattn/go-runewidth v0.0.16 // indirect
-	github.com/mgechev/revive v1.7.0 // indirect
-	github.com/mitchellh/go-homedir v1.1.0 // indirect
-	github.com/moricho/tparallel v0.3.2 // indirect
-	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
-	github.com/nakabonne/nestif v0.3.1 // indirect
-	github.com/nishanths/exhaustive v0.12.0 // indirect
-	github.com/nishanths/predeclared v0.2.2 // indirect
-	github.com/nunnatsa/ginkgolinter v0.19.1 // indirect
-	github.com/olekukonko/tablewriter v0.0.5 // indirect
-	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
+	github.com/jinzhu/inflection v1.0.0 // indirect
+	github.com/kr/pretty v0.3.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/polyfloyd/go-errorlint v1.7.1 // indirect
-	github.com/prometheus/client_golang v1.23.2 // indirect
-	github.com/prometheus/client_model v0.6.2 // indirect
-	github.com/prometheus/common v0.66.1 // indirect
-	github.com/prometheus/procfs v0.16.1 // indirect
-	github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 // indirect
-	github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect
-	github.com/quasilyte/gogrep v0.5.0 // indirect
-	github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
-	github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
-	github.com/raeperd/recvcheck v0.2.0 // indirect
-	github.com/rivo/uniseg v0.4.7 // indirect
+	github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
 	github.com/rogpeppe/go-internal v1.14.1 // indirect
-	github.com/ryancurrah/gomodguard v1.3.5 // indirect
-	github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
-	github.com/sagikazarmark/locafero v0.11.0 // indirect
-	github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
-	github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect
-	github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
-	github.com/sashamelentyev/usestdlibvars v1.28.0 // indirect
-	github.com/securego/gosec/v2 v2.22.2 // indirect
-	github.com/sirupsen/logrus v1.9.3 // indirect
-	github.com/sivchari/containedctx v1.0.3 // indirect
-	github.com/sivchari/tenv v1.12.1 // indirect
-	github.com/sonatard/noctx v0.1.0 // indirect
-	github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect
-	github.com/sourcegraph/go-diff v0.7.0 // indirect
-	github.com/spf13/afero v1.15.0 // indirect
-	github.com/spf13/cast v1.10.0 // indirect
-	github.com/spf13/cobra v1.10.2 // indirect
 	github.com/spf13/pflag v1.0.10 // indirect
-	github.com/spf13/viper v1.21.0 // indirect
-	github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
-	github.com/stbenjam/no-sprintf-host-port v0.2.0 // indirect
-	github.com/stretchr/objx v0.5.2 // indirect
-	github.com/stretchr/testify v1.11.1 // indirect
-	github.com/subosito/gotenv v1.6.0 // indirect
-	github.com/tdakkota/asciicheck v0.4.1 // indirect
-	github.com/tetafro/godot v1.5.0 // indirect
-	github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 // indirect
-	github.com/timonwong/loggercheck v0.10.1 // indirect
-	github.com/tomarrell/wrapcheck/v2 v2.10.0 // indirect
-	github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
-	github.com/ultraware/funlen v0.2.0 // indirect
-	github.com/ultraware/whitespace v0.2.0 // indirect
-	github.com/uudashr/gocognit v1.2.0 // indirect
-	github.com/uudashr/iface v1.3.1 // indirect
-	github.com/xen0n/gosmopolitan v1.2.2 // indirect
-	github.com/yagipy/maintidx v1.0.0 // indirect
-	github.com/yeya24/promlinter v0.3.0 // indirect
-	github.com/ykadowak/zerologlint v0.1.5 // indirect
-	gitlab.com/bosi/decorder v0.4.2 // indirect
-	go-simpler.org/musttag v0.13.0 // indirect
-	go-simpler.org/sloglint v0.9.0 // indirect
-	go.uber.org/automaxprocs v1.6.0 // indirect
-	go.uber.org/multierr v1.10.0 // indirect
-	go.uber.org/zap v1.27.0 // indirect
-	go.yaml.in/yaml/v2 v2.4.2 // indirect
-	go.yaml.in/yaml/v3 v3.0.4 // indirect
+	github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
+	github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
+	github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
 	golang.org/x/crypto v0.41.0 // indirect
-	golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac // indirect
-	golang.org/x/mod v0.26.0 // indirect
-	golang.org/x/sync v0.16.0 // indirect
-	golang.org/x/sys v0.35.0 // indirect
+	golang.org/x/sys v0.38.0 // indirect
 	golang.org/x/text v0.28.0 // indirect
-	golang.org/x/tools v0.35.0 // indirect
-	google.golang.org/protobuf v1.36.8 // indirect
-	gopkg.in/yaml.v2 v2.4.0 // indirect
-	gopkg.in/yaml.v3 v3.0.1 // indirect
-	honnef.co/go/tools v0.6.1 // indirect
-	mvdan.cc/gofumpt v0.7.0 // indirect
-	mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f // indirect
 )
```
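go.mod drops the golangci-lint toolchain from the module graph (the linter is now installed as a standalone binary in CI) and sets `go 1.24.0`, leaving only runtime dependencies. The body of the CI "Check mod tidiness" step is not shown in this compare view; a common equivalent check, as a sketch:

```bash
go mod tidy
git diff --exit-code go.mod go.sum   # fails if tidy changed anything
```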
514
go.sum
514
go.sum
@@ -1,171 +1,10 @@
|
|||||||
4d63.com/gocheckcompilerdirectives v1.3.0 h1:Ew5y5CtcAAQeTVKUVFrE7EwHMrTO6BggtEj8BZSjZ3A=
4d63.com/gocheckcompilerdirectives v1.3.0/go.mod h1:ofsJ4zx2QAuIP/NO/NAh1ig6R1Fb18/GI7RVMwz7kAY=
4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU=
4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0=
github.com/4meepo/tagalign v1.4.2 h1:0hcLHPGMjDyM1gHG58cS73aQF8J4TdVR96TZViorO9E=
github.com/4meepo/tagalign v1.4.2/go.mod h1:+p4aMyFM+ra7nb41CnFG6aSDXqRxU/w1VQqScKqDARI=
github.com/Abirdcfly/dupword v0.1.3 h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE=
github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw=
github.com/Antonboom/errname v1.0.0 h1:oJOOWR07vS1kRusl6YRSlat7HFnb3mSfMl6sDMRoTBA=
github.com/Antonboom/errname v1.0.0/go.mod h1:gMOBFzK/vrTiXN9Oh+HFs+e6Ndl0eTFbtsRTSRdXyGI=
github.com/Antonboom/nilnil v1.0.1 h1:C3Tkm0KUxgfO4Duk3PM+ztPncTFlOf0b2qadmS0s4xs=
github.com/Antonboom/nilnil v1.0.1/go.mod h1:CH7pW2JsRNFgEh8B2UaPZTEPhCMuFowP/e8Udp9Nnb0=
github.com/Antonboom/testifylint v1.5.2 h1:4s3Xhuv5AvdIgbd8wOOEeo0uZG7PbDKQyKY5lGoQazk=
github.com/Antonboom/testifylint v1.5.2/go.mod h1:vxy8VJ0bc6NavlYqjZfmp6EfqXMtBgQ4+mhCojwC1P8=
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs=
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/Crocmagnon/fatcontext v0.7.1 h1:SC/VIbRRZQeQWj/TcQBS6JmrXcfA+BU4OGSVUt54PjM=
github.com/Crocmagnon/fatcontext v0.7.1/go.mod h1:1wMvv3NXEBJucFGfwOJBxSVWcoIO6emV215SMkW9MFU=
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM=
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 h1:Sz1JIXEcSfhz7fUi7xHnhpIE0thVASYjvosApmHuD2k=
github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1/go.mod h1:n/LSCXNuIYqVfBlVXyHfMQkZDdp1/mmxfSjADd3z1Zg=
github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU=
github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E=
github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU=
github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU=
github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw=
github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE=
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I=
github.com/alingse/nilnesserr v0.1.2 h1:Yf8Iwm3z2hUUrP4muWfW83DF4nE3r1xZ26fGWUKCZlo=
github.com/alingse/nilnesserr v0.1.2/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg=
github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY=
github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU=
github.com/ashanbrown/makezero v1.2.0 h1:/2Lp1bypdmK9wDIq7uWBlDF1iMUpIIS4A+pF6C9IEUU=
github.com/ashanbrown/makezero v1.2.0/go.mod h1:dxlPhHbDMC6N6xICzFBSK+4njQDdK8euNO0qjQMtGY4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bitechdev/ResolveSpec v0.0.108 h1:0Asw4zt9SdBIDprNqtrGY67R4SovAPBmW2y1qRn/Wjw=
github.com/bitechdev/ResolveSpec v0.0.108/go.mod h1:/mtVcbXSBLNmWlTKeDnbQx18tmNqOnrpetpLOadLzqo=
github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w=
github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo=
github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M=
github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k=
github.com/bombsimon/wsl/v4 v4.5.0 h1:iZRsEvDdyhd2La0FVi5k6tYehpOR/R7qIUjmKk7N74A=
github.com/bombsimon/wsl/v4 v4.5.0/go.mod h1:NOQ3aLF4nD7N5YPXMruR6ZXDOAqLoM0GEpLwTdvmOSc=
github.com/breml/bidichk v0.3.2 h1:xV4flJ9V5xWTqxL+/PMFF6dtJPvZLPsyixAoPe8BGJs=
github.com/breml/bidichk v0.3.2/go.mod h1:VzFLBxuYtT23z5+iVkamXO386OB+/sVwZOpIj6zXGos=
github.com/breml/errchkjson v0.4.0 h1:gftf6uWZMtIa/Is3XJgibewBm2ksAQSY/kABDNFTAdk=
github.com/breml/errchkjson v0.4.0/go.mod h1:AuBOSTHyLSaaAFlWsRSuRBIroCh3eh7ZHh5YeelDIk8=
github.com/butuzov/ireturn v0.3.1 h1:mFgbEI6m+9W8oP/oDdfA34dLisRFCj2G6o/yiI1yZrY=
github.com/butuzov/ireturn v0.3.1/go.mod h1:ZfRp+E7eJLC0NQmk1Nrm1LOrn/gQlOykv+cVPdiXH5M=
github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc=
github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI=
github.com/catenacyber/perfsprint v0.8.2 h1:+o9zVmCSVa7M4MvabsWvESEhpsMkhfE7k0sHNGL95yw=
github.com/catenacyber/perfsprint v0.8.2/go.mod h1:q//VWC2fWbcdSLEY1R3l8n0zQCDPdE4IjZwyY1HMunM=
github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg=
github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4=
github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ=
github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc=
github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww=
github.com/ckaznocha/intrange v0.3.0 h1:VqnxtK32pxgkhJgYQEeOArVidIPg+ahLP7WBOXZd5ZY=
github.com/ckaznocha/intrange v0.3.0/go.mod h1:+I/o2d2A1FBHgGELbGxzIcyd3/9l9DuwjM8FsbSS3Lo=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88=
github.com/daixiang0/gci v0.13.5 h1:kThgmH1yBmZSBCh1EJVxQ7JsHpm5Oms0AMed/0LaH4c=
github.com/daixiang0/gci v0.13.5/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8=
github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q=
github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA=
github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA=
github.com/ghostiam/protogetter v0.3.9 h1:j+zlLLWzqLay22Cz/aYwTHKQ88GE2DQ6GkWSYFOI4lQ=
github.com/ghostiam/protogetter v0.3.9/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA=
github.com/go-critic/go-critic v0.12.0 h1:iLosHZuye812wnkEz1Xu3aBwn5ocCPfc9yqmFG9pa6w=
github.com/go-critic/go-critic v0.12.0/go.mod h1:DpE0P6OVc6JzVYzmM5gq5jMU31zLr4am5mB/VfFK64w=
github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8=
github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU=
github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s=
github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw=
github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4=
github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ=
github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw=
github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY=
github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco=
github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4=
github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA=
github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA=
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw=
github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus=
github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY=
github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E=
github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0=
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 h1:WUvBfQL6EW/40l6OmeSBYQJNSif4O11+bmWEz+C7FYw=
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32/go.mod h1:NUw9Zr2Sy7+HxzdjIULge71wI6yEg1lWQr7Evcu8K0E=
github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU=
github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
github.com/golangci/golangci-lint v1.64.8 h1:y5TdeVidMtBGG32zgSC7ZXTFNHrsJkDnpO4ItB3Am+I=
github.com/golangci/golangci-lint v1.64.8/go.mod h1:5cEsUQBSr6zi8XI8OjmcY2Xmliqc4iYL7YoPrL+zLJ4=
github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs=
github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo=
github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c=
github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc=
github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s=
github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k=
github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs=
github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s=
github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0=
github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk=
github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc=
github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado=
github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM=
github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytmEpgnKIDu8=
github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc=
github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk=
github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY=
github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk=
github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A=
github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M=
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo=
github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw=
github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
@@ -174,356 +13,53 @@ github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7Ulw
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jjti/go-spancheck v0.6.4 h1:Tl7gQpYf4/TMU7AT84MN83/6PutY21Nb9fuQjFTpRRc=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/jjti/go-spancheck v0.6.4/go.mod h1:yAEYdKJ2lRkDA8g7X+oKUHXOWVAXSBJRv04OhF+QUjk=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/karamaru-alpha/copyloopvar v1.2.1 h1:wmZaZYIjnJ0b5UoKDjUHrikcV0zuPyyxI4SVplLd2CI=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/karamaru-alpha/copyloopvar v1.2.1/go.mod h1:nFmMlFNlClC2BPvNaHMdkirmTJxVCY0lhxBtlfOypMM=
github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M=
github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs=
github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I=
github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs=
github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY=
github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4=
github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI=
github.com/ldez/exptostd v0.4.2 h1:l5pOzHBz8mFOlbcifTxzfyYbgEmoUqjxLFHZkjlbHXs=
github.com/ldez/exptostd v0.4.2/go.mod h1:iZBRYaUmcW5jwCR3KROEZ1KivQQp6PHXbDPk9hqJKCQ=
github.com/ldez/gomoddirectives v0.6.1 h1:Z+PxGAY+217f/bSGjNZr/b2KTXcyYLgiWI6geMBN2Qc=
github.com/ldez/gomoddirectives v0.6.1/go.mod h1:cVBiu3AHR9V31em9u2kwfMKD43ayN5/XDgr+cdaFaKs=
github.com/ldez/grignotin v0.9.0 h1:MgOEmjZIVNn6p5wPaGp/0OKWyvq42KnzAt/DAb8O4Ow=
github.com/ldez/grignotin v0.9.0/go.mod h1:uaVTr0SoZ1KBii33c47O1M8Jp3OP3YDwhZCmzT9GHEk=
github.com/ldez/tagliatelle v0.7.1 h1:bTgKjjc2sQcsgPiT902+aadvMjCeMHrY7ly2XKFORIk=
github.com/ldez/tagliatelle v0.7.1/go.mod h1:3zjxUpsNB2aEZScWiZTHrAXOl1x25t3cRmzfK1mlo2I=
github.com/ldez/usetesting v0.4.2 h1:J2WwbrFGk3wx4cZwSMiCQQ00kjGR0+tuuyW0Lqm4lwA=
github.com/ldez/usetesting v0.4.2/go.mod h1:eEs46T3PpQ+9RgN9VjpY6qWdiw2/QmfiDeWmdZdrjIQ=
github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY=
github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA=
github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk=
github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I=
github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI=
github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE=
github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04=
github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc=
github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4=
github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs=
github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mgechev/revive v1.7.0 h1:JyeQ4yO5K8aZhIKf5rec56u0376h8AlKNQEmjfkjKlY=
github.com/mgechev/revive v1.7.0/go.mod h1:qZnwcNhoguE58dfi96IJeSTPeZQejNeoMQLUZGi4SW4=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI=
github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U=
github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE=
github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg=
github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs=
github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk=
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
github.com/nunnatsa/ginkgolinter v0.19.1 h1:mjwbOlDQxZi9Cal+KfbEJTCz327OLNfwNvoZ70NJ+c4=
github.com/nunnatsa/ginkgolinter v0.19.1/go.mod h1:jkQ3naZDmxaZMXPWaS9rblH+i+GWXQCaS/JFIWcOH2s=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/polyfloyd/go-errorlint v1.7.1 h1:RyLVXIbosq1gBdk/pChWA8zWYLsq9UEw7a1L5TVMCnA=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/polyfloyd/go-errorlint v1.7.1/go.mod h1:aXjNb1x2TNhoLsk26iv1yl7a+zTnXPhwEMtEXukiLR8=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 h1:+Wl/0aFp0hpuHM3H//KMft64WQ1yX9LdJY64Qm/gFCo=
github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI=
github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE=
github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo=
github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng=
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU=
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0=
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs=
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ=
github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI=
github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU=
github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE=
github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU=
github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ=
github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc=
github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik=
github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0=
github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4=
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw=
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw=
github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
github.com/sashamelentyev/usestdlibvars v1.28.0 h1:jZnudE2zKCtYlGzLVreNp5pmCdOxXUzwsMDBkR21cyQ=
github.com/sashamelentyev/usestdlibvars v1.28.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8=
github.com/securego/gosec/v2 v2.22.2 h1:IXbuI7cJninj0nRpZSLCUlotsj8jGusohfONMrHoF6g=
github.com/securego/gosec/v2 v2.22.2/go.mod h1:UEBGA+dSKb+VqM6TdehR7lnQtIIMorYJ4/9CW1KVQBE=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE=
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
github.com/sivchari/tenv v1.12.1 h1:+E0QzjktdnExv/wwsnnyk4oqZBUfuh89YMQT1cyuvSY=
github.com/sivchari/tenv v1.12.1/go.mod h1:1LjSOUCc25snIr5n3DtGGrENhX3LuWefcplwVGC24mw=
github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM=
github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U=
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0=
github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I=
github.com/stbenjam/no-sprintf-host-port v0.2.0 h1:i8pxvGrt1+4G0czLr/WnmyH7zbZ8Bg8etvARQ1rpyl4=
github.com/stbenjam/no-sprintf-host-port v0.2.0/go.mod h1:eL0bQ9PasS0hsyTyfTjjG+E80QIyPnBVQbYZyv20Jfk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
github.com/tdakkota/asciicheck v0.4.1 h1:bm0tbcmi0jezRA2b5kg4ozmMuGAFotKI3RZfrhfovg8=
github.com/uptrace/bun v1.2.16 h1:QlObi6ZIK5Ao7kAALnh91HWYNZUBbVwye52fmlQM9kc=
github.com/tdakkota/asciicheck v0.4.1/go.mod h1:0k7M3rCfRXb0Z6bwgvkEIMleKH3kXNz9UqJ9Xuqopr8=
github.com/uptrace/bun v1.2.16/go.mod h1:jMoNg2n56ckaawi/O/J92BHaECmrz6IRjuMWqlMaMTM=
github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
github.com/tetafro/godot v1.5.0 h1:aNwfVI4I3+gdxjMgYPus9eHmoBeJIbnajOyqZYStzuw=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
github.com/tetafro/godot v1.5.0/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 h1:y4mJRFlM6fUyPhoXuFg/Yu02fg/nIPFMOY8tOqppoFg=
github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460=
github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg=
github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8=
github.com/tomarrell/wrapcheck/v2 v2.10.0 h1:SzRCryzy4IrAH7bVGG4cK40tNUhmVmMDuJujy4XwYDg=
github.com/tomarrell/wrapcheck/v2 v2.10.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo=
github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw=
github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw=
github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI=
github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA=
github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU=
github.com/uudashr/iface v1.3.1 h1:bA51vmVx1UIhiIsQFSNq6GZ6VPTk3WNMZgRiCe9R29U=
github.com/uudashr/iface v1.3.1/go.mod h1:4QvspiRd3JLPAEXBQ9AiZpLbJlrWWgRChOKDJEuQTdg=
github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU=
github.com/xen0n/gosmopolitan v1.2.2/go.mod h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg=
github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM=
github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk=
github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs=
github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4=
github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw=
github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE=
go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM=
go-simpler.org/sloglint v0.9.0 h1:/40NQtjRx9txvsB/RN022KsUJU+zaaSb/9q9BSefSrE=
go-simpler.org/sloglint v0.9.0/go.mod h1:G/OrAF6uxj48sHahCzrbarVMptL2kjWTaUeC8+fOGww=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/exp v0.0.0-20250711185948-6ae5c78190dc h1:TS73t7x3KarrNd5qAipmspBDS1rkMcgVG/fS1aRb4Rc=
golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac h1:TSSpLIG4v+p0rPv1pNOQtl1I8knsO4S9trOxNMOLVP4=
golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg=
golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0=
golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.6.1 h1:R094WgE8K4JirYjBaOpz/AvTyUu/3wbmAoskKN/pxTI=
honnef.co/go/tools v0.6.1/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4=
mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU=
mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo=
mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f h1:lMpcwN6GxNbWtbpI1+xzFLSW8XzX0u72NttUGVFjO3U=
mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f/go.mod h1:RSLa7mKKCNeTTMHBw5Hsy2rfJmd6O2ivt9Dw9ZqCQpQ=
@@ -1,71 +0,0 @@
#!/bin/bash

# Ask if the user wants to make a release version
read -p "Do you want to make a release version? (y/n): " make_release

if [[ $make_release =~ ^[Yy]$ ]]; then
    # Get the latest tag from git
    latest_tag=$(git describe --tags --abbrev=0 2>/dev/null)

    if [ -z "$latest_tag" ]; then
        # No tags exist yet, start with v1.0.0
        suggested_version="v1.0.0"
        echo "No existing tags found. Starting with $suggested_version"
    else
        echo "Latest tag: $latest_tag"

        # Remove 'v' prefix if present
        version_number="${latest_tag#v}"

        # Split version into major.minor.patch
        IFS='.' read -r major minor patch <<< "$version_number"

        # Increment patch version
        patch=$((patch + 1))

        # Construct new version
        suggested_version="v${major}.${minor}.${patch}"
        echo "Suggested next version: $suggested_version"
    fi

    # Ask the user for the version number with the suggested version as default
    read -p "Enter the version number (press Enter for $suggested_version): " version

    # Use suggested version if user pressed Enter without input
    if [ -z "$version" ]; then
        version="$suggested_version"
    fi

    # Prepend 'v' to the version if it doesn't start with it
    if ! [[ $version =~ ^v ]]; then
        version="v$version"
    fi

    # Get commit logs since the last tag
    if [ -z "$latest_tag" ]; then
        # No previous tag, get all commits
        commit_logs=$(git log --pretty=format:"- %s" --no-merges)
    else
        # Get commits since the last tag
        commit_logs=$(git log "${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges)
    fi

    # Create the tag message
    if [ -z "$commit_logs" ]; then
        tag_message="Release $version"
    else
        tag_message="Release $version

${commit_logs}"
    fi

    # Create an annotated tag with the commit logs
    git tag -a "$version" -m "$tag_message"

    # Push the tag to the remote repository
    git push origin "$version"

    echo "Tag $version created and pushed to the remote repository."
else
    echo "No release version created."
fi
@@ -2,14 +2,15 @@ package diff

import (
    "reflect"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

// CompareDatabases compares two database models and returns the differences
func CompareDatabases(source, target *models.Database) *DiffResult {
    result := &DiffResult{
        Source:  source.Name,
        Target:  target.Name,
        Schemas: compareSchemas(source.Schemas, target.Schemas),
    }
    return result
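For orientation, a minimal sketch of calling `CompareDatabases` and printing the result as JSON; the two models are built by hand here purely for illustration, where in practice they would come from two readers:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"

	"git.warky.dev/wdevs/relspecgo/pkg/diff"
	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	// Hand-built, empty models for illustration; readers normally produce these.
	source := models.InitDatabase("app_v1")
	target := models.InitDatabase("app_v2")

	result := diff.CompareDatabases(source, target)

	// The diff result types carry JSON tags, so the result serializes directly.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	if err := enc.Encode(result); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```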
@@ -4,8 +4,8 @@ import "git.warky.dev/wdevs/relspecgo/pkg/models"

// DiffResult represents the complete difference analysis between two databases
type DiffResult struct {
    Source  string      `json:"source"`
    Target  string      `json:"target"`
    Schemas *SchemaDiff `json:"schemas"`
}

@@ -18,17 +18,17 @@ type SchemaDiff struct {

// SchemaChange represents changes within a schema
type SchemaChange struct {
    Name      string        `json:"name"`
    Tables    *TableDiff    `json:"tables,omitempty"`
    Views     *ViewDiff     `json:"views,omitempty"`
    Sequences *SequenceDiff `json:"sequences,omitempty"`
}

// TableDiff represents differences in tables
type TableDiff struct {
    Missing  []*models.Table `json:"missing"`  // Tables in source but not in target
    Extra    []*models.Table `json:"extra"`    // Tables in target but not in source
    Modified []*TableChange  `json:"modified"` // Tables that exist in both but differ
}

// TableChange represents changes within a table

@@ -50,16 +50,16 @@ type ColumnDiff struct {

// ColumnChange represents a modified column
type ColumnChange struct {
    Name    string         `json:"name"`
    Source  *models.Column `json:"source"`
    Target  *models.Column `json:"target"`
    Changes map[string]any `json:"changes"` // Map of field name to what changed
}

// IndexDiff represents differences in indexes
type IndexDiff struct {
    Missing  []*models.Index `json:"missing"`  // Indexes in source but not in target
    Extra    []*models.Index `json:"extra"`    // Indexes in target but not in source
    Modified []*IndexChange  `json:"modified"` // Indexes that exist in both but differ
}

@@ -103,8 +103,8 @@ type RelationshipChange struct {

// ViewDiff represents differences in views
type ViewDiff struct {
    Missing  []*models.View `json:"missing"`  // Views in source but not in target
    Extra    []*models.View `json:"extra"`    // Views in target but not in source
    Modified []*ViewChange  `json:"modified"` // Views that exist in both but differ
}

@@ -133,14 +133,14 @@ type SequenceChange struct {

// Summary provides counts for quick overview
type Summary struct {
    Schemas       SchemaSummary       `json:"schemas"`
    Tables        TableSummary        `json:"tables"`
    Columns       ColumnSummary       `json:"columns"`
    Indexes       IndexSummary        `json:"indexes"`
    Constraints   ConstraintSummary   `json:"constraints"`
    Relationships RelationshipSummary `json:"relationships"`
    Views         ViewSummary         `json:"views"`
    Sequences     SequenceSummary     `json:"sequences"`
}

type SchemaSummary struct {
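A hedged sketch of walking one of these structures; it relies only on the `Missing`, `Extra`, and `Modified` fields shown above, and the `reportTables` helper is invented for the example:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/diff"
)

// reportTables prints a short summary of a TableDiff.
func reportTables(td *diff.TableDiff) {
	if td == nil {
		return
	}
	for _, t := range td.Missing {
		fmt.Printf("missing in target: %s\n", t.Name)
	}
	for _, t := range td.Extra {
		fmt.Printf("extra in target: %s\n", t.Name)
	}
	fmt.Printf("modified: %d tables\n", len(td.Modified))
}

func main() {
	reportTables(&diff.TableDiff{}) // normally taken from a computed DiffResult
}
```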
@@ -2,7 +2,13 @@ package models

import "encoding/xml"

-// DCTXDictionary represents the root element of a DCTX file
+// DCTX File Format Models
+//
+// This file defines the data structures for parsing and generating DCTX
+// (Data Dictionary) XML files, which are used by Clarion development tools
+// for database schema definitions.
+
+// DCTXDictionary represents the root element of a DCTX file.
type DCTXDictionary struct {
    XMLName xml.Name `xml:"Dictionary"`
    Name    string   `xml:"Name,attr"`

@@ -11,7 +17,7 @@ type DCTXDictionary struct {
    Relations []DCTXRelation `xml:"Relation,omitempty"`
}

-// DCTXTable represents a table definition in DCTX
+// DCTXTable represents a table definition in DCTX format.
type DCTXTable struct {
    Guid string `xml:"Guid,attr"`
    Name string `xml:"Name,attr"`

@@ -25,7 +31,8 @@ type DCTXTable struct {
    Options []DCTXOption `xml:"Option,omitempty"`
}

-// DCTXField represents a field/column definition in DCTX
+// DCTXField represents a field/column definition in DCTX format.
+// Fields can be nested for GROUP structures.
type DCTXField struct {
    Guid string `xml:"Guid,attr"`
    Name string `xml:"Name,attr"`

@@ -37,7 +44,7 @@ type DCTXField struct {
    Options []DCTXOption `xml:"Option,omitempty"`
}

-// DCTXKey represents an index or key definition in DCTX
+// DCTXKey represents an index or key definition in DCTX format.
type DCTXKey struct {
    Guid string `xml:"Guid,attr"`
    Name string `xml:"Name,attr"`

@@ -49,7 +56,7 @@ type DCTXKey struct {
    Components []DCTXComponent `xml:"Component"`
}

-// DCTXComponent represents a component of a key (field reference)
+// DCTXComponent represents a component of a key, referencing a field in the index.
type DCTXComponent struct {
    Guid    string `xml:"Guid,attr"`
    FieldId string `xml:"FieldId,attr,omitempty"`

@@ -57,14 +64,14 @@ type DCTXComponent struct {
    Ascend bool `xml:"Ascend,attr,omitempty"`
}

-// DCTXOption represents a property option in DCTX
+// DCTXOption represents a property option in DCTX format for metadata storage.
type DCTXOption struct {
    Property      string `xml:"Property,attr"`
    PropertyType  string `xml:"PropertyType,attr,omitempty"`
    PropertyValue string `xml:"PropertyValue,attr"`
}

-// DCTXRelation represents a relationship/foreign key in DCTX
+// DCTXRelation represents a relationship/foreign key in DCTX format.
type DCTXRelation struct {
    Guid         string `xml:"Guid,attr"`
    PrimaryTable string `xml:"PrimaryTable,attr"`

@@ -77,7 +84,7 @@ type DCTXRelation struct {
    PrimaryMappings []DCTXFieldMapping `xml:"PrimaryMapping,omitempty"`
}

-// DCTXFieldMapping represents a field mapping in a relation
+// DCTXFieldMapping represents a field mapping in a relation for multi-column foreign keys.
type DCTXFieldMapping struct {
    Guid  string `xml:"Guid,attr"`
    Field string `xml:"Field,attr"`
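Because these structs carry standard `encoding/xml` tags, a DCTX document decodes with `xml.Unmarshal`. A minimal sketch; the XML fragment is invented for illustration, not a real Clarion dictionary:

```go
package main

import (
	"encoding/xml"
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	// Invented fragment exercising only the Name attribute.
	const sample = `<Dictionary Name="Demo"></Dictionary>`

	var dict models.DCTXDictionary
	if err := xml.Unmarshal([]byte(sample), &dict); err != nil {
		panic(err)
	}
	fmt.Println(dict.Name) // Demo
}
```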
@@ -2,11 +2,14 @@ package models

import "fmt"

-// =============================================================================
-// Flat/Denormalized Views - Flattened structures with fully qualified names
-// =============================================================================
+// Flat/Denormalized Views
+//
+// This file provides flattened data structures with fully qualified names
+// for easier querying and analysis of database schemas without navigating
+// nested hierarchies.

-// FlatColumn represents a column with full context in a single structure
+// FlatColumn represents a column with full database context in a single structure.
+// It includes fully qualified names for easy identification and querying.
type FlatColumn struct {
    DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
    SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`

@@ -25,7 +28,7 @@ type FlatColumn struct {
    Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
}

-// ToFlatColumns converts a Database to a slice of FlatColumns
+// ToFlatColumns converts a Database to a slice of FlatColumns for denormalized access to all columns.
func (d *Database) ToFlatColumns() []*FlatColumn {
    flatColumns := make([]*FlatColumn, 0)

@@ -56,7 +59,7 @@ func (d *Database) ToFlatColumns() []*FlatColumn {
    return flatColumns
}

-// FlatTable represents a table with full context
+// FlatTable represents a table with full database context and aggregated counts.
type FlatTable struct {
    DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
    SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`

@@ -70,7 +73,7 @@ type FlatTable struct {
    IndexCount int `json:"index_count" yaml:"index_count" xml:"index_count"`
}

-// ToFlatTables converts a Database to a slice of FlatTables
+// ToFlatTables converts a Database to a slice of FlatTables for denormalized access to all tables.
func (d *Database) ToFlatTables() []*FlatTable {
    flatTables := make([]*FlatTable, 0)

@@ -94,7 +97,7 @@ func (d *Database) ToFlatTables() []*FlatTable {
    return flatTables
}

-// FlatConstraint represents a constraint with full context
+// FlatConstraint represents a constraint with full database context and resolved references.
type FlatConstraint struct {
    DatabaseName string `json:"database_name" yaml:"database_name" xml:"database_name"`
    SchemaName   string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`

@@ -112,7 +115,7 @@ type FlatConstraint struct {
    OnUpdate string `json:"on_update,omitempty" yaml:"on_update,omitempty" xml:"on_update,omitempty"`
}

-// ToFlatConstraints converts a Database to a slice of FlatConstraints
+// ToFlatConstraints converts a Database to a slice of FlatConstraints for denormalized access to all constraints.
func (d *Database) ToFlatConstraints() []*FlatConstraint {
    flatConstraints := make([]*FlatConstraint, 0)

@@ -148,7 +151,7 @@ func (d *Database) ToFlatConstraints() []*FlatConstraint {
    return flatConstraints
}

-// FlatRelationship represents a relationship with full context
+// FlatRelationship represents a relationship with full database context and fully qualified table names.
type FlatRelationship struct {
    DatabaseName     string `json:"database_name" yaml:"database_name" xml:"database_name"`
    RelationshipName string `json:"relationship_name" yaml:"relationship_name" xml:"relationship_name"`

@@ -164,7 +167,7 @@ type FlatRelationship struct {
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
}

-// ToFlatRelationships converts a Database to a slice of FlatRelationships
+// ToFlatRelationships converts a Database to a slice of FlatRelationships for denormalized access to all relationships.
func (d *Database) ToFlatRelationships() []*FlatRelationship {
    flatRelationships := make([]*FlatRelationship, 0)
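A short sketch of the flat views in use; only fields visible in this diff (`DatabaseName`, `SchemaName`) are relied on, and the database would normally be populated by a reader:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	db := models.InitDatabase("shop")
	// ... populate db via a reader in real use ...

	// Each FlatColumn carries its full context, so no tree walking is needed.
	for _, col := range db.ToFlatColumns() {
		fmt.Printf("%s.%s\n", col.DatabaseName, col.SchemaName)
	}
	fmt.Printf("%d constraints\n", len(db.ToFlatConstraints()))
}
```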
@@ -1,13 +1,19 @@
+// Package models provides the core data structures for representing database schemas.
+// It defines types for databases, schemas, tables, columns, relationships, constraints,
+// indexes, views, sequences, and other database objects. These models serve as the
+// intermediate representation for converting between various database schema formats.
package models

import "strings"

+// DatabaseType represents the type of database system.
type DatabaseType string

+// Supported database types.
const (
-    PostgresqlDatabaseType DatabaseType = "pgsql"
-    MSSQLDatabaseType      DatabaseType = "mssql"
-    SqlLiteDatabaseType    DatabaseType = "sqlite"
+    PostgresqlDatabaseType DatabaseType = "pgsql"  // PostgreSQL database
+    MSSQLDatabaseType      DatabaseType = "mssql"  // Microsoft SQL Server database
+    SqlLiteDatabaseType    DatabaseType = "sqlite" // SQLite database
)

// Database represents the complete database schema

@@ -21,11 +27,13 @@ type Database struct {
    SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
}

-// SQLNamer returns the database name in lowercase
+// SQLName returns the database name in lowercase for SQL compatibility.
func (d *Database) SQLName() string {
    return strings.ToLower(d.Name)
}

+// Schema represents a database schema, which is a logical grouping of database objects
+// such as tables, views, sequences, and relationships within a database.
type Schema struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`

@@ -40,13 +48,16 @@ type Schema struct {
    Sequence    uint            `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
    RefDatabase *Database       `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
    Relations   []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
+    Enums       []*Enum         `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
}

-// SQLName returns the schema name in lowercase
+// SQLName returns the schema name in lowercase for SQL compatibility.
func (d *Schema) SQLName() string {
    return strings.ToLower(d.Name)
}

+// Table represents a database table with its columns, constraints, indexes,
+// and relationships. Tables are the primary data storage structures in a database.
type Table struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`

@@ -62,11 +73,12 @@ type Table struct {
    RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
}

-// SQLName returns the table name in lowercase
+// SQLName returns the table name in lowercase for SQL compatibility.
func (d *Table) SQLName() string {
    return strings.ToLower(d.Name)
}

+// GetPrimaryKey returns the primary key column for the table, or nil if none exists.
func (m Table) GetPrimaryKey() *Column {
    for _, column := range m.Columns {
        if column.IsPrimaryKey {

@@ -76,6 +88,7 @@ func (m Table) GetPrimaryKey() *Column {
    return nil
}

+// GetForeignKeys returns all foreign key constraints for the table.
func (m Table) GetForeignKeys() []*Constraint {
    keys := make([]*Constraint, 0)

@@ -100,7 +113,7 @@ type View struct {
    RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
}

-// SQLName returns the view name in lowercase
+// SQLName returns the view name in lowercase for SQL compatibility.
func (d *View) SQLName() string {
    return strings.ToLower(d.Name)
}

@@ -123,7 +136,7 @@ type Sequence struct {
    RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
}

-// SQLName returns the sequence name in lowercase
+// SQLName returns the sequence name in lowercase for SQL compatibility.
func (d *Sequence) SQLName() string {
    return strings.ToLower(d.Name)
}

@@ -147,11 +160,13 @@ type Column struct {
    Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
}

-// SQLName returns the table name in lowercase
+// SQLName returns the column name in lowercase for SQL compatibility.
func (d *Column) SQLName() string {
    return strings.ToLower(d.Name)
}

+// Index represents a database index for optimizing query performance.
+// Indexes can be unique, partial, or include additional columns.
type Index struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`

@@ -167,19 +182,23 @@ type Index struct {
    Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
}

-// SQLName returns the Indexin lowercase
+// SQLName returns the index name in lowercase for SQL compatibility.
func (d *Index) SQLName() string {
    return strings.ToLower(d.Name)
}

+// RelationType represents the type of relationship between database tables.
type RelationType string

+// Supported relationship types.
const (
-    OneToOne   RelationType = "one_to_one"
-    OneToMany  RelationType = "one_to_many"
-    ManyToMany RelationType = "many_to_many"
+    OneToOne   RelationType = "one_to_one"   // One record in table A relates to one record in table B
+    OneToMany  RelationType = "one_to_many"  // One record in table A relates to many records in table B
+    ManyToMany RelationType = "many_to_many" // Many records in table A relate to many records in table B
)

+// Relationship represents a relationship between two database tables.
+// Relationships can be one-to-one, one-to-many, or many-to-many.
type Relationship struct {
    Name string       `json:"name" yaml:"name" xml:"name"`
    Type RelationType `json:"type" yaml:"type" xml:"type"`

@@ -197,11 +216,13 @@ type Relationship struct {
    Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
}

-// SQLName returns the Relationship lowercase
+// SQLName returns the relationship name in lowercase for SQL compatibility.
func (d *Relationship) SQLName() string {
    return strings.ToLower(d.Name)
}

+// Constraint represents a database constraint that enforces data integrity rules.
+// Constraints can be primary keys, foreign keys, unique constraints, check constraints, or not-null constraints.
type Constraint struct {
    Name string         `json:"name" yaml:"name" xml:"name"`
    Type ConstraintType `json:"type" yaml:"type" xml:"type"`

@@ -219,20 +240,37 @@ type Constraint struct {
    Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
}

+// SQLName returns the constraint name in lowercase for SQL compatibility.
func (d *Constraint) SQLName() string {
    return strings.ToLower(d.Name)
}

+// ConstraintType represents the type of database constraint.
type ConstraintType string

+// Enum represents a database enumeration type with a set of allowed values.
+type Enum struct {
+    Name   string   `json:"name" yaml:"name" xml:"name"`
+    Values []string `json:"values" yaml:"values" xml:"values"`
+    Schema string   `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
+}
+
+// SQLName returns the enum name in lowercase for SQL compatibility.
+func (d *Enum) SQLName() string {
+    return strings.ToLower(d.Name)
+}
+
+// Supported constraint types.
const (
-    PrimaryKeyConstraint ConstraintType = "primary_key"
-    ForeignKeyConstraint ConstraintType = "foreign_key"
-    UniqueConstraint     ConstraintType = "unique"
-    CheckConstraint      ConstraintType = "check"
-    NotNullConstraint    ConstraintType = "not_null"
+    PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
+    ForeignKeyConstraint ConstraintType = "foreign_key" // Foreign key references another table
+    UniqueConstraint     ConstraintType = "unique"      // Unique constraint ensures all values are different
+    CheckConstraint      ConstraintType = "check"       // Check constraint validates data against an expression
+    NotNullConstraint    ConstraintType = "not_null"    // Not null constraint requires a value
)

+// Script represents a database migration or initialization script.
+// Scripts can have dependencies and rollback capabilities.
type Script struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description" yaml:"description" xml:"description"`

@@ -245,11 +283,12 @@ type Script struct {
    Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
}

+// SQLName returns the script name in lowercase for SQL compatibility.
func (d *Script) SQLName() string {
    return strings.ToLower(d.Name)
}

-// Initialize functions
+// Initialization functions for creating new model instances with proper defaults.

// InitDatabase initializes a new Database with empty slices
func InitDatabase(name string) *Database {
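A quick sketch of the new `Enum` type in use; the enum name and values are illustrative:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	schema := &models.Schema{Name: "Public"}

	// Attach an enum to the schema; the values are made up for the example.
	status := &models.Enum{
		Name:   "Order_Status",
		Values: []string{"pending", "shipped", "delivered"},
		Schema: schema.Name,
	}
	schema.Enums = append(schema.Enums, status)

	fmt.Println(status.SQLName()) // order_status
}
```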
@@ -1,10 +1,12 @@
package models

-// =============================================================================
-// Summary/Compact Views - Lightweight views with essential fields
-// =============================================================================
+// Summary/Compact Views
+//
+// This file provides lightweight summary structures with essential fields
+// and aggregated counts for quick database schema overviews without loading
+// full object graphs.

-// DatabaseSummary provides a compact overview of a database
+// DatabaseSummary provides a compact overview of a database with aggregated statistics.
type DatabaseSummary struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`

@@ -15,7 +17,7 @@ type DatabaseSummary struct {
    TotalColumns int `json:"total_columns" yaml:"total_columns" xml:"total_columns"`
}

-// ToSummary converts a Database to a DatabaseSummary
+// ToSummary converts a Database to a DatabaseSummary with calculated counts.
func (d *Database) ToSummary() *DatabaseSummary {
    summary := &DatabaseSummary{
        Name: d.Name,

@@ -36,7 +38,7 @@ func (d *Database) ToSummary() *DatabaseSummary {
    return summary
}

-// SchemaSummary provides a compact overview of a schema
+// SchemaSummary provides a compact overview of a schema with aggregated statistics.
type SchemaSummary struct {
    Name        string `json:"name" yaml:"name" xml:"name"`
    Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`

@@ -47,7 +49,7 @@ type SchemaSummary struct {
    TotalConstraints int `json:"total_constraints" yaml:"total_constraints" xml:"total_constraints"`
}

-// ToSummary converts a Schema to a SchemaSummary
+// ToSummary converts a Schema to a SchemaSummary with calculated counts.
func (s *Schema) ToSummary() *SchemaSummary {
    summary := &SchemaSummary{
        Name: s.Name,

@@ -66,7 +68,7 @@ func (s *Schema) ToSummary() *SchemaSummary {
    return summary
}

-// TableSummary provides a compact overview of a table
+// TableSummary provides a compact overview of a table with aggregated statistics.
type TableSummary struct {
    Name   string `json:"name" yaml:"name" xml:"name"`
    Schema string `json:"schema" yaml:"schema" xml:"schema"`

@@ -79,7 +81,7 @@ type TableSummary struct {
    ForeignKeyCount int `json:"foreign_key_count" yaml:"foreign_key_count" xml:"foreign_key_count"`
}

-// ToSummary converts a Table to a TableSummary
+// ToSummary converts a Table to a TableSummary with calculated counts.
func (t *Table) ToSummary() *TableSummary {
    summary := &TableSummary{
        Name: t.Name,
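And a matching sketch for the summary views; only fields visible in this diff (`Name`, `TotalColumns`) are used:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	db := models.InitDatabase("shop")
	// ... populate db via a reader in real use ...

	summary := db.ToSummary()
	fmt.Printf("%s: %d columns\n", summary.Name, summary.TotalColumns)
}
```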
106 pkg/readers/bun/README.md Normal file
@@ -0,0 +1,106 @@
# Bun Reader

Reads Go source files containing Bun model definitions and extracts database schema information.

## Overview

The Bun Reader parses Go source code files that define Bun models (structs with `bun` struct tags) and converts them into RelSpec's internal database model representation.

## Features

- Parses Bun struct tags to extract column definitions
- Extracts table names from `bun:"table:tablename"` tags
- Identifies primary keys, foreign keys, and indexes
- Supports relationship detection
- Handles both single files and directories

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/models.go",
	}

	reader := bun.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Bun models and convert to JSON
relspec --input bun --in-file models/ --output json --out-file schema.json

# Convert Bun models to GORM
relspec --input bun --in-file models.go --output gorm --out-file gorm_models.go
```

## Supported Bun Tags

The reader recognizes the following Bun struct tags:

- `table` - Table name
- `column` - Column name
- `type` - SQL data type
- `pk` - Primary key
- `notnull` - NOT NULL constraint
- `autoincrement` - Auto-increment column
- `default` - Default value
- `unique` - Unique constraint
- `rel` - Relationship definition

## Example Bun Model

```go
package models

import (
	"time"

	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64     `bun:"id,pk,autoincrement"`
	Username  string    `bun:"username,notnull,unique"`
	Email     string    `bun:"email,notnull"`
	CreatedAt time.Time `bun:"created_at,notnull,default:now()"`

	Posts []*Post `bun:"rel:has-many,join:id=user_id"`
}

type Post struct {
	bun.BaseModel `bun:"table:posts,alias:p"`

	ID      int64  `bun:"id,pk"`
	UserID  int64  `bun:"user_id,notnull"`
	Title   string `bun:"title,notnull"`
	Content string `bun:"content"`

	User *User `bun:"rel:belongs-to,join:user_id=id"`
}
```

## Notes

- Test files (ending in `_test.go`) are automatically excluded
- The `bun.BaseModel` embedded struct is automatically recognized
- Schema defaults to `public` if not specified
@@ -382,6 +382,23 @@ func (r *Reader) isRelationship(tag string) bool {
    return strings.Contains(tag, "bun:\"rel:") || strings.Contains(tag, ",rel:")
}

+// getRelationType extracts the relationship type from a bun tag
+func (r *Reader) getRelationType(bunTag string) string {
+    if strings.Contains(bunTag, "rel:has-many") {
+        return "has-many"
+    }
+    if strings.Contains(bunTag, "rel:belongs-to") {
+        return "belongs-to"
+    }
+    if strings.Contains(bunTag, "rel:has-one") {
+        return "has-one"
+    }
+    if strings.Contains(bunTag, "rel:many-to-many") {
+        return "many-to-many"
+    }
+    return ""
+}
+
// parseRelationshipConstraints parses relationship fields to extract foreign key constraints
func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *ast.StructType, structMap map[string]*models.Table) {
    for _, field := range structType.Fields.List {

@@ -409,27 +426,51 @@ func (r *Reader) parseRelationshipConstraints(table *models.Table, structType *a
        }

        // Parse the join information: join:user_id=id
-        // This means: referencedTable.user_id = thisTable.id
+        // This means: thisTable.user_id = referencedTable.id
        joinInfo := r.parseJoinInfo(bunTag)
        if joinInfo == nil {
            continue
        }

-        // The FK is on the referenced table
+        // Determine which table gets the FK based on relationship type
+        relType := r.getRelationType(bunTag)
+
+        var fkTable *models.Table
+        var fkColumn, refTable, refColumn string
+
+        switch strings.ToLower(relType) {
+        case "belongs-to":
+            // For belongs-to: FK is on the current table
+            // join:user_id=id means table.user_id references referencedTable.id
+            fkTable = table
+            fkColumn = joinInfo.ForeignKey
+            refTable = referencedTable.Name
+            refColumn = joinInfo.ReferencedKey
+        case "has-many":
+            // For has-many: FK is on the referenced table
+            // join:id=user_id means referencedTable.user_id references table.id
+            fkTable = referencedTable
+            fkColumn = joinInfo.ReferencedKey
+            refTable = table.Name
+            refColumn = joinInfo.ForeignKey
+        default:
+            continue
+        }
+
        constraint := &models.Constraint{
-            Name:              fmt.Sprintf("fk_%s_%s", referencedTable.Name, table.Name),
+            Name:              fmt.Sprintf("fk_%s_%s", fkTable.Name, refTable),
            Type:              models.ForeignKeyConstraint,
-            Table:             referencedTable.Name,
-            Schema:            referencedTable.Schema,
-            Columns:           []string{joinInfo.ForeignKey},
-            ReferencedTable:   table.Name,
-            ReferencedSchema:  table.Schema,
-            ReferencedColumns: []string{joinInfo.ReferencedKey},
+            Table:             fkTable.Name,
+            Schema:            fkTable.Schema,
+            Columns:           []string{fkColumn},
+            ReferencedTable:   refTable,
+            ReferencedSchema:  fkTable.Schema,
+            ReferencedColumns: []string{refColumn},
            OnDelete:          "NO ACTION", // Bun doesn't specify this in tags
            OnUpdate:          "NO ACTION",
        }

-        referencedTable.Constraints[constraint.Name] = constraint
+        fkTable.Constraints[constraint.Name] = constraint
    }
}
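To make the direction concrete, a sketch of the constraint the `belongs-to` branch would now produce for the README's `Post.User` field with `join:user_id=id`; the literal values are spelled out for illustration:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
	// For belongs-to, the FK lives on the owning table (posts)
	// and points at the referenced table (users).
	fk := &models.Constraint{
		Name:              "fk_posts_users",
		Type:              models.ForeignKeyConstraint,
		Table:             "posts",
		Schema:            "public",
		Columns:           []string{"user_id"},
		ReferencedTable:   "users",
		ReferencedSchema:  "public",
		ReferencedColumns: []string{"id"},
		OnDelete:          "NO ACTION", // Bun tags don't carry ON DELETE/UPDATE actions
		OnUpdate:          "NO ACTION",
	}
	fmt.Println(fk.SQLName()) // fk_posts_users
}
```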
@@ -626,17 +667,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
    // - nullzero tag means the field is nullable (can be NULL in DB)
    // - absence of nullzero means the field is NOT NULL
    // - primitive types (int64, bool, string) are NOT NULL by default
+    column.NotNull = true
+
    if strings.Contains(bunTag, "nullzero") {
        column.NotNull = false
-    } else if r.isNullableGoType(fieldType) {
-        // SqlString, SqlInt, etc. without nullzero tag means NOT NULL
-        column.NotNull = true
    } else {
-        // Primitive types are NOT NULL by default
-        column.NotNull = true
+        column.NotNull = !r.isNullableGoType(fieldType)
    }

    // Primary keys are always NOT NULL
    if column.IsPrimaryKey {
        column.NotNull = true
    }
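The resulting nullability rules, illustrated on an invented struct:

```go
package models

import (
	"time"

	"github.com/uptrace/bun"
)

// Example is an invented model showing how the reader now derives NOT NULL.
type Example struct {
	bun.BaseModel `bun:"table:examples"`

	ID        int64     `bun:"id,pk"`               // NOT NULL: primary keys always are
	Name      string    `bun:"name"`                // NOT NULL: primitive type, no nullzero
	Bio       *string   `bun:"bio"`                 // nullable: pointer type
	DeletedAt time.Time `bun:"deleted_at,nullzero"` // nullable: explicit nullzero tag
}
```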
522 pkg/readers/bun/reader_test.go Normal file
@@ -0,0 +1,522 @@
package bun

import (
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if db == nil {
        t.Fatal("ReadDatabase() returned nil database")
    }

    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
    }

    table := schema.Tables[0]
    if table.Name != "users" {
        t.Errorf("Expected table name 'users', got '%s'", table.Name)
    }

    if len(table.Columns) != 6 {
        t.Errorf("Expected 6 columns, got %d", len(table.Columns))
    }

    // Verify id column - primary key should be NOT NULL
    idCol, exists := table.Columns["id"]
    if !exists {
        t.Fatal("Column 'id' not found")
    }
    if !idCol.IsPrimaryKey {
        t.Error("Column 'id' should be primary key")
    }
    if !idCol.AutoIncrement {
        t.Error("Column 'id' should be auto-increment")
    }
    if !idCol.NotNull {
        t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
    }
    if idCol.Type != "bigint" {
        t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
    }

    // Verify email column - explicit notnull tag should be NOT NULL
    emailCol, exists := table.Columns["email"]
    if !exists {
        t.Fatal("Column 'email' not found")
    }
    if !emailCol.NotNull {
        t.Error("Column 'email' should be NOT NULL (explicit 'notnull' tag)")
    }
    if emailCol.Type != "varchar" || emailCol.Length != 255 {
        t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
    }

    // Verify name column - primitive string type should be NOT NULL by default in Bun
    nameCol, exists := table.Columns["name"]
    if !exists {
        t.Fatal("Column 'name' not found")
    }
    if !nameCol.NotNull {
        t.Error("Column 'name' should be NOT NULL (primitive string type, no nullzero tag)")
    }
    if nameCol.Type != "text" {
        t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
    }

    // Verify age column - pointer type should be nullable (NOT NULL = false)
    ageCol, exists := table.Columns["age"]
    if !exists {
        t.Fatal("Column 'age' not found")
    }
    if ageCol.NotNull {
        t.Error("Column 'age' should be nullable (pointer type *int)")
    }
    if ageCol.Type != "integer" {
        t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
    }

    // Verify is_active column - primitive bool type should be NOT NULL by default
    isActiveCol, exists := table.Columns["is_active"]
    if !exists {
        t.Fatal("Column 'is_active' not found")
    }
    if !isActiveCol.NotNull {
        t.Error("Column 'is_active' should be NOT NULL (primitive bool type, no nullzero tag)")
    }
    if isActiveCol.Type != "boolean" {
        t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
    }

    // Verify created_at column - time.Time should be NOT NULL by default
    createdAtCol, exists := table.Columns["created_at"]
    if !exists {
        t.Fatal("Column 'created_at' not found")
    }
    if !createdAtCol.NotNull {
        t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
    }
    if createdAtCol.Type != "timestamp" {
        t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
    }

    // Verify unique index on email
    if len(table.Indexes) < 1 {
        t.Error("Expected at least 1 index on users table")
    }
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if db == nil {
        t.Fatal("ReadDatabase() returned nil database")
    }

    // Verify schema
    if len(db.Schemas) != 1 {
        t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    // Verify tables
    if len(schema.Tables) != 3 {
        t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
    }

    // Find tables
    var usersTable, postsTable, commentsTable *models.Table
    for _, table := range schema.Tables {
        switch table.Name {
        case "users":
            usersTable = table
        case "posts":
            postsTable = table
        case "comments":
            commentsTable = table
        }
    }

    if usersTable == nil {
        t.Fatal("Users table not found")
    }
    if postsTable == nil {
        t.Fatal("Posts table not found")
    }
    if commentsTable == nil {
        t.Fatal("Comments table not found")
    }

    // Verify users table - test NOT NULL logic for various field types
    if len(usersTable.Columns) != 10 {
        t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
    }

    // username - NOT NULL (explicit notnull tag)
    usernameCol, exists := usersTable.Columns["username"]
    if !exists {
        t.Fatal("Column 'username' not found")
    }
    if !usernameCol.NotNull {
        t.Error("Column 'username' should be NOT NULL (explicit 'notnull' tag)")
    }

    // first_name - nullable (pointer type)
    firstNameCol, exists := usersTable.Columns["first_name"]
    if !exists {
        t.Fatal("Column 'first_name' not found")
    }
    if firstNameCol.NotNull {
        t.Error("Column 'first_name' should be nullable (pointer type *string)")
    }

    // last_name - nullable (pointer type)
    lastNameCol, exists := usersTable.Columns["last_name"]
    if !exists {
        t.Fatal("Column 'last_name' not found")
    }
    if lastNameCol.NotNull {
        t.Error("Column 'last_name' should be nullable (pointer type *string)")
    }

    // bio - nullable (pointer type)
    bioCol, exists := usersTable.Columns["bio"]
    if !exists {
        t.Fatal("Column 'bio' not found")
    }
    if bioCol.NotNull {
        t.Error("Column 'bio' should be nullable (pointer type *string)")
    }

    // is_active - NOT NULL (primitive bool without nullzero)
    isActiveCol, exists := usersTable.Columns["is_active"]
    if !exists {
        t.Fatal("Column 'is_active' not found")
    }
    if !isActiveCol.NotNull {
        t.Error("Column 'is_active' should be NOT NULL (primitive bool type without nullzero)")
    }

    // Verify users table indexes
    if len(usersTable.Indexes) < 1 {
        t.Error("Expected at least 1 index on users table")
    }

    // Verify posts table
    if len(postsTable.Columns) != 11 {
        t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
    }

    // excerpt - nullable (pointer type)
    excerptCol, exists := postsTable.Columns["excerpt"]
    if !exists {
        t.Fatal("Column 'excerpt' not found")
    }
    if excerptCol.NotNull {
        t.Error("Column 'excerpt' should be nullable (pointer type *string)")
    }

    // published - NOT NULL (primitive bool without nullzero)
    publishedCol, exists := postsTable.Columns["published"]
    if !exists {
        t.Fatal("Column 'published' not found")
    }
    if !publishedCol.NotNull {
        t.Error("Column 'published' should be NOT NULL (primitive bool type without nullzero)")
    }

    // published_at - nullable (has nullzero tag)
    publishedAtCol, exists := postsTable.Columns["published_at"]
    if !exists {
        t.Fatal("Column 'published_at' not found")
    }
    if publishedAtCol.NotNull {
        t.Error("Column 'published_at' should be nullable (has nullzero tag)")
    }

    // view_count - NOT NULL (primitive int64 without nullzero)
    viewCountCol, exists := postsTable.Columns["view_count"]
    if !exists {
        t.Fatal("Column 'view_count' not found")
    }
    if !viewCountCol.NotNull {
        t.Error("Column 'view_count' should be NOT NULL (primitive int64 type without nullzero)")
    }

    // Verify posts table indexes
    if len(postsTable.Indexes) < 1 {
        t.Error("Expected at least 1 index on posts table")
    }

    // Verify comments table
    if len(commentsTable.Columns) != 6 {
        t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
    }

    // user_id - nullable (pointer type)
    userIDCol, exists := commentsTable.Columns["user_id"]
    if !exists {
        t.Fatal("Column 'user_id' not found in comments table")
    }
    if userIDCol.NotNull {
        t.Error("Column 'user_id' should be nullable (pointer type *int64)")
    }

    // post_id - NOT NULL (explicit notnull tag)
    postIDCol, exists := commentsTable.Columns["post_id"]
    if !exists {
        t.Fatal("Column 'post_id' not found in comments table")
    }
    if !postIDCol.NotNull {
        t.Error("Column 'post_id' should be NOT NULL (explicit 'notnull' tag)")
    }

    // Verify foreign key constraints are created from relationship tags
    // In Bun, relationships are defined with rel: tags
    // The constraints should be created on the referenced tables
    if len(postsTable.Constraints) > 0 {
        // Check if FK constraint exists
        var fkPostsUser *models.Constraint
        for _, c := range postsTable.Constraints {
            if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
                fkPostsUser = c
                break
            }
        }

        if fkPostsUser != nil {
            if len(fkPostsUser.Columns) != 1 || fkPostsUser.Columns[0] != "user_id" {
                t.Error("Expected FK column 'user_id'")
            }
            if len(fkPostsUser.ReferencedColumns) != 1 || fkPostsUser.ReferencedColumns[0] != "id" {
                t.Error("Expected FK referenced column 'id'")
            }
        }
    }

    if len(commentsTable.Constraints) > 0 {
        // Check if FK constraints exist
        var fkCommentsPost, fkCommentsUser *models.Constraint
        for _, c := range commentsTable.Constraints {
            if c.Type == models.ForeignKeyConstraint {
                if c.ReferencedTable == "posts" {
                    fkCommentsPost = c
                } else if c.ReferencedTable == "users" {
                    fkCommentsUser = c
                }
            }
        }

        if fkCommentsPost != nil {
            if len(fkCommentsPost.Columns) != 1 || fkCommentsPost.Columns[0] != "post_id" {
                t.Error("Expected FK column 'post_id'")
            }
        }

        if fkCommentsUser != nil {
            if len(fkCommentsUser.Columns) != 1 || fkCommentsUser.Columns[0] != "user_id" {
                t.Error("Expected FK column 'user_id'")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema == nil {
|
||||||
|
t.Fatal("ReadSchema() returned nil schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Errorf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadTable() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if table == nil {
|
||||||
|
t.Fatal("ReadTable() returned nil table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Directory(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should read both simple.go and complex.go
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
// Should have at least 3 tables from complex.go (users, posts, comments)
|
||||||
|
// plus 1 from simple.go (users) - but same table name, so may be overwritten
|
||||||
|
if len(schema.Tables) < 3 {
|
||||||
|
t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/file.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for invalid file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for empty file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_NullableTypes(t *testing.T) {
|
||||||
|
// This test specifically verifies the NOT NULL logic changes
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "complex.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find posts table
|
||||||
|
var postsTable *models.Table
|
||||||
|
for _, schema := range db.Schemas {
|
||||||
|
for _, table := range schema.Tables {
|
||||||
|
if table.Name == "posts" {
|
||||||
|
postsTable = table
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if postsTable == nil {
|
||||||
|
t.Fatal("Posts table not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test all nullability scenarios
|
||||||
|
tests := []struct {
|
||||||
|
column string
|
||||||
|
notNull bool
|
||||||
|
reason string
|
||||||
|
}{
|
||||||
|
{"id", true, "primary key"},
|
||||||
|
{"user_id", true, "explicit notnull tag"},
|
||||||
|
{"title", true, "explicit notnull tag"},
|
||||||
|
{"slug", true, "explicit notnull tag"},
|
||||||
|
{"content", true, "explicit notnull tag"},
|
||||||
|
{"excerpt", false, "pointer type *string"},
|
||||||
|
{"published", true, "primitive bool without nullzero"},
|
||||||
|
{"view_count", true, "primitive int64 without nullzero"},
|
||||||
|
{"published_at", false, "has nullzero tag"},
|
||||||
|
{"created_at", true, "time.Time without nullzero"},
|
||||||
|
{"updated_at", true, "time.Time without nullzero"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
col, exists := postsTable.Columns[tt.column]
|
||||||
|
if !exists {
|
||||||
|
t.Errorf("Column '%s' not found", tt.column)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.NotNull != tt.notNull {
|
||||||
|
if tt.notNull {
|
||||||
|
t.Errorf("Column '%s' should be NOT NULL (%s), but NotNull=%v",
|
||||||
|
tt.column, tt.reason, col.NotNull)
|
||||||
|
} else {
|
||||||
|
t.Errorf("Column '%s' should be nullable (%s), but NotNull=%v",
|
||||||
|
tt.column, tt.reason, col.NotNull)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
101
pkg/readers/dbml/README.md
Normal file
@@ -0,0 +1,101 @@
# DBML Reader

Reads Database Markup Language (DBML) files and extracts database schema information.

## Overview

The DBML Reader parses `.dbml` files that define database schemas using the DBML syntax (used by dbdiagram.io) and converts them into RelSpec's internal database model representation.

## Features

- Parses DBML syntax
- Extracts tables, columns, and relationships
- Supports DBML-specific features:
  - Table groups and notes
  - Enum definitions
  - Indexes
  - Foreign key relationships

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.dbml",
	}

	reader := dbml.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DBML file and convert to JSON
relspec --input dbml --in-file schema.dbml --output json --out-file schema.json

# Convert DBML to GORM models
relspec --input dbml --in-file database.dbml --output gorm --out-file models.go
```

## Example DBML File

```dbml
Table users {
  id bigserial [pk, increment]
  username varchar(50) [not null, unique]
  email varchar(100) [not null]
  created_at timestamp [not null, default: `now()`]

  Note: 'Users table'
}

Table posts {
  id bigserial [pk]
  user_id bigint [not null, ref: > users.id]
  title varchar(200) [not null]
  content text

  indexes {
    user_id
    (user_id, created_at) [name: 'idx_user_posts']
  }
}

Ref: posts.user_id > users.id [delete: cascade]
```

## DBML Features Supported

- Table definitions with columns
- Primary keys (`pk`)
- Not null constraints (`not null`)
- Unique constraints (`unique`)
- Default values (`default`)
- Inline references (`ref`)
- Standalone `Ref` blocks
- Indexes and composite indexes
- Table notes and column notes
- Enums

## Notes

- DBML is designed for database documentation and diagramming
- Schema name defaults to `public`
- Relationship cardinality is preserved
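For completeness, a minimal sketch of walking the parsed model — assuming the `db.Schemas` / `schema.Tables` / `table.Columns` shapes exercised by the reader tests elsewhere in this change:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
)

func main() {
	reader := dbml.NewReader(&readers.ReaderOptions{FilePath: "schema.dbml"})
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	// Columns is a map keyed by column name.
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			fmt.Printf("%s.%s (%d columns)\n", schema.Name, table.Name, len(table.Columns))
			for name, col := range table.Columns {
				fmt.Printf("  %s %s notnull=%v\n", name, col.Type, col.NotNull)
			}
		}
	}
}
```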
96
pkg/readers/dctx/README.md
Normal file
@@ -0,0 +1,96 @@
# DCTX Reader

Reads Clarion database dictionary (DCTX) files and extracts database schema information.

## Overview

The DCTX Reader parses Clarion dictionary files (`.dctx`) that define database structures in the Clarion development system and converts them into RelSpec's internal database model representation.

## Features

- Parses Clarion DCTX XML format
- Extracts file (table) and field (column) definitions
- Supports Clarion data types
- Handles keys (indexes) and relationships

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/database.dctx",
	}

	reader := dctx.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DCTX file and convert to JSON
relspec --input dctx --in-file legacy.dctx --output json --out-file schema.json

# Convert DCTX to GORM models for migration
relspec --input dctx --in-file app.dctx --output gorm --out-file models.go

# Export DCTX to PostgreSQL DDL
relspec --input dctx --in-file database.dctx --output pgsql --out-file schema.sql
```

## Example DCTX Structure

DCTX files are XML-based Clarion dictionary files that define:

- Files (equivalent to tables)
- Fields (columns) with Clarion-specific types
- Keys (indexes)
- Relationships between files

Common Clarion data types:

- `STRING` - Fixed-length string
- `CSTRING` - C-style null-terminated string
- `LONG` - 32-bit integer
- `SHORT` - 16-bit integer
- `DECIMAL` - Decimal number
- `REAL` - Floating point
- `DATE` - Date field
- `TIME` - Time field

## Type Mapping

The reader automatically maps Clarion data types to standard SQL types:

| Clarion Type | SQL Type |
|--------------|----------|
| STRING       | VARCHAR  |
| CSTRING      | VARCHAR  |
| LONG         | INTEGER  |
| SHORT        | SMALLINT |
| DECIMAL      | NUMERIC  |
| REAL         | REAL     |
| DATE         | DATE     |
| TIME         | TIME     |
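In code, this mapping amounts to a simple lookup table. A minimal sketch (the helper name and pass-through fallback are illustrative assumptions, not the reader's actual implementation):

```go
// clarionTypeToSQL maps a Clarion dictionary type to a standard SQL type.
// Hypothetical helper for illustration; the reader's own code may differ.
func clarionTypeToSQL(clarionType string) string {
	typeMap := map[string]string{
		"STRING":  "VARCHAR",
		"CSTRING": "VARCHAR",
		"LONG":    "INTEGER",
		"SHORT":   "SMALLINT",
		"DECIMAL": "NUMERIC",
		"REAL":    "REAL",
		"DATE":    "DATE",
		"TIME":    "TIME",
	}
	if sqlType, ok := typeMap[clarionType]; ok {
		return sqlType
	}
	// Assumption: unknown Clarion types pass through unchanged.
	return clarionType
}
```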
## Notes

- DCTX is specific to the Clarion development platform
- Useful for migrating legacy Clarion applications
- Schema name defaults to `public`
- Preserves field properties and constraints where possible
96
pkg/readers/drawdb/README.md
Normal file
@@ -0,0 +1,96 @@
# DrawDB Reader

Reads DrawDB schema files and extracts database schema information.

## Overview

The DrawDB Reader parses JSON files exported from DrawDB (a free online database design tool) and converts them into RelSpec's internal database model representation.

## Features

- Parses DrawDB JSON format
- Extracts tables, fields, and relationships
- Supports DrawDB-specific metadata
- Preserves visual layout information

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/diagram.json",
	}

	reader := drawdb.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read DrawDB export and convert to JSON schema
relspec --input drawdb --in-file diagram.json --output json --out-file schema.json

# Convert DrawDB design to GORM models
relspec --input drawdb --in-file design.json --output gorm --out-file models.go
```

## Example DrawDB Export

DrawDB exports database designs as JSON files containing:

```json
{
  "tables": [
    {
      "id": "1",
      "name": "users",
      "fields": [
        {
          "name": "id",
          "type": "BIGINT",
          "primary": true,
          "autoIncrement": true
        },
        {
          "name": "username",
          "type": "VARCHAR",
          "size": 50,
          "notNull": true,
          "unique": true
        }
      ]
    }
  ],
  "relationships": [
    {
      "source": "posts",
      "target": "users",
      "type": "many-to-one"
    }
  ]
}
```
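To make the shape concrete, a minimal sketch of decoding such an export with the standard library — the struct definitions here are illustrative and not the reader's actual types:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// drawDBExport mirrors the JSON above; hypothetical, for illustration only.
type drawDBExport struct {
	Tables []struct {
		ID     string `json:"id"`
		Name   string `json:"name"`
		Fields []struct {
			Name    string `json:"name"`
			Type    string `json:"type"`
			Size    int    `json:"size"`
			Primary bool   `json:"primary"`
			NotNull bool   `json:"notNull"`
			Unique  bool   `json:"unique"`
		} `json:"fields"`
	} `json:"tables"`
}

func main() {
	data, err := os.ReadFile("diagram.json")
	if err != nil {
		panic(err)
	}
	var export drawDBExport
	if err := json.Unmarshal(data, &export); err != nil {
		panic(err)
	}
	for _, t := range export.Tables {
		fmt.Printf("table %s: %d fields\n", t.Name, len(t.Fields))
	}
}
```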
## Notes

- DrawDB is a free online database designer at drawdb.vercel.app
- Export format preserves visual design metadata
- Useful for converting visual designs to code
- Schema defaults to `public`
90
pkg/readers/drizzle/README.md
Normal file
@@ -0,0 +1,90 @@
# Drizzle Reader

Reads TypeScript/JavaScript files containing Drizzle ORM schema definitions and extracts database schema information.

## Overview

The Drizzle Reader parses Drizzle ORM schema files (TypeScript/JavaScript) that define database tables using Drizzle's schema builder and converts them into RelSpec's internal database model representation.

## Features

- Parses Drizzle schema definitions
- Extracts table, column, and relationship information
- Supports various Drizzle column types
- Handles constraints and indexes

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.ts",
	}

	reader := drizzle.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Drizzle schema and convert to JSON
relspec --input drizzle --in-file schema.ts --output json --out-file schema.json

# Convert Drizzle to GORM models
relspec --input drizzle --in-file schema/ --output gorm --out-file models.go
```

## Example Drizzle Schema

```typescript
import { pgTable, serial, varchar, text, timestamp, integer } from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';

export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  username: varchar('username', { length: 50 }).notNull().unique(),
  email: varchar('email', { length: 100 }).notNull(),
  createdAt: timestamp('created_at').notNull().defaultNow(),
});

export const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
  title: varchar('title', { length: 200 }).notNull(),
  content: text('content'),
});

export const usersRelations = relations(users, ({ many }) => ({
  posts: many(posts),
}));

export const postsRelations = relations(posts, ({ one }) => ({
  user: one(users, {
    fields: [posts.userId],
    references: [users.id],
  }),
}));
```

## Notes

- Supports both PostgreSQL and MySQL Drizzle schemas
- Extracts relationship information from `relations` definitions
- Schema defaults to `public` for PostgreSQL
619
pkg/readers/drizzle/reader.go
Normal file
@@ -0,0 +1,619 @@
package drizzle

import (
	"bufio"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for Drizzle schema format
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new Drizzle reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads and parses Drizzle schema input, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for Drizzle reader")
	}

	// Check if it's a file or directory
	info, err := os.Stat(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to stat path: %w", err)
	}

	if info.IsDir() {
		// Read all .ts files in the directory
		return r.readDirectory(r.options.FilePath)
	}

	// Read single file
	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	return r.parseDrizzle(string(content))
}

// ReadSchema reads and parses Drizzle schema input, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}

	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in Drizzle schema")
	}

	// Return the first schema
	return db.Schemas[0], nil
}

// ReadTable reads and parses Drizzle schema input, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}

	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in Drizzle schema")
	}

	// Return the first table
	return schema.Tables[0], nil
}

// readDirectory reads all .ts files in a directory and parses them
func (r *Reader) readDirectory(dirPath string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Drizzle
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)

	// Read all .ts files
	files, err := filepath.Glob(filepath.Join(dirPath, "*.ts"))
	if err != nil {
		return nil, fmt.Errorf("failed to glob directory: %w", err)
	}

	// Parse each file
	for _, file := range files {
		content, err := os.ReadFile(file)
		if err != nil {
			return nil, fmt.Errorf("failed to read file %s: %w", file, err)
		}

		// Parse and merge into schema
		fileDB, err := r.parseDrizzle(string(content))
		if err != nil {
			return nil, fmt.Errorf("failed to parse file %s: %w", file, err)
		}

		// Merge schemas
		if len(fileDB.Schemas) > 0 {
			fileSchema := fileDB.Schemas[0]
			schema.Tables = append(schema.Tables, fileSchema.Tables...)
			schema.Enums = append(schema.Enums, fileSchema.Enums...)
		}
	}

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parseDrizzle parses Drizzle schema content and returns a Database model
func (r *Reader) parseDrizzle(content string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Drizzle (PostgreSQL)
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)
	db.DatabaseType = models.PostgresqlDatabaseType

	scanner := bufio.NewScanner(strings.NewReader(content))

	// Regex patterns
	// Match: export const users = pgTable('users', {
	pgTableRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgTable\s*\(\s*['"](\w+)['"]`)
	// Match: export const userRole = pgEnum('UserRole', ['admin', 'user']);
	pgEnumRegex := regexp.MustCompile(`export\s+const\s+(\w+)\s*=\s*pgEnum\s*\(\s*['"](\w+)['"]`)

	// State tracking
	var currentTable *models.Table
	var currentTableVarName string
	var inTableBlock bool
	var blockDepth int
	var tableLines []string

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for pgEnum definition
		if matches := pgEnumRegex.FindStringSubmatch(trimmed); matches != nil {
			enum := r.parsePgEnum(trimmed, matches)
			if enum != nil {
				schema.Enums = append(schema.Enums, enum)
			}
			continue
		}

		// Check for pgTable definition
		if matches := pgTableRegex.FindStringSubmatch(trimmed); matches != nil {
			varName := matches[1]
			tableName := matches[2]

			currentTableVarName = varName
			currentTable = models.InitTable(tableName, "public")
			inTableBlock = true
			// Count braces in the first line
			blockDepth = strings.Count(line, "{") - strings.Count(line, "}")
			tableLines = []string{line}
			continue
		}

		// If we're in a table block, accumulate lines
		if inTableBlock {
			tableLines = append(tableLines, line)

			// Track brace depth
			blockDepth += strings.Count(line, "{")
			blockDepth -= strings.Count(line, "}")

			// Check if we've closed the table definition
			if blockDepth < 0 || (blockDepth == 0 && strings.Contains(line, ");")) {
				// Parse the complete table block
				if currentTable != nil {
					r.parseTableBlock(tableLines, currentTable, currentTableVarName)
					schema.Tables = append(schema.Tables, currentTable)
					currentTable = nil
				}
				inTableBlock = false
				tableLines = nil
			}
		}
	}

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parsePgEnum parses a pgEnum definition
func (r *Reader) parsePgEnum(line string, matches []string) *models.Enum {
	// matches[1] = variable name
	// matches[2] = enum name

	enumName := matches[2]

	// Extract values from the array
	// Example: pgEnum('UserRole', ['admin', 'user', 'guest'])
	valuesRegex := regexp.MustCompile(`\[(.*?)\]`)
	valuesMatch := valuesRegex.FindStringSubmatch(line)
	if valuesMatch == nil {
		return nil
	}

	valuesStr := valuesMatch[1]
	// Split by comma and clean up
	valueParts := strings.Split(valuesStr, ",")
	values := make([]string, 0)
	for _, part := range valueParts {
		// Remove quotes and whitespace
		cleaned := strings.TrimSpace(part)
		cleaned = strings.Trim(cleaned, "'\"")
		if cleaned != "" {
			values = append(values, cleaned)
		}
	}

	return &models.Enum{
		Name:   enumName,
		Values: values,
		Schema: "public",
	}
}

// parseTableBlock parses a complete pgTable definition block
func (r *Reader) parseTableBlock(lines []string, table *models.Table, tableVarName string) {
	// Join all lines into a single string for easier parsing
	fullText := strings.Join(lines, "\n")

	// Extract the columns block and index callback separately
	// The structure is: pgTable('name', { columns }, (table) => [indexes])

	// Find the main object block (columns)
	columnsStart := strings.Index(fullText, "{")
	if columnsStart == -1 {
		return
	}

	// Find matching closing brace for columns
	depth := 0
	columnsEnd := -1
	for i := columnsStart; i < len(fullText); i++ {
		if fullText[i] == '{' {
			depth++
		} else if fullText[i] == '}' {
			depth--
			if depth == 0 {
				columnsEnd = i
				break
			}
		}
	}

	if columnsEnd == -1 {
		return
	}

	columnsBlock := fullText[columnsStart+1 : columnsEnd]

	// Parse columns
	r.parseColumnsBlock(columnsBlock, table, tableVarName)

	// Check for index callback: , (table) => [ or , ({ col1, col2 }) => [
	// Match: }, followed by arrow function with any parameters
	// Use (?s) flag to make . match newlines
	indexCallbackRegex := regexp.MustCompile(`(?s)}\s*,\s*\(.*?\)\s*=>\s*\[`)
	if indexCallbackRegex.MatchString(fullText[columnsEnd:]) {
		// Find the index array
		indexStart := strings.Index(fullText[columnsEnd:], "[")
		if indexStart != -1 {
			indexStart += columnsEnd
			indexDepth := 0
			indexEnd := -1
			for i := indexStart; i < len(fullText); i++ {
				if fullText[i] == '[' {
					indexDepth++
				} else if fullText[i] == ']' {
					indexDepth--
					if indexDepth == 0 {
						indexEnd = i
						break
					}
				}
			}

			if indexEnd != -1 {
				indexBlock := fullText[indexStart+1 : indexEnd]
				r.parseIndexBlock(indexBlock, table, tableVarName)
			}
		}
	}
}

// parseColumnsBlock parses the columns block of a table
func (r *Reader) parseColumnsBlock(block string, table *models.Table, tableVarName string) {
	// Split by lines and parse each column definition
	lines := strings.Split(block, "\n")

	for _, line := range lines {
		trimmed := strings.TrimSpace(line)
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Match: fieldName: columnType('columnName').modifier().modifier(),
		// Example: id: integer('id').primaryKey(),
		columnRegex := regexp.MustCompile(`(\w+):\s*(\w+)\s*\(`)
		matches := columnRegex.FindStringSubmatch(trimmed)
		if matches == nil {
			continue
		}

		fieldName := matches[1]
		columnType := matches[2]

		// Parse the column definition
		col := r.parseColumnDefinition(trimmed, fieldName, columnType, table)
		if col != nil {
			table.Columns[col.Name] = col
		}
	}
}

// parseColumnDefinition parses a single column definition line
func (r *Reader) parseColumnDefinition(line, fieldName, drizzleType string, table *models.Table) *models.Column {
	// Check for enum column syntax: pgEnum('EnumName')('column_name')
	enumRegex := regexp.MustCompile(`pgEnum\s*\(['"](\w+)['"]\)\s*\(['"](\w+)['"]\)`)
	if enumMatch := enumRegex.FindStringSubmatch(line); enumMatch != nil {
		enumName := enumMatch[1]
		columnName := enumMatch[2]

		column := models.InitColumn(columnName, table.Name, table.Schema)
		column.Type = enumName
		column.NotNull = false

		// Parse modifiers
		r.parseColumnModifiers(line, column, table)
		return column
	}

	// Extract column name from the first argument
	// Example: integer('id')
	nameRegex := regexp.MustCompile(`\w+\s*\(['"](\w+)['"]\)`)
	nameMatch := nameRegex.FindStringSubmatch(line)
	if nameMatch == nil {
		return nil
	}

	columnName := nameMatch[1]
	column := models.InitColumn(columnName, table.Name, table.Schema)

	// Map Drizzle type to SQL type
	column.Type = r.drizzleTypeToSQL(drizzleType)

	// Default: columns are nullable unless specified
	column.NotNull = false

	// Parse modifiers
	r.parseColumnModifiers(line, column, table)

	return column
}

// drizzleTypeToSQL converts Drizzle column types to SQL types
func (r *Reader) drizzleTypeToSQL(drizzleType string) string {
	typeMap := map[string]string{
		// Integer types
		"integer":  "integer",
		"bigint":   "bigint",
		"smallint": "smallint",

		// Serial types
		"serial":      "serial",
		"bigserial":   "bigserial",
		"smallserial": "smallserial",

		// Numeric types
		"numeric":         "numeric",
		"real":            "real",
		"doublePrecision": "double precision",

		// Character types
		"text":    "text",
		"varchar": "varchar",
		"char":    "char",

		// Boolean
		"boolean": "boolean",

		// Binary
		"bytea": "bytea",

		// JSON
		"json":  "json",
		"jsonb": "jsonb",

		// Date/Time
		"time":      "time",
		"timestamp": "timestamp",
		"date":      "date",
		"interval":  "interval",

		// UUID
		"uuid": "uuid",

		// Geometric
		"point": "point",
		"line":  "line",
	}

	if sqlType, ok := typeMap[drizzleType]; ok {
		return sqlType
	}

	// If not found, might be an enum - return as-is
	return drizzleType
}

// parseColumnModifiers parses column modifiers like .primaryKey(), .notNull(), etc.
func (r *Reader) parseColumnModifiers(line string, column *models.Column, table *models.Table) {
	// Check for .primaryKey()
	if strings.Contains(line, ".primaryKey()") {
		column.IsPrimaryKey = true
		column.NotNull = true
	}

	// Check for .notNull()
	if strings.Contains(line, ".notNull()") {
		column.NotNull = true
	}

	// Check for .unique()
	if strings.Contains(line, ".unique()") {
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s", column.Name),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = []string{column.Name}
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint
	}

	// Check for .default(...)
	// Need to handle nested backticks and parentheses in SQL expressions
	defaultIdx := strings.Index(line, ".default(")
	if defaultIdx != -1 {
		start := defaultIdx + len(".default(")
		depth := 1
		inBacktick := false
		i := start

		for i < len(line) && depth > 0 {
			ch := line[i]
			if ch == '`' {
				inBacktick = !inBacktick
			} else if !inBacktick {
				switch ch {
				case '(':
					depth++
				case ')':
					depth--
				}
			}
			i++
		}

		if depth == 0 {
			defaultValue := strings.TrimSpace(line[start : i-1])
			r.parseDefaultValue(defaultValue, column)
		}
	}

	// Check for .generatedAlwaysAsIdentity()
	if strings.Contains(line, ".generatedAlwaysAsIdentity()") {
		column.AutoIncrement = true
	}

	// Check for .references(() => otherTable.column)
	referencesRegex := regexp.MustCompile(`\.references\(\(\)\s*=>\s*(\w+)\.(\w+)\)`)
	if matches := referencesRegex.FindStringSubmatch(line); matches != nil {
		refTableVar := matches[1]
		refColumn := matches[2]

		// Create FK constraint
		constraintName := fmt.Sprintf("fk_%s_%s", table.Name, column.Name)
		constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
		constraint.Schema = table.Schema
		constraint.Table = table.Name
		constraint.Columns = []string{column.Name}
		constraint.ReferencedSchema = table.Schema // Assume same schema
		constraint.ReferencedTable = r.varNameToTableName(refTableVar)
		constraint.ReferencedColumns = []string{refColumn}

		table.Constraints[constraint.Name] = constraint
	}
}

// parseDefaultValue parses a default value expression
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
	defaultExpr = strings.TrimSpace(defaultExpr)

	// Handle SQL expressions like sql`now()`
	sqlRegex := regexp.MustCompile("sql`([^`]+)`")
	if match := sqlRegex.FindStringSubmatch(defaultExpr); match != nil {
		column.Default = match[1]
		return
	}

	// Handle boolean values
	if defaultExpr == "true" {
		column.Default = true
		return
	}
	if defaultExpr == "false" {
		column.Default = false
		return
	}

	// Handle string literals
	if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
		column.Default = defaultExpr[1 : len(defaultExpr)-1]
		return
	}
	if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
		column.Default = defaultExpr[1 : len(defaultExpr)-1]
		return
	}

	// Otherwise keep the raw expression (numbers and other literals) as-is
	column.Default = defaultExpr
}

// parseIndexBlock parses the index callback block
func (r *Reader) parseIndexBlock(block string, table *models.Table, tableVarName string) {
	// Split by lines
	lines := strings.Split(block, "\n")

	for _, line := range lines {
		trimmed := strings.TrimSpace(line)
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Match: index('index_name').on(table.col1, table.col2)
		// or: uniqueIndex('index_name').on(table.col1, table.col2)
		indexRegex := regexp.MustCompile(`(uniqueIndex|index)\s*\(['"](\w+)['"]\)\s*\.on\s*\((.*?)\)`)
		matches := indexRegex.FindStringSubmatch(trimmed)
		if matches == nil {
			continue
		}

		indexType := matches[1]
		indexName := matches[2]
		columnsStr := matches[3]

		// Parse column list
		columnParts := strings.Split(columnsStr, ",")
		columns := make([]string, 0)
		for _, part := range columnParts {
			// Remove table prefix: table.column -> column
			cleaned := strings.TrimSpace(part)
			if strings.Contains(cleaned, ".") {
				parts := strings.Split(cleaned, ".")
				cleaned = parts[len(parts)-1]
			}
			columns = append(columns, cleaned)
		}

		if indexType == "uniqueIndex" {
			// Create unique constraint
			constraint := models.InitConstraint(indexName, models.UniqueConstraint)
			constraint.Schema = table.Schema
			constraint.Table = table.Name
			constraint.Columns = columns
			table.Constraints[constraint.Name] = constraint
		} else {
			// Create index
			index := models.InitIndex(indexName, table.Name, table.Schema)
			index.Columns = columns
			index.Unique = false
			table.Indexes[index.Name] = index
		}
	}
}

// varNameToTableName converts a variable name to a table name
// For now, just return as-is (could add inflection later)
func (r *Reader) varNameToTableName(varName string) string {
	// TODO: Could add conversion logic here if needed
	// For now, assume variable name matches table name
	return varName
}
141
pkg/readers/gorm/README.md
Normal file
@@ -0,0 +1,141 @@
# GORM Reader

Reads Go source files containing GORM model definitions and extracts database schema information.

## Overview

The GORM Reader parses Go source code files that define GORM models (structs with `gorm` struct tags) and converts them into RelSpec's internal database model representation. It supports reading from individual files or entire directories.

## Features

- Parses GORM struct tags to extract column definitions
- Extracts table names from `TableName()` methods
- Identifies primary keys, foreign keys, and indexes
- Supports relationship detection (has-many, belongs-to)
- Handles both single files and directories

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
)

func main() {
	// Read from a single file
	options := &readers.ReaderOptions{
		FilePath: "/path/to/models.go",
	}

	reader := gorm.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### Reading from Directory

```go
// Read all .go files from a directory
options := &readers.ReaderOptions{
	FilePath: "/path/to/models/",
}

reader := gorm.NewReader(options)
db, err := reader.ReadDatabase()
if err != nil {
	panic(err)
}
```

### CLI Example

```bash
# Read GORM models and convert to JSON
relspec --input gorm --in-file models/ --output json --out-file schema.json

# Convert GORM models to Bun
relspec --input gorm --in-file models.go --output bun --out-file bun_models.go
```

## Supported GORM Tags

The reader recognizes the following GORM struct tags:

- `column` - Column name
- `type` - SQL data type (e.g., `varchar(255)`, `bigint`)
- `primaryKey` or `primary_key` - Mark as primary key
- `not null` - NOT NULL constraint
- `autoIncrement` - Auto-increment column
- `default` - Default value
- `size` - Column size/length
- `index` - Create index
- `uniqueIndex` - Create unique index
- `unique` - Unique constraint
- `foreignKey` - Foreign key column
- `references` - Referenced column
- `constraint` - Constraint behavior (OnDelete, OnUpdate)

## Example GORM Model

```go
package models

import (
	"time"

	"gorm.io/gorm"
)

type ModelUser struct {
	gorm.Model
	ID        int64     `gorm:"column:id;type:bigint;primaryKey;autoIncrement"`
	Username  string    `gorm:"column:username;type:varchar(50);not null;uniqueIndex"`
	Email     string    `gorm:"column:email;type:varchar(100);not null"`
	CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()"`

	// Relationships
	Posts []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE"`
}

func (ModelUser) TableName() string {
	return "public.users"
}

type ModelPost struct {
	ID      int64  `gorm:"column:id;type:bigint;primaryKey"`
	UserID  int64  `gorm:"column:user_id;type:bigint;not null"`
	Title   string `gorm:"column:title;type:varchar(200);not null"`
	Content string `gorm:"column:content;type:text"`

	// Belongs-to relationship
	User *ModelUser `gorm:"foreignKey:UserID;references:ID"`
}

func (ModelPost) TableName() string {
	return "public.posts"
}
```

## Notes

- Test files (ending in `_test.go`) are automatically excluded
- The `gorm.Model` embedded struct is automatically recognized and skipped
- Table names are derived from struct names if a `TableName()` method is not present
- Schema defaults to `public` if not specified in `TableName()`
- Relationships are inferred from GORM relationship tags

## Limitations

- Complex relationship types (many-to-many with join tables) may need manual verification
- Custom GORM types may not be fully supported
- Some advanced GORM features may not be captured
@@ -693,7 +693,7 @@ func (r *Reader) deriveTableName(structName string) string {
 
 // parseColumn parses a struct field into a Column model
 // Returns the column and any inline reference information (e.g., "mainaccount(id_mainaccount)")
-func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (*models.Column, string) {
+func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, sequence uint) (col *models.Column, ref string) {
 	// Extract gorm tag
 	gormTag := r.extractGormTag(tag)
 	if gormTag == "" {
@@ -756,20 +756,14 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
 	// - explicit "not null" tag means NOT NULL
 	// - absence of "not null" tag with sql_types means nullable
 	// - primitive types (string, int64, bool) default to NOT NULL unless explicitly nullable
-	column.NotNull = false
+	// Primary keys are always NOT NULL
 	if _, hasNotNull := parts["not null"]; hasNotNull {
 		column.NotNull = true
 	} else {
-		// If no explicit "not null" tag, check the Go type
-		if r.isNullableGoType(fieldType) {
-			// sql_types.SqlString, etc. are nullable by default
-			column.NotNull = false
-		} else {
-			// Primitive types default to NOT NULL
-			column.NotNull = false // Default to nullable unless explicitly set
-		}
+		// sql_types.SqlString, etc. are nullable by default
+		column.NotNull = !r.isNullableGoType(fieldType)
 	}
-	// Primary keys are always NOT NULL
+
 	if column.IsPrimaryKey {
 		column.NotNull = true
 	}
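The hunk above collapses the nullability decision into a single rule: an explicit `not null` tag wins, otherwise nullable Go types (pointers, `sql_types` wrappers) stay nullable, and primary keys are always NOT NULL. A minimal standalone sketch of that rule (names are illustrative; the real code operates on the reader's `column` and tag `parts`):

```go
// resolveNotNull restates the logic of the diff above as a pure function.
// hasNotNullTag: an explicit "not null" gorm tag was present.
// isNullableGoType: the field's Go type is nullable (pointer, sql_types wrapper).
func resolveNotNull(hasNotNullTag, isNullableGoType, isPrimaryKey bool) bool {
	notNull := hasNotNullTag || !isNullableGoType
	if isPrimaryKey {
		notNull = true
	}
	return notNull
}
```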
464
pkg/readers/gorm/reader_test.go
Normal file
@@ -0,0 +1,464 @@
package gorm

import (
	"path/filepath"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	if len(db.Schemas) == 0 {
		t.Fatal("Expected at least one schema")
	}

	schema := db.Schemas[0]
	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	if len(schema.Tables) != 1 {
		t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
	}

	table := schema.Tables[0]
	if table.Name != "users" {
		t.Errorf("Expected table name 'users', got '%s'", table.Name)
	}

	if len(table.Columns) != 6 {
		t.Errorf("Expected 6 columns, got %d", len(table.Columns))
	}

	// Verify id column - primary key should be NOT NULL
	idCol, exists := table.Columns["id"]
	if !exists {
		t.Fatal("Column 'id' not found")
	}
	if !idCol.IsPrimaryKey {
		t.Error("Column 'id' should be primary key")
	}
	if !idCol.AutoIncrement {
		t.Error("Column 'id' should be auto-increment")
	}
	if !idCol.NotNull {
		t.Error("Column 'id' should be NOT NULL (primary keys are always NOT NULL)")
	}
	if idCol.Type != "bigint" {
		t.Errorf("Expected id type 'bigint', got '%s'", idCol.Type)
	}

	// Verify email column - explicit "not null" tag should be NOT NULL
	emailCol, exists := table.Columns["email"]
	if !exists {
		t.Fatal("Column 'email' not found")
	}
	if !emailCol.NotNull {
		t.Error("Column 'email' should be NOT NULL (explicit 'not null' tag)")
	}
	if emailCol.Type != "varchar" || emailCol.Length != 255 {
		t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
	}

	// Verify name column - primitive string type should be NOT NULL by default
	nameCol, exists := table.Columns["name"]
	if !exists {
		t.Fatal("Column 'name' not found")
	}
	if !nameCol.NotNull {
		t.Error("Column 'name' should be NOT NULL (primitive string type defaults to NOT NULL)")
	}
	if nameCol.Type != "text" {
		t.Errorf("Expected name type 'text', got '%s'", nameCol.Type)
	}

	// Verify age column - pointer type should be nullable (NOT NULL = false)
	ageCol, exists := table.Columns["age"]
	if !exists {
		t.Fatal("Column 'age' not found")
	}
	if ageCol.NotNull {
		t.Error("Column 'age' should be nullable (pointer type *int)")
	}
	if ageCol.Type != "integer" {
		t.Errorf("Expected age type 'integer', got '%s'", ageCol.Type)
	}

	// Verify is_active column - primitive bool type should be NOT NULL by default
	isActiveCol, exists := table.Columns["is_active"]
	if !exists {
		t.Fatal("Column 'is_active' not found")
	}
	if !isActiveCol.NotNull {
		t.Error("Column 'is_active' should be NOT NULL (primitive bool type defaults to NOT NULL)")
	}
	if isActiveCol.Type != "boolean" {
		t.Errorf("Expected is_active type 'boolean', got '%s'", isActiveCol.Type)
	}

	// Verify created_at column - time.Time should be NOT NULL by default
	createdAtCol, exists := table.Columns["created_at"]
	if !exists {
		t.Fatal("Column 'created_at' not found")
	}
	if !createdAtCol.NotNull {
		t.Error("Column 'created_at' should be NOT NULL (time.Time is NOT NULL by default)")
	}
	if createdAtCol.Type != "timestamp" {
		t.Errorf("Expected created_at type 'timestamp', got '%s'", createdAtCol.Type)
	}
	if createdAtCol.Default != "now()" {
		t.Errorf("Expected created_at default 'now()', got '%v'", createdAtCol.Default)
	}
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
	opts := &readers.ReaderOptions{
		FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "complex.go"),
	}

	reader := NewReader(opts)
	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase() error = %v", err)
	}

	if db == nil {
		t.Fatal("ReadDatabase() returned nil database")
	}

	// Verify schema
	if len(db.Schemas) != 1 {
		t.Fatalf("Expected 1 schema, got %d", len(db.Schemas))
	}

	schema := db.Schemas[0]
	if schema.Name != "public" {
		t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
	}

	// Verify tables
	if len(schema.Tables) != 3 {
		t.Fatalf("Expected 3 tables, got %d", len(schema.Tables))
	}

	// Find tables
	var usersTable, postsTable, commentsTable *models.Table
	for _, table := range schema.Tables {
		switch table.Name {
		case "users":
			usersTable = table
		case "posts":
			postsTable = table
		case "comments":
			commentsTable = table
		}
	}

	if usersTable == nil {
		t.Fatal("Users table not found")
	}
	if postsTable == nil {
		t.Fatal("Posts table not found")
	}
	if commentsTable == nil {
		t.Fatal("Comments table not found")
	}

	// Verify users table - test NOT NULL logic for various field types
	if len(usersTable.Columns) != 10 {
		t.Errorf("Expected 10 columns in users table, got %d", len(usersTable.Columns))
	}

	// username - NOT NULL (explicit tag)
	usernameCol, exists := usersTable.Columns["username"]
	if !exists {
		t.Fatal("Column 'username' not found")
	}
	if !usernameCol.NotNull {
		t.Error("Column 'username' should be NOT NULL (explicit 'not null' tag)")
	}

	// first_name - nullable (pointer type)
	firstNameCol, exists := usersTable.Columns["first_name"]
	if !exists {
		t.Fatal("Column 'first_name' not found")
	}
	if firstNameCol.NotNull {
		t.Error("Column 'first_name' should be nullable (pointer type *string)")
	}

	// last_name - nullable (pointer type)
	lastNameCol, exists := usersTable.Columns["last_name"]
	if !exists {
		t.Fatal("Column 'last_name' not found")
	}
	if lastNameCol.NotNull {
		t.Error("Column 'last_name' should be nullable (pointer type *string)")
	}

	// bio - nullable (pointer type)
|
||||||
|
bioCol, exists := usersTable.Columns["bio"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'bio' not found")
|
||||||
|
}
|
||||||
|
if bioCol.NotNull {
|
||||||
|
t.Error("Column 'bio' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// is_active - NOT NULL (primitive bool)
|
||||||
|
isActiveCol, exists := usersTable.Columns["is_active"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'is_active' not found")
|
||||||
|
}
|
||||||
|
if !isActiveCol.NotNull {
|
||||||
|
t.Error("Column 'is_active' should be NOT NULL (primitive bool type)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify users table indexes
|
||||||
|
if len(usersTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on users table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table
|
||||||
|
if len(postsTable.Columns) != 11 {
|
||||||
|
t.Errorf("Expected 11 columns in posts table, got %d", len(postsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// excerpt - nullable (pointer type)
|
||||||
|
excerptCol, exists := postsTable.Columns["excerpt"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'excerpt' not found")
|
||||||
|
}
|
||||||
|
if excerptCol.NotNull {
|
||||||
|
t.Error("Column 'excerpt' should be nullable (pointer type *string)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// published - NOT NULL (primitive bool with default)
|
||||||
|
publishedCol, exists := postsTable.Columns["published"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published' not found")
|
||||||
|
}
|
||||||
|
if !publishedCol.NotNull {
|
||||||
|
t.Error("Column 'published' should be NOT NULL (primitive bool type)")
|
||||||
|
}
|
||||||
|
if publishedCol.Default != "false" {
|
||||||
|
t.Errorf("Expected published default 'false', got '%v'", publishedCol.Default)
|
||||||
|
}
|
||||||
|
|
||||||
|
// published_at - nullable (pointer to time.Time)
|
||||||
|
publishedAtCol, exists := postsTable.Columns["published_at"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'published_at' not found")
|
||||||
|
}
|
||||||
|
if publishedAtCol.NotNull {
|
||||||
|
t.Error("Column 'published_at' should be nullable (pointer type *time.Time)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// view_count - NOT NULL (primitive int64 with default)
|
||||||
|
viewCountCol, exists := postsTable.Columns["view_count"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'view_count' not found")
|
||||||
|
}
|
||||||
|
if !viewCountCol.NotNull {
|
||||||
|
t.Error("Column 'view_count' should be NOT NULL (primitive int64 type)")
|
||||||
|
}
|
||||||
|
if viewCountCol.Default != "0" {
|
||||||
|
t.Errorf("Expected view_count default '0', got '%v'", viewCountCol.Default)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify posts table indexes
|
||||||
|
if len(postsTable.Indexes) < 1 {
|
||||||
|
t.Error("Expected at least 1 index on posts table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify comments table
|
||||||
|
if len(commentsTable.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns in comments table, got %d", len(commentsTable.Columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
// user_id - nullable (pointer type)
|
||||||
|
userIDCol, exists := commentsTable.Columns["user_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'user_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if userIDCol.NotNull {
|
||||||
|
t.Error("Column 'user_id' should be nullable (pointer type *int64)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// post_id - NOT NULL (explicit tag)
|
||||||
|
postIDCol, exists := commentsTable.Columns["post_id"]
|
||||||
|
if !exists {
|
||||||
|
t.Fatal("Column 'post_id' not found in comments table")
|
||||||
|
}
|
||||||
|
if !postIDCol.NotNull {
|
||||||
|
t.Error("Column 'post_id' should be NOT NULL (explicit 'not null' tag)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify foreign key constraints
|
||||||
|
if len(postsTable.Constraints) == 0 {
|
||||||
|
t.Error("Expected at least one constraint on posts table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find FK constraint to users
|
||||||
|
var fkPostsUser *models.Constraint
|
||||||
|
for _, c := range postsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint && c.ReferencedTable == "users" {
|
||||||
|
fkPostsUser = c
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkPostsUser != nil {
|
||||||
|
if fkPostsUser.OnDelete != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON DELETE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnDelete)
|
||||||
|
}
|
||||||
|
if fkPostsUser.OnUpdate != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON UPDATE CASCADE for posts->users FK, got '%s'", fkPostsUser.OnUpdate)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify comments table constraints
|
||||||
|
if len(commentsTable.Constraints) == 0 {
|
||||||
|
t.Error("Expected at least one constraint on comments table")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find FK constraints
|
||||||
|
var fkCommentsPost, fkCommentsUser *models.Constraint
|
||||||
|
for _, c := range commentsTable.Constraints {
|
||||||
|
if c.Type == models.ForeignKeyConstraint {
|
||||||
|
if c.ReferencedTable == "posts" {
|
||||||
|
fkCommentsPost = c
|
||||||
|
} else if c.ReferencedTable == "users" {
|
||||||
|
fkCommentsUser = c
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsPost != nil {
|
||||||
|
if fkCommentsPost.OnDelete != "CASCADE" {
|
||||||
|
t.Errorf("Expected ON DELETE CASCADE for comments->posts FK, got '%s'", fkCommentsPost.OnDelete)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fkCommentsUser != nil {
|
||||||
|
if fkCommentsUser.OnDelete != "SET NULL" {
|
||||||
|
t.Errorf("Expected ON DELETE SET NULL for comments->users FK, got '%s'", fkCommentsUser.OnDelete)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadSchema(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
schema, err := reader.ReadSchema()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadSchema() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema == nil {
|
||||||
|
t.Fatal("ReadSchema() returned nil schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
if schema.Name != "public" {
|
||||||
|
t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(schema.Tables) != 1 {
|
||||||
|
t.Errorf("Expected 1 table, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadTable(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
table, err := reader.ReadTable()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadTable() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if table == nil {
|
||||||
|
t.Fatal("ReadTable() returned nil table")
|
||||||
|
}
|
||||||
|
|
||||||
|
if table.Name != "users" {
|
||||||
|
t.Errorf("Expected table name 'users', got '%s'", table.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(table.Columns) != 6 {
|
||||||
|
t.Errorf("Expected 6 columns, got %d", len(table.Columns))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_Directory(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm"),
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
db, err := reader.ReadDatabase()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadDatabase() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db == nil {
|
||||||
|
t.Fatal("ReadDatabase() returned nil database")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should read both simple.go and complex.go
|
||||||
|
if len(db.Schemas) == 0 {
|
||||||
|
t.Fatal("Expected at least one schema")
|
||||||
|
}
|
||||||
|
|
||||||
|
schema := db.Schemas[0]
|
||||||
|
// Should have at least 3 tables from complex.go (users, posts, comments)
|
||||||
|
// plus 1 from simple.go (users) - but same table name, so may be overwritten
|
||||||
|
if len(schema.Tables) < 3 {
|
||||||
|
t.Errorf("Expected at least 3 tables, got %d", len(schema.Tables))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_InvalidPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "/nonexistent/file.go",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for invalid file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReader_ReadDatabase_EmptyPath(t *testing.T) {
|
||||||
|
opts := &readers.ReaderOptions{
|
||||||
|
FilePath: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
reader := NewReader(opts)
|
||||||
|
_, err := reader.ReadDatabase()
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expected error for empty file path")
|
||||||
|
}
|
||||||
|
}
|
||||||
203
pkg/readers/graphql/README.md
Normal file
@@ -0,0 +1,203 @@
# GraphQL Schema Reader

The GraphQL reader parses GraphQL Schema Definition Language (SDL) files and converts them into RelSpec's internal database model.

## Features

- **Standard GraphQL SDL** support (generic, non-framework-specific)
- **Type to Table mapping**: GraphQL types become database tables
- **Field to Column mapping**: GraphQL fields become table columns
- **Enum support**: GraphQL enums are preserved
- **Custom scalars**: DateTime, JSON, Date automatically mapped to appropriate SQL types
- **Implicit relationships**: Detects relationships from field types
- **Many-to-many support**: Creates junction tables for bidirectional array relationships
- **Configurable ID mapping**: Choose between bigint (default) or UUID for ID fields

## Supported GraphQL Features

### Built-in Scalars
- `ID` → bigint (default) or uuid (configurable)
- `String` → text
- `Int` → integer
- `Float` → double precision
- `Boolean` → boolean

### Custom Scalars
- `DateTime` → timestamp
- `JSON` → jsonb
- `Date` → date
- `Time` → time
- `Decimal` → numeric

Additional custom scalars can be mapped via metadata.

### Relationships

Relationships are inferred from field types:

```graphql
type Post {
  id: ID!
  title: String!
  author: User!   # Many-to-one (creates authorId FK column, NOT NULL)
  reviewer: User  # Many-to-one nullable (creates reviewerId FK column, NULL)
  tags: [Tag!]!   # One-to-many or many-to-many (depending on reverse)
}

type User {
  id: ID!
  posts: [Post!]!  # Reverse of Post.author (no FK created)
}

type Tag {
  id: ID!
  posts: [Post!]!  # Many-to-many with Post (creates PostTag junction table)
}
```

**Relationship Detection Rules:**
- Single type reference (`user: User`) → Creates FK column (e.g., `userId`)
- Array type reference (`posts: [Post!]!`) → One-to-many reverse (no FK on this table)
- Bidirectional arrays → Many-to-many (creates junction table)
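
As a sketch of what these rules produce, the detected foreign keys can be inspected through the reader API documented under Usage below (imports as in Basic Usage, plus `fmt` and the `models` package; the `schema.graphql` path is a placeholder for the example schema above):

```go
opts := &readers.ReaderOptions{FilePath: "schema.graphql"}
db, err := graphql.NewReader(opts).ReadDatabase()
if err != nil {
    panic(err)
}

// Print every detected foreign key, e.g. "Post.authorId -> User"
for _, table := range db.Schemas[0].Tables {
    for _, c := range table.Constraints {
        if c.Type == models.ForeignKeyConstraint {
            fmt.Printf("%s.%s -> %s\n", table.Name, c.Columns[0], c.ReferencedTable)
        }
    }
}
```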

### Enums

```graphql
enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  role: Role!
}
```

Enums are preserved in the schema and can be used as column types.
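
Parsed enums are exposed on the schema model; a minimal sketch (with `opts` as in Basic Usage below, assuming the `Role` enum above):

```go
schema, err := graphql.NewReader(opts).ReadSchema()
if err != nil {
    panic(err)
}
for _, e := range schema.Enums {
    fmt.Printf("enum %s: %v\n", e.Name, e.Values) // e.g. "enum Role: [ADMIN USER GUEST]"
}
```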

## Usage

### Basic Usage

```go
import (
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
)

opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With UUID ID Type

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "idType": "uuid", // Map ID scalar to uuid instead of bigint
    },
}

reader := graphql.NewReader(opts)
db, err := reader.ReadDatabase()
```

### With Per-Type ID Mapping

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "typeIdMappings": map[string]string{
            "User": "uuid",   // User.id → uuid
            "Post": "bigint", // Post.id → bigint
        },
    },
}
```

### With Custom Scalar Mappings

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "customScalarMappings": map[string]string{
            "Upload":  "bytea",
            "Decimal": "numeric(10,2)",
        },
    },
}
```

## CLI Usage

```bash
# Convert GraphQL to JSON
relspec convert --from graphql --from-path schema.graphql \
  --to json --to-path schema.json

# Convert GraphQL to GORM models
relspec convert --from graphql --from-path schema.graphql \
  --to gorm --to-path models/ --package models

# Convert GraphQL to PostgreSQL SQL
relspec convert --from graphql --from-path schema.graphql \
  --to pgsql --to-path schema.sql
```

## Metadata Options

| Option | Type | Description | Default |
|--------|------|-------------|---------|
| `idType` | string | Global ID type mapping ("bigint" or "uuid") | "bigint" |
| `typeIdMappings` | map[string]string | Per-type ID mappings | {} |
| `customScalarMappings` | map[string]string | Custom scalar to SQL type mappings | {} |
| `schemaName` | string | Schema name for all tables | "public" |
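
For instance, `schemaName` can be set alongside the other options (a sketch based on the table above; `app` is an arbitrary schema name):

```go
opts := &readers.ReaderOptions{
    FilePath: "schema.graphql",
    Metadata: map[string]interface{}{
        "schemaName": "app", // place all tables in "app" instead of "public"
        "idType":     "uuid",
    },
}
```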

## Limitations

- Only supports GraphQL SDL (Schema Definition Language), not queries or mutations
- Directives are ignored (reserved for future extensibility)
- Interfaces and Unions are not supported
- GraphQL's concept of "schema" is different from database schemas; all types go into a single database schema (default: "public")

## Example

**Input** (`schema.graphql`):
```graphql
scalar DateTime

enum Role {
  ADMIN
  USER
}

type User {
  id: ID!
  email: String!
  role: Role!
  createdAt: DateTime!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  content: String
  published: Boolean!
  author: User!
}
```

**Result**: Database with:
- 2 tables: `User` and `Post`
- `Post` table has `authorId` foreign key to `User.id`
- `Role` enum with values: ADMIN, USER
- Custom scalar `DateTime` mapped to `timestamp`
279
pkg/readers/graphql/reader.go
Normal file
@@ -0,0 +1,279 @@
package graphql

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

type Reader struct {
    options *readers.ReaderOptions
}

func NewReader(options *readers.ReaderOptions) *Reader {
    return &Reader{
        options: options,
    }
}

func (r *Reader) ReadDatabase() (*models.Database, error) {
    if r.options.FilePath == "" {
        return nil, fmt.Errorf("file path is required for GraphQL reader")
    }

    content, err := os.ReadFile(r.options.FilePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read file: %w", err)
    }

    return r.parseGraphQL(string(content))
}

func (r *Reader) ReadSchema() (*models.Schema, error) {
    db, err := r.ReadDatabase()
    if err != nil {
        return nil, err
    }

    if len(db.Schemas) == 0 {
        return nil, fmt.Errorf("no schemas found")
    }

    return db.Schemas[0], nil
}

func (r *Reader) ReadTable() (*models.Table, error) {
    schema, err := r.ReadSchema()
    if err != nil {
        return nil, err
    }

    if len(schema.Tables) == 0 {
        return nil, fmt.Errorf("no tables found")
    }

    return schema.Tables[0], nil
}

type parseContext struct {
    inType        bool
    inEnum        bool
    currentType   string
    typeLines     []string
    currentEnum   string
    enumLines     []string
    customScalars map[string]bool
}

func (r *Reader) parseGraphQL(content string) (*models.Database, error) {
    dbName := "database"
    if r.options.Metadata != nil {
        if name, ok := r.options.Metadata["name"].(string); ok {
            dbName = name
        }
    }

    db := models.InitDatabase(dbName)
    schema := models.InitSchema("public")

    ctx := &parseContext{
        customScalars: make(map[string]bool),
    }

    // First pass: collect custom scalars and enums
    scanner := bufio.NewScanner(strings.NewReader(content))
    scalarRegex := regexp.MustCompile(`^\s*scalar\s+(\w+)`)
    enumRegex := regexp.MustCompile(`^\s*enum\s+(\w+)\s*\{`)
    closingBraceRegex := regexp.MustCompile(`^\s*\}`)

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := scalarRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.customScalars[matches[1]] = true
            continue
        }

        if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inEnum = true
            ctx.currentEnum = matches[1]
            ctx.enumLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inEnum {
            r.parseEnum(ctx.currentEnum, ctx.enumLines, schema)
            // Add enum name to custom scalars for type detection
            ctx.customScalars[ctx.currentEnum] = true
            ctx.inEnum = false
            ctx.currentEnum = ""
            ctx.enumLines = nil
            continue
        }

        if ctx.inEnum {
            ctx.enumLines = append(ctx.enumLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    // Second pass: parse types
    scanner = bufio.NewScanner(strings.NewReader(content))
    typeRegex := regexp.MustCompile(`^\s*type\s+(\w+)\s*\{`)
    ctx.inType = false
    ctx.inEnum = false

    for scanner.Scan() {
        line := scanner.Text()
        trimmed := strings.TrimSpace(line)

        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        if matches := typeRegex.FindStringSubmatch(trimmed); matches != nil {
            ctx.inType = true
            ctx.currentType = matches[1]
            ctx.typeLines = []string{}
            continue
        }

        if closingBraceRegex.MatchString(trimmed) && ctx.inType {
            if err := r.parseType(ctx.currentType, ctx.typeLines, schema, ctx); err != nil {
                return nil, fmt.Errorf("failed to parse type %s: %w", ctx.currentType, err)
            }
            ctx.inType = false
            ctx.currentType = ""
            ctx.typeLines = nil
            continue
        }

        if ctx.inType {
            ctx.typeLines = append(ctx.typeLines, line)
        }
    }

    if err := scanner.Err(); err != nil {
        return nil, fmt.Errorf("scanner error: %w", err)
    }

    db.Schemas = []*models.Schema{schema}

    // Third pass: detect and create relationships
    if err := r.detectAndCreateRelationships(schema, ctx); err != nil {
        return nil, fmt.Errorf("failed to create relationships: %w", err)
    }

    return db, nil
}

type fieldInfo struct {
    name          string
    typeName      string
    isArray       bool
    isNullable    bool
    innerNullable bool
}

func (r *Reader) parseType(typeName string, lines []string, schema *models.Schema, ctx *parseContext) error {
    table := models.InitTable(typeName, schema.Name)
    table.Metadata = make(map[string]any)

    // Store field info for relationship detection
    relationFields := make(map[string]*fieldInfo)

    fieldRegex := regexp.MustCompile(`^\s*(\w+)\s*:\s*(\[)?(\w+)(!)?(\])?(!)?\s*`)

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }

        matches := fieldRegex.FindStringSubmatch(trimmed)
        if matches == nil {
            continue
        }

        fieldName := matches[1]
        hasOpenBracket := matches[2] == "["
        baseType := matches[3]
        innerNonNull := matches[4] == "!"
        hasCloseBracket := matches[5] == "]"
        outerNonNull := matches[6] == "!"

        isArray := hasOpenBracket && hasCloseBracket

        // Determine if this is a scalar or a relation
        if r.isScalarType(baseType, ctx) {
            // This is a scalar field
            column := models.InitColumn(fieldName, table.Name, schema.Name)
            column.Type = r.graphQLTypeToSQL(baseType, fieldName, typeName)

            if isArray {
                // Array of scalars: use array type
                column.Type += "[]"
                column.NotNull = outerNonNull
            } else {
                column.NotNull = !isArray && innerNonNull
            }

            // Check if this is a primary key (convention: field named "id")
            if fieldName == "id" {
                column.IsPrimaryKey = true
                column.AutoIncrement = true
            }

            table.Columns[fieldName] = column
        } else {
            // This is a relation field - store for later processing
            relationFields[fieldName] = &fieldInfo{
                name:          fieldName,
                typeName:      baseType,
                isArray:       isArray,
                isNullable:    !innerNonNull && !isArray,
                innerNullable: !innerNonNull && isArray,
            }
        }
    }

    // Store relation fields in table metadata for relationship detection
    if len(relationFields) > 0 {
        table.Metadata["relationFields"] = relationFields
    }

    schema.Tables = append(schema.Tables, table)
    return nil
}

func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
    enum := &models.Enum{
        Name:   enumName,
        Schema: schema.Name,
        Values: make([]string, 0),
    }

    for _, line := range lines {
        trimmed := strings.TrimSpace(line)
        if trimmed == "" || strings.HasPrefix(trimmed, "#") {
            continue
        }
        // Enum values are simple identifiers
        enum.Values = append(enum.Values, trimmed)
    }

    schema.Enums = append(schema.Enums, enum)
}
362
pkg/readers/graphql/reader_test.go
Normal file
@@ -0,0 +1,362 @@
package graphql

import (
    "path/filepath"
    "testing"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
    "git.warky.dev/wdevs/relspecgo/pkg/readers"
)

func TestReader_ReadDatabase_Simple(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    if len(db.Schemas) == 0 {
        t.Fatal("Expected at least one schema")
    }

    schema := db.Schemas[0]
    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
    }

    userTable := schema.Tables[0]
    if userTable.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", userTable.Name)
    }

    // Verify columns
    expectedColumns := map[string]struct {
        sqlType string
        notNull bool
        isPK    bool
    }{
        "id":     {"bigint", true, true},
        "email":  {"text", true, false},
        "name":   {"text", false, false},
        "age":    {"integer", false, false},
        "active": {"boolean", true, false},
    }

    if len(userTable.Columns) != len(expectedColumns) {
        t.Fatalf("Expected %d columns, got %d", len(expectedColumns), len(userTable.Columns))
    }

    for colName, expected := range expectedColumns {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expected.sqlType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expected.sqlType, col.Type)
        }

        if col.NotNull != expected.notNull {
            t.Errorf("Column '%s': expected NotNull=%v, got %v", colName, expected.notNull, col.NotNull)
        }

        if col.IsPrimaryKey != expected.isPK {
            t.Errorf("Column '%s': expected IsPrimaryKey=%v, got %v", colName, expected.isPK, col.IsPrimaryKey)
        }
    }
}

func TestReader_ReadDatabase_WithRelations(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "relations.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Tables) != 2 {
        t.Fatalf("Expected 2 tables, got %d", len(schema.Tables))
    }

    // Find Post table (should have FK to User)
    var postTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "Post" {
            postTable = table
            break
        }
    }

    if postTable == nil {
        t.Fatal("Post table not found")
    }

    // Verify authorId FK column was created
    authorIdCol, exists := postTable.Columns["authorId"]
    if !exists {
        t.Fatal("Expected 'authorId' FK column not found in Post table")
    }

    if authorIdCol.Type != "bigint" {
        t.Errorf("Expected authorId type 'bigint', got '%s'", authorIdCol.Type)
    }

    if !authorIdCol.NotNull {
        t.Error("Expected authorId to be NOT NULL")
    }

    // Verify FK constraint
    fkConstraintFound := false
    for _, constraint := range postTable.Constraints {
        if constraint.Type == models.ForeignKeyConstraint {
            if constraint.ReferencedTable == "User" && len(constraint.Columns) > 0 && constraint.Columns[0] == "authorId" {
                fkConstraintFound = true
                if constraint.OnDelete != "CASCADE" {
                    t.Errorf("Expected OnDelete CASCADE, got %s", constraint.OnDelete)
                }
                break
            }
        }
    }

    if !fkConstraintFound {
        t.Error("Foreign key constraint from Post to User not found")
    }
}

func TestReader_ReadDatabase_WithEnums(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "enums.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    if len(schema.Enums) != 1 {
        t.Fatalf("Expected 1 enum, got %d", len(schema.Enums))
    }

    roleEnum := schema.Enums[0]
    if roleEnum.Name != "Role" {
        t.Errorf("Expected enum name 'Role', got '%s'", roleEnum.Name)
    }

    expectedValues := []string{"ADMIN", "USER", "GUEST"}
    if len(roleEnum.Values) != len(expectedValues) {
        t.Fatalf("Expected %d enum values, got %d", len(expectedValues), len(roleEnum.Values))
    }

    for i, expected := range expectedValues {
        if roleEnum.Values[i] != expected {
            t.Errorf("Expected enum value '%s' at index %d, got '%s'", expected, i, roleEnum.Values[i])
        }
    }

    // Verify role column in User table
    userTable := schema.Tables[0]
    roleCol, exists := userTable.Columns["role"]
    if !exists {
        t.Fatal("Expected 'role' column not found")
    }

    if roleCol.Type != "Role" {
        t.Errorf("Expected role type 'Role', got '%s'", roleCol.Type)
    }
}

func TestReader_ReadDatabase_CustomScalars(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "custom_scalars.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    // Verify custom scalar mappings
    expectedTypes := map[string]string{
        "createdAt": "timestamp",
        "metadata":  "jsonb",
        "birthDate": "date",
    }

    for colName, expectedType := range expectedTypes {
        col, exists := userTable.Columns[colName]
        if !exists {
            t.Errorf("Expected column '%s' not found", colName)
            continue
        }

        if col.Type != expectedType {
            t.Errorf("Column '%s': expected type '%s', got '%s'", colName, expectedType, col.Type)
        }
    }
}

func TestReader_ReadDatabase_UUIDMetadata(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
        Metadata: map[string]interface{}{
            "idType": "uuid",
        },
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]
    userTable := schema.Tables[0]

    idCol, exists := userTable.Columns["id"]
    if !exists {
        t.Fatal("Expected 'id' column not found")
    }

    if idCol.Type != "uuid" {
        t.Errorf("Expected id type 'uuid' with metadata, got '%s'", idCol.Type)
    }
}

func TestReader_ReadDatabase_Complex(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "complex.graphql"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    schema := db.Schemas[0]

    // Should have 5 tables: User, Profile, Post, Tag, and PostTag (join table)
    expectedTableCount := 5
    if len(schema.Tables) != expectedTableCount {
        t.Fatalf("Expected %d tables, got %d", expectedTableCount, len(schema.Tables))
    }

    // Verify PostTag join table exists (many-to-many between Post and Tag)
    var joinTable *models.Table
    for _, table := range schema.Tables {
        if table.Name == "PostTag" {
            joinTable = table
            break
        }
    }

    if joinTable == nil {
        t.Fatal("Expected PostTag join table not found")
    }

    // Verify join table has both FK columns
    if _, exists := joinTable.Columns["postId"]; !exists {
        t.Error("Expected 'postId' column in PostTag join table")
    }

    if _, exists := joinTable.Columns["tagId"]; !exists {
        t.Error("Expected 'tagId' column in PostTag join table")
    }

    // Verify composite primary key
    pkFound := false
    for _, constraint := range joinTable.Constraints {
        if constraint.Type == models.PrimaryKeyConstraint {
            if len(constraint.Columns) == 2 {
                pkFound = true
            }
            break
        }
    }

    if !pkFound {
        t.Error("Expected composite primary key in PostTag join table")
    }
}

func TestReader_ReadSchema(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    schema, err := reader.ReadSchema()
    if err != nil {
        t.Fatalf("ReadSchema() error = %v", err)
    }

    if schema.Name != "public" {
        t.Errorf("Expected schema name 'public', got '%s'", schema.Name)
    }

    if len(schema.Tables) != 1 {
        t.Errorf("Expected 1 table, got %d", len(schema.Tables))
    }
}

func TestReader_ReadTable(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "graphql", "simple.graphql"),
    }

    reader := NewReader(opts)
    table, err := reader.ReadTable()
    if err != nil {
        t.Fatalf("ReadTable() error = %v", err)
    }

    if table.Name != "User" {
        t.Errorf("Expected table name 'User', got '%s'", table.Name)
    }
}

func TestReader_InvalidPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "/nonexistent/path.graphql",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for invalid path, got nil")
    }
}

func TestReader_EmptyPath(t *testing.T) {
    opts := &readers.ReaderOptions{
        FilePath: "",
    }

    reader := NewReader(opts)
    _, err := reader.ReadDatabase()
    if err == nil {
        t.Error("Expected error for empty path, got nil")
    }
}
225
pkg/readers/graphql/relationships.go
Normal file
@@ -0,0 +1,225 @@
package graphql

import (
    "fmt"
    "strings"

    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (r *Reader) detectAndCreateRelationships(schema *models.Schema, ctx *parseContext) error {
    // Build table lookup map
    tableMap := make(map[string]*models.Table)
    for _, table := range schema.Tables {
        tableMap[table.Name] = table
    }

    // Process each table's relation fields
    for _, table := range schema.Tables {
        relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
        if !ok || len(relationFields) == 0 {
            continue
        }

        for fieldName, fieldInfo := range relationFields {
            targetTable, exists := tableMap[fieldInfo.typeName]
            if !exists {
                // Referenced type doesn't exist - might be an interface/union, skip
                continue
            }

            if fieldInfo.isArray {
                // This is a one-to-many or many-to-many reverse side
                // Check if target table has a reverse array field
                if r.hasReverseArrayField(targetTable, table.Name) {
                    // Bidirectional array = many-to-many
                    // Only create join table once (lexicographically first table creates it)
                    if table.Name < targetTable.Name {
                        if err := r.createManyToManyJoinTable(schema, table, targetTable, fieldName, tableMap); err != nil {
                            return err
                        }
                    }
                }
                // For one-to-many, no action needed (FK is on the other table)
            } else {
                // This is a many-to-one or one-to-one
                // Create FK column on this table
                if err := r.createForeignKeyColumn(table, targetTable, fieldName, fieldInfo.isNullable, schema); err != nil {
                    return err
                }
            }
        }
    }

    // Clean up metadata
    for _, table := range schema.Tables {
        delete(table.Metadata, "relationFields")
    }

    return nil
}

func (r *Reader) hasReverseArrayField(table *models.Table, targetTypeName string) bool {
    relationFields, ok := table.Metadata["relationFields"].(map[string]*fieldInfo)
    if !ok {
        return false
    }

    for _, fieldInfo := range relationFields {
        if fieldInfo.typeName == targetTypeName && fieldInfo.isArray {
            return true
        }
    }

    return false
}

func (r *Reader) createForeignKeyColumn(fromTable, toTable *models.Table, fieldName string, nullable bool, schema *models.Schema) error {
    // Get primary key from target table
    pkCol := toTable.GetPrimaryKey()
    if pkCol == nil {
        return fmt.Errorf("target table %s has no primary key for relationship", toTable.Name)
    }

    // Create FK column name: {fieldName}Id
    fkColName := fieldName + "Id"

    // Check if column already exists (shouldn't happen but be safe)
    if _, exists := fromTable.Columns[fkColName]; exists {
        return nil
    }

    // Create FK column
    fkCol := models.InitColumn(fkColName, fromTable.Name, schema.Name)
    fkCol.Type = pkCol.Type
    fkCol.NotNull = !nullable

    fromTable.Columns[fkColName] = fkCol

    // Create FK constraint
    constraint := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", fromTable.Name, fieldName),
        models.ForeignKeyConstraint,
    )
    constraint.Schema = schema.Name
    constraint.Table = fromTable.Name
    constraint.Columns = []string{fkColName}
    constraint.ReferencedSchema = schema.Name
    constraint.ReferencedTable = toTable.Name
    constraint.ReferencedColumns = []string{pkCol.Name}
    constraint.OnDelete = "CASCADE"
    constraint.OnUpdate = "RESTRICT"

    fromTable.Constraints[constraint.Name] = constraint

    // Create relationship
    relationship := models.InitRelationship(
        fmt.Sprintf("rel_%s_%s", fromTable.Name, fieldName),
        models.OneToMany,
    )
    relationship.FromTable = fromTable.Name
    relationship.FromSchema = schema.Name
    relationship.FromColumns = []string{fkColName}
    relationship.ToTable = toTable.Name
    relationship.ToSchema = schema.Name
    relationship.ToColumns = []string{pkCol.Name}
    relationship.ForeignKey = constraint.Name

    fromTable.Relationships[relationship.Name] = relationship

    return nil
}

func (r *Reader) createManyToManyJoinTable(schema *models.Schema, table1, table2 *models.Table, fieldName string, tableMap map[string]*models.Table) error {
    // Create join table name
    joinTableName := table1.Name + table2.Name

    // Check if join table already exists
    if _, exists := tableMap[joinTableName]; exists {
        return nil
    }

    // Get primary keys
    pk1 := table1.GetPrimaryKey()
    pk2 := table2.GetPrimaryKey()

    if pk1 == nil || pk2 == nil {
        return fmt.Errorf("cannot create many-to-many: tables must have primary keys")
    }

    // Create join table
    joinTable := models.InitTable(joinTableName, schema.Name)

    // Create FK column for table1
    fkCol1Name := strings.ToLower(table1.Name) + "Id"
    fkCol1 := models.InitColumn(fkCol1Name, joinTable.Name, schema.Name)
    fkCol1.Type = pk1.Type
    fkCol1.NotNull = true
    joinTable.Columns[fkCol1Name] = fkCol1

    // Create FK column for table2
    fkCol2Name := strings.ToLower(table2.Name) + "Id"
    fkCol2 := models.InitColumn(fkCol2Name, joinTable.Name, schema.Name)
    fkCol2.Type = pk2.Type
    fkCol2.NotNull = true
    joinTable.Columns[fkCol2Name] = fkCol2

    // Create composite primary key
    pkConstraint := models.InitConstraint(
        fmt.Sprintf("pk_%s", joinTableName),
        models.PrimaryKeyConstraint,
    )
    pkConstraint.Schema = schema.Name
    pkConstraint.Table = joinTable.Name
    pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
    joinTable.Constraints[pkConstraint.Name] = pkConstraint

    // Create FK constraint to table1
    fk1 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, table1.Name),
        models.ForeignKeyConstraint,
    )
    fk1.Schema = schema.Name
    fk1.Table = joinTable.Name
    fk1.Columns = []string{fkCol1Name}
    fk1.ReferencedSchema = schema.Name
    fk1.ReferencedTable = table1.Name
    fk1.ReferencedColumns = []string{pk1.Name}
    fk1.OnDelete = "CASCADE"
    fk1.OnUpdate = "RESTRICT"
    joinTable.Constraints[fk1.Name] = fk1

    // Create FK constraint to table2
    fk2 := models.InitConstraint(
        fmt.Sprintf("fk_%s_%s", joinTableName, table2.Name),
        models.ForeignKeyConstraint,
    )
    fk2.Schema = schema.Name
    fk2.Table = joinTable.Name
    fk2.Columns = []string{fkCol2Name}
    fk2.ReferencedSchema = schema.Name
    fk2.ReferencedTable = table2.Name
    fk2.ReferencedColumns = []string{pk2.Name}
    fk2.OnDelete = "CASCADE"
    fk2.OnUpdate = "RESTRICT"
    joinTable.Constraints[fk2.Name] = fk2

    // Create relationships
    rel1 := models.InitRelationship(
        fmt.Sprintf("rel_%s_%s_%s", joinTableName, table1.Name, table2.Name),
        models.ManyToMany,
    )
    rel1.FromTable = table1.Name
    rel1.FromSchema = schema.Name
    rel1.ToTable = table2.Name
    rel1.ToSchema = schema.Name
    rel1.ThroughTable = joinTableName
    rel1.ThroughSchema = schema.Name
    joinTable.Relationships[rel1.Name] = rel1

    // Add join table to schema
    schema.Tables = append(schema.Tables, joinTable)
    tableMap[joinTableName] = joinTable

    return nil
}
97
pkg/readers/graphql/type_mapping.go
Normal file
@@ -0,0 +1,97 @@
package graphql

func (r *Reader) isScalarType(typeName string, ctx *parseContext) bool {
    // Built-in GraphQL scalars
    builtInScalars := map[string]bool{
        "ID":      true,
        "String":  true,
        "Int":     true,
        "Float":   true,
        "Boolean": true,
    }

    if builtInScalars[typeName] {
        return true
    }

    // Custom scalars declared in the schema
    if ctx.customScalars[typeName] {
        return true
    }

    // Common custom scalars (even if not declared)
    commonCustomScalars := map[string]bool{
        "DateTime": true,
        "JSON":     true,
        "Date":     true,
        "Time":     true,
        "Upload":   true,
        "Decimal":  true,
    }

    return commonCustomScalars[typeName]
}

func (r *Reader) graphQLTypeToSQL(gqlType string, fieldName string, typeName string) string {
    // Check for ID type with configurable mapping
    if gqlType == "ID" {
        // Check metadata for ID type preference
        if r.options.Metadata != nil {
            // Global idType setting
            if idType, ok := r.options.Metadata["idType"].(string); ok {
                if idType == "uuid" {
                    return "uuid"
                }
            }

            // Per-type ID mapping
            if typeIdMappings, ok := r.options.Metadata["typeIdMappings"].(map[string]string); ok {
                if idType, ok := typeIdMappings[typeName]; ok {
                    if idType == "uuid" {
                        return "uuid"
                    }
                }
            }
        }

        return "bigint" // Default
    }

    // Custom scalar mappings
    if r.options.Metadata != nil {
        if customMappings, ok := r.options.Metadata["customScalarMappings"].(map[string]string); ok {
            if sqlType, ok := customMappings[gqlType]; ok {
                return sqlType
            }
        }
    }

    // Built-in custom scalar mappings
    customScalars := map[string]string{
        "DateTime": "timestamp",
        "JSON":     "jsonb",
        "Date":     "date",
        "Time":     "time",
        "Decimal":  "numeric",
        "Upload":   "bytea",
    }
    if sqlType, ok := customScalars[gqlType]; ok {
        return sqlType
    }

    // Standard scalar mappings
    typeMap := map[string]string{
        "String":  "text",
        "Int":     "integer",
        "Float":   "double precision",
        "Boolean": "boolean",
    }

    if sqlType, ok := typeMap[gqlType]; ok {
        return sqlType
    }

    // If not a known scalar, assume it's an enum or custom type
    // Return as-is (might be an enum)
    return gqlType
}
152
pkg/readers/json/README.md
Normal file
@@ -0,0 +1,152 @@
# JSON Reader

Reads database schema definitions from JSON files.

## Overview

The JSON Reader parses JSON files that define database schemas in RelSpec's canonical JSON format and converts them into RelSpec's internal database model representation.

## Features

- Reads RelSpec's standard JSON schema format
- Supports complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences

## Usage

### Basic Example

```go
package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/readers"
    "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
)

func main() {
    options := &readers.ReaderOptions{
        FilePath: "/path/to/schema.json",
    }

    reader := json.NewReader(options)
    db, err := reader.ReadDatabase()
    if err != nil {
        panic(err)
    }

    fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read JSON schema and convert to GORM models
relspec --input json --in-file schema.json --output gorm --out-file models.go

# Convert JSON to PostgreSQL DDL
relspec --input json --in-file database.json --output pgsql --out-file schema.sql

# Transform JSON to YAML
relspec --input json --in-file schema.json --output yaml --out-file schema.yaml
```

## Example JSON Schema

```json
{
  "name": "myapp",
  "database_type": "postgresql",
  "schemas": [
    {
      "name": "public",
      "tables": [
        {
          "name": "users",
          "schema": "public",
          "columns": {
            "id": {
              "name": "id",
              "type": "bigint",
              "not_null": true,
              "is_primary_key": true,
              "auto_increment": true,
              "sequence": 1
            },
            "username": {
              "name": "username",
              "type": "varchar",
              "length": 50,
              "not_null": true,
              "sequence": 2
            },
            "email": {
              "name": "email",
              "type": "varchar",
              "length": 100,
              "not_null": true,
              "sequence": 3
            }
          },
          "constraints": {
            "pk_users": {
              "name": "pk_users",
              "type": "PRIMARY KEY",
              "columns": ["id"]
            },
            "uq_users_username": {
              "name": "uq_users_username",
              "type": "UNIQUE",
              "columns": ["username"]
            }
          },
          "indexes": {
            "idx_users_email": {
              "name": "idx_users_email",
              "columns": ["email"],
              "unique": false,
              "type": "btree"
            }
          }
        }
      ]
    }
  ]
}
```

## Schema Structure

The JSON format follows RelSpec's internal model structure:

- `Database` - Top-level container
  - `name` - Database name
  - `database_type` - Database system (postgresql, mysql, etc.)
  - `schemas[]` - Array of schemas

- `Schema` - Schema/namespace
  - `name` - Schema name
  - `tables[]` - Array of tables
  - `views[]` - Array of views
  - `sequences[]` - Array of sequences

- `Table` - Table definition
  - `name` - Table name
  - `columns{}` - Map of columns
  - `constraints{}` - Map of constraints
  - `indexes{}` - Map of indexes
  - `relationships{}` - Map of relationships
|
||||||
|
|
||||||
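Once parsed, this structure can be walked directly. A small sketch, continuing from the basic example's `db` value (field names as in the model structure listed above):

```go
	// Walk schemas -> tables -> columns of the parsed database
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			fmt.Printf("%s.%s has %d columns\n", schema.Name, table.Name, len(table.Columns))
		}
	}
```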
## Notes

- This is RelSpec's native interchange format
- Preserves complete schema information
- Ideal for version control and schema documentation
- Can be used as an intermediate format for transformations
138
pkg/readers/pgsql/README.md
Normal file
@@ -0,0 +1,138 @@
# PostgreSQL Reader

Reads schema information directly from a live PostgreSQL database.

## Overview

The PostgreSQL Reader connects to a PostgreSQL database and introspects its schema, extracting complete information about tables, columns, constraints, indexes, views, and sequences.

## Features

- Direct database introspection
- Extracts complete schema information including:
  - Tables and columns
  - Primary keys, foreign keys, unique constraints, check constraints
  - Indexes
  - Views
  - Sequences
- Supports multiple schemas
- Captures constraint actions (ON DELETE, ON UPDATE)
- Derives relationships from foreign keys

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
)

func main() {
	options := &readers.ReaderOptions{
		ConnectionString: "postgres://user:password@localhost:5432/mydb?sslmode=disable",
	}

	reader := pgsql.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Database: %s\n", db.Name)
	fmt.Printf("Schemas: %d\n", len(db.Schemas))
	for _, schema := range db.Schemas {
		fmt.Printf("  Schema: %s, Tables: %d\n", schema.Name, len(schema.Tables))
	}
}
```

### CLI Example

```bash
# Inspect PostgreSQL database and export to JSON
relspec --input pgsql \
  --conn "postgres://user:password@localhost:5432/mydb" \
  --output json \
  --out-file schema.json

# Generate GORM models from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://user:password@localhost:5432/mydb" \
  --output gorm \
  --out-file models.go

# Export database structure to YAML
relspec --input pgsql \
  --conn "postgres://localhost/mydb?sslmode=disable" \
  --output yaml \
  --out-file schema.yaml
```

## Connection String Format

The reader uses PostgreSQL connection strings in the format:

```
postgres://username:password@hostname:port/database?parameters
```

Examples:
```
postgres://localhost/mydb
postgres://user:pass@localhost:5432/mydb
postgres://user@localhost/mydb?sslmode=disable
postgres://user:pass@db.example.com:5432/production?sslmode=require
```

## Extracted Information

### Tables
- Table name and schema
- Comments/descriptions
- All columns with data types, nullable, defaults
- Sequences

### Columns
- Column name, data type, length/precision
- NULL/NOT NULL constraints
- Default values
- Auto-increment information
- Primary key designation

### Constraints
- Primary keys
- Foreign keys (with ON DELETE/UPDATE actions)
- Unique constraints
- Check constraints

### Indexes
- Index name and type (btree, hash, gist, gin, etc.)
- Columns in index
- Unique/non-unique
- Partial indexes

### Views
- View definitions
- Column information

### Sequences
- Sequence properties
- Associated tables
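A quick way to verify what was captured is to list the foreign keys after introspection. A hedged sketch, continuing from the basic example's `db` value; the constraint fields mirror this repo's model package, but the `Type` field name is an assumption:

```go
	// Print every foreign key the reader extracted, with its ON DELETE action
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, c := range table.Constraints {
				if c.Type == models.ForeignKeyConstraint { // field name assumed
					fmt.Printf("%s%v -> %s%v (ON DELETE %s)\n",
						table.Name, c.Columns, c.ReferencedTable, c.ReferencedColumns, c.OnDelete)
				}
			}
		}
	}
```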
## Notes

- Requires PostgreSQL connection permissions
- Reads all non-system schemas (excludes pg_catalog, information_schema, pg_toast)
- Captures PostgreSQL-specific data types
- Automatically maps PostgreSQL types to canonical types
- Preserves relationship metadata for downstream conversion

## Requirements

- Go library: `github.com/jackc/pgx/v5`
- Database user must have SELECT permissions on system catalogs
103
pkg/readers/prisma/README.md
Normal file
@@ -0,0 +1,103 @@
# Prisma Reader

Reads Prisma schema files and extracts database schema information.

## Overview

The Prisma Reader parses `.prisma` schema files that define database models using Prisma's schema language and converts them into RelSpec's internal database model representation.

## Features

- Parses Prisma schema syntax
- Extracts models, fields, and relationships
- Supports Prisma attributes and directives
- Handles enums and composite types

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.prisma",
	}

	reader := prisma.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read Prisma schema and convert to JSON
relspec --input prisma --in-file schema.prisma --output json --out-file schema.json

# Convert Prisma to GORM models
relspec --input prisma --in-file schema.prisma --output gorm --out-file models.go
```

## Example Prisma Schema

```prisma
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

generator client {
  provider = "prisma-client-js"
}

model User {
  id        Int      @id @default(autoincrement())
  username  String   @unique @db.VarChar(50)
  email     String   @db.VarChar(100)
  createdAt DateTime @default(now()) @map("created_at")

  posts Post[]

  @@map("users")
}

model Post {
  id      Int    @id @default(autoincrement())
  userId  Int    @map("user_id")
  title   String @db.VarChar(200)
  content String @db.Text

  user User @relation(fields: [userId], references: [id], onDelete: Cascade)

  @@map("posts")
}
```

## Supported Prisma Attributes

- `@id` - Primary key
- `@unique` - Unique constraint
- `@default` - Default value
- `@map` - Column name mapping
- `@@map` - Table name mapping
- `@relation` - Relationship definition
- `@db.*` - Database-specific type annotations
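For `@default` specifically, values survive conversion as follows (per the reader implementation in this PR):

- `@default(autoincrement())` sets the column's auto-increment flag rather than a SQL default
- `@default(now())` carries through as the SQL default `now()`
- `@default(uuid())` is translated to PostgreSQL's `gen_random_uuid()`
- `@default(cuid())` has no SQL equivalent, so it is preserved in the column comment
- Quoted literals, numbers, and enum values are stored as the column default verbatim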
## Notes

- Extracts datasource provider information
- Supports `@@map` for custom table names
- Handles Prisma-specific types and converts them to standard SQL types
815
pkg/readers/prisma/reader.go
Normal file
@@ -0,0 +1,815 @@
package prisma

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for Prisma schema format
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new Prisma reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads and parses Prisma schema input, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for Prisma reader")
	}

	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	return r.parsePrisma(string(content))
}

// ReadSchema reads and parses Prisma schema input, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}

	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in Prisma schema")
	}

	// Return the first schema
	return db.Schemas[0], nil
}

// ReadTable reads and parses Prisma schema input, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}

	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in Prisma schema")
	}

	// Return the first table
	return schema.Tables[0], nil
}

// parsePrisma parses Prisma schema content and returns a Database model
func (r *Reader) parsePrisma(content string) (*models.Database, error) {
	db := models.InitDatabase("database")

	if r.options.Metadata != nil {
		if name, ok := r.options.Metadata["name"].(string); ok {
			db.Name = name
		}
	}

	// Default schema for Prisma (doesn't have explicit schema concept in most cases)
	schema := models.InitSchema("public")
	schema.Enums = make([]*models.Enum, 0)

	scanner := bufio.NewScanner(strings.NewReader(content))

	// State tracking
	var currentBlock string // "datasource", "generator", "model", "enum"
	var currentTable *models.Table
	var currentEnum *models.Enum
	var blockContent []string

	// Regex patterns
	datasourceRegex := regexp.MustCompile(`^datasource\s+\w+\s*{`)
	generatorRegex := regexp.MustCompile(`^generator\s+\w+\s*{`)
	modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
	enumRegex := regexp.MustCompile(`^enum\s+(\w+)\s*{`)

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for block start
		if matches := datasourceRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "datasource"
			blockContent = []string{}
			continue
		}

		if matches := generatorRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "generator"
			blockContent = []string{}
			continue
		}

		if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "model"
			tableName := matches[1]
			currentTable = models.InitTable(tableName, "public")
			blockContent = []string{}
			continue
		}

		if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
			currentBlock = "enum"
			enumName := matches[1]
			currentEnum = &models.Enum{
				Name:   enumName,
				Schema: "public",
				Values: make([]string, 0),
			}
			blockContent = []string{}
			continue
		}

		// Check for block end
		if trimmed == "}" {
			switch currentBlock {
			case "datasource":
				r.parseDatasource(blockContent, db)
			case "generator":
				// We don't need to do anything with generator blocks
			case "model":
				if currentTable != nil {
					r.parseModelFields(blockContent, currentTable)
					schema.Tables = append(schema.Tables, currentTable)
					currentTable = nil
				}
			case "enum":
				if currentEnum != nil {
					schema.Enums = append(schema.Enums, currentEnum)
					currentEnum = nil
				}
			}
			currentBlock = ""
			blockContent = []string{}
			continue
		}

		// Accumulate block content
		if currentBlock != "" {
			if currentBlock == "enum" && currentEnum != nil {
				// For enums, just add the trimmed value
				if trimmed != "" {
					currentEnum.Values = append(currentEnum.Values, trimmed)
				}
			} else {
				blockContent = append(blockContent, line)
			}
		}
	}

	// Second pass: resolve relationships
	r.resolveRelationships(schema)

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// parseDatasource extracts database type from datasource block
func (r *Reader) parseDatasource(lines []string, db *models.Database) {
	providerRegex := regexp.MustCompile(`provider\s*=\s*"?(\w+)"?`)

	for _, line := range lines {
		if matches := providerRegex.FindStringSubmatch(line); matches != nil {
			provider := matches[1]
			switch provider {
			case "postgresql", "postgres":
				db.DatabaseType = models.PostgresqlDatabaseType
			case "mysql":
				db.DatabaseType = "mysql"
			case "sqlite":
				db.DatabaseType = models.SqlLiteDatabaseType
			case "sqlserver":
				db.DatabaseType = models.MSSQLDatabaseType
			default:
				db.DatabaseType = models.PostgresqlDatabaseType
			}
			break
		}
	}
}

// parseModelFields parses model field definitions
func (r *Reader) parseModelFields(lines []string, table *models.Table) {
	fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)
	blockAttrRegex := regexp.MustCompile(`^@@(\w+)\((.*?)\)`)

	for _, line := range lines {
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		// Check for block attributes (@@id, @@unique, @@index)
		if matches := blockAttrRegex.FindStringSubmatch(trimmed); matches != nil {
			attrName := matches[1]
			attrContent := matches[2]
			r.parseBlockAttribute(attrName, attrContent, table)
			continue
		}

		// Parse field definition
		if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
			fieldName := matches[1]
			fieldType := matches[2]
			modifier := matches[3]   // ? or []
			attributes := matches[4] // @... part

			column := r.parseField(fieldName, fieldType, modifier, attributes, table)
			if column != nil {
				table.Columns[column.Name] = column
			}
		}
	}
}

// parseField parses a single field definition
func (r *Reader) parseField(name, fieldType, modifier, attributes string, table *models.Table) *models.Column {
	// Check if this is a relation field (array or references another model)
	if modifier == "[]" {
		// Array field - this is a relation field, not a column
		// We'll handle this in relationship resolution
		return nil
	}

	// Check if this is a non-primitive type (relation field)
	// Note: We need to allow enum types through as they're like primitives
	if !r.isPrimitiveType(fieldType) && !r.isEnumType(fieldType, table) {
		// This is a relation field (e.g., user User), not a scalar column
		// Only process this if it has @relation attribute (which means it's the owning side with FK)
		// Otherwise skip it as it's just the inverse relation field
		if attributes == "" || !strings.Contains(attributes, "@relation") {
			return nil
		}
		// If it has @relation, we still don't create a column for it
		// The actual FK column will be in the fields: [...] part of @relation
		return nil
	}

	column := models.InitColumn(name, table.Name, table.Schema)

	// Map Prisma type to SQL type
	column.Type = r.prismaTypeToSQL(fieldType)

	// Handle modifiers
	if modifier == "?" {
		column.NotNull = false
	} else {
		// Default: required fields are NOT NULL
		column.NotNull = true
	}

	// Parse field attributes
	if attributes != "" {
		r.parseFieldAttributes(attributes, column, table)
	}

	return column
}

// prismaTypeToSQL converts Prisma types to SQL types
func (r *Reader) prismaTypeToSQL(prismaType string) string {
	typeMap := map[string]string{
		"String":   "text",
		"Boolean":  "boolean",
		"Int":      "integer",
		"BigInt":   "bigint",
		"Float":    "double precision",
		"Decimal":  "decimal",
		"DateTime": "timestamp",
		"Json":     "jsonb",
		"Bytes":    "bytea",
	}

	if sqlType, ok := typeMap[prismaType]; ok {
		return sqlType
	}

	// If not a built-in type, it might be an enum or model reference
	// For enums, we'll use the enum name directly
	return prismaType
}

// parseFieldAttributes parses field attributes like @id, @unique, @default
func (r *Reader) parseFieldAttributes(attributes string, column *models.Column, table *models.Table) {
	// @id attribute
	if strings.Contains(attributes, "@id") {
		column.IsPrimaryKey = true
		column.NotNull = true
	}

	// @unique attribute
	if regexp.MustCompile(`@unique\b`).MatchString(attributes) {
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s", column.Name),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = []string{column.Name}
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint
	}

	// @default attribute - extract value with balanced parentheses
	if strings.Contains(attributes, "@default(") {
		defaultValue := r.extractDefaultValue(attributes)
		if defaultValue != "" {
			r.parseDefaultValue(defaultValue, column)
		}
	}

	// @updatedAt attribute - store in comment for now
	if strings.Contains(attributes, "@updatedAt") {
		if column.Comment != "" {
			column.Comment += "; @updatedAt"
		} else {
			column.Comment = "@updatedAt"
		}
	}

	// @relation attribute - we'll handle this in relationship resolution
	// For now, just note that this field is part of a relation
}

// extractDefaultValue extracts the default value from @default(...) handling nested parentheses
func (r *Reader) extractDefaultValue(attributes string) string {
	idx := strings.Index(attributes, "@default(")
	if idx == -1 {
		return ""
	}

	start := idx + len("@default(")
	depth := 1
	i := start

	for i < len(attributes) && depth > 0 {
		switch attributes[i] {
		case '(':
			depth++
		case ')':
			depth--
		}
		i++
	}

	if depth == 0 {
		return attributes[start : i-1]
	}

	return ""
}
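// Example (illustrative): extractDefaultValue(`x @default(dbgenerated("gen_random_uuid()"))`)
// returns `dbgenerated("gen_random_uuid()")`. The depth counter keeps nested
// parentheses balanced, where a naive scan to the first ')' would truncate the value.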

// parseDefaultValue parses Prisma default value expressions
func (r *Reader) parseDefaultValue(defaultExpr string, column *models.Column) {
	defaultExpr = strings.TrimSpace(defaultExpr)

	switch defaultExpr {
	case "autoincrement()":
		column.AutoIncrement = true
	case "now()":
		column.Default = "now()"
	case "uuid()":
		column.Default = "gen_random_uuid()"
	case "cuid()":
		// CUID is Prisma-specific, store in comment
		if column.Comment != "" {
			column.Comment += "; default(cuid())"
		} else {
			column.Comment = "default(cuid())"
		}
	case "true":
		column.Default = true
	case "false":
		column.Default = false
	default:
		// Check if it's a string literal
		if strings.HasPrefix(defaultExpr, "\"") && strings.HasSuffix(defaultExpr, "\"") {
			column.Default = defaultExpr[1 : len(defaultExpr)-1]
		} else if strings.HasPrefix(defaultExpr, "'") && strings.HasSuffix(defaultExpr, "'") {
			column.Default = defaultExpr[1 : len(defaultExpr)-1]
		} else {
			// Try to parse as number or enum value
			column.Default = defaultExpr
		}
	}
}

// parseBlockAttribute parses block-level attributes like @@id, @@unique, @@index
func (r *Reader) parseBlockAttribute(attrName, content string, table *models.Table) {
	// Extract column list from brackets [col1, col2]
	colListRegex := regexp.MustCompile(`\[(.*?)\]`)
	matches := colListRegex.FindStringSubmatch(content)
	if matches == nil {
		return
	}

	columnList := strings.Split(matches[1], ",")
	columns := make([]string, 0)
	for _, col := range columnList {
		columns = append(columns, strings.TrimSpace(col))
	}

	switch attrName {
	case "id":
		// Composite primary key
		for _, colName := range columns {
			if col, exists := table.Columns[colName]; exists {
				col.IsPrimaryKey = true
				col.NotNull = true
			}
		}
		// Also create a PK constraint
		pkConstraint := models.InitConstraint(
			fmt.Sprintf("pk_%s", table.Name),
			models.PrimaryKeyConstraint,
		)
		pkConstraint.Schema = table.Schema
		pkConstraint.Table = table.Name
		pkConstraint.Columns = columns
		table.Constraints[pkConstraint.Name] = pkConstraint

	case "unique":
		// Multi-column unique constraint
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s_%s", table.Name, strings.Join(columns, "_")),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = columns
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint

	case "index":
		// Index
		index := models.InitIndex(
			fmt.Sprintf("idx_%s_%s", table.Name, strings.Join(columns, "_")),
			table.Name,
			table.Schema,
		)
		index.Columns = columns
		table.Indexes[index.Name] = index
	}
}

// relationField stores information about a relation field for second-pass processing
type relationField struct {
	tableName    string
	fieldName    string
	relatedModel string
	isArray      bool
	relationAttr string
}

// resolveRelationships performs a second pass to resolve @relation attributes
func (r *Reader) resolveRelationships(schema *models.Schema) {
	// Build a map of table names for quick lookup
	tableMap := make(map[string]*models.Table)
	for _, table := range schema.Tables {
		tableMap[table.Name] = table
	}

	// First, we need to re-parse to find relation fields
	// We'll re-read the file to extract relation information
	if r.options.FilePath == "" {
		return
	}

	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return
	}

	relations := r.extractRelationFields(string(content))

	// Process explicit @relation attributes to create FK constraints
	for _, rel := range relations {
		if rel.relationAttr != "" {
			r.createConstraintFromRelation(rel, tableMap, schema)
		}
	}

	// Detect implicit many-to-many relationships
	r.detectImplicitManyToMany(relations, tableMap, schema)
}

// extractRelationFields extracts relation field information from the schema
func (r *Reader) extractRelationFields(content string) []relationField {
	relations := make([]relationField, 0)
	scanner := bufio.NewScanner(strings.NewReader(content))

	modelRegex := regexp.MustCompile(`^model\s+(\w+)\s*{`)
	fieldRegex := regexp.MustCompile(`^(\w+)\s+(\w+)(\?|\[\])?\s*(@.+)?`)

	var currentModel string
	inModel := false

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		if trimmed == "" || strings.HasPrefix(trimmed, "//") {
			continue
		}

		if matches := modelRegex.FindStringSubmatch(trimmed); matches != nil {
			currentModel = matches[1]
			inModel = true
			continue
		}

		if trimmed == "}" {
			inModel = false
			currentModel = ""
			continue
		}

		if inModel && currentModel != "" {
			if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
				fieldName := matches[1]
				fieldType := matches[2]
				modifier := matches[3]
				attributes := matches[4]

				// Check if this is a relation field (references another model or is an array)
				isPotentialRelation := modifier == "[]" || !r.isPrimitiveType(fieldType)

				if isPotentialRelation {
					rel := relationField{
						tableName:    currentModel,
						fieldName:    fieldName,
						relatedModel: fieldType,
						isArray:      modifier == "[]",
						relationAttr: attributes,
					}
					relations = append(relations, rel)
				}
			}
		}
	}

	return relations
}

// isPrimitiveType checks if a type is a Prisma primitive type
func (r *Reader) isPrimitiveType(typeName string) bool {
	primitives := []string{"String", "Boolean", "Int", "BigInt", "Float", "Decimal", "DateTime", "Json", "Bytes"}
	for _, p := range primitives {
		if typeName == p {
			return true
		}
	}
	return false
}

// isEnumType checks if a type name might be an enum
// Note: We can't definitively check against schema.Enums at parse time
// because enums might be defined after the model, so we just check
// if it starts with uppercase (Prisma convention for enums)
func (r *Reader) isEnumType(typeName string, table *models.Table) bool {
	// Simple heuristic: enum types start with uppercase letter
	// and are not known model names (though we can't check that yet)
	if len(typeName) > 0 && typeName[0] >= 'A' && typeName[0] <= 'Z' {
		// Additional check: primitive types are already handled above
		// So if it's uppercase and not primitive, it's likely an enum or model
		// We'll assume it's an enum if it's a single word
		return !strings.Contains(typeName, "_")
	}
	return false
}

// createConstraintFromRelation creates a FK constraint from a @relation attribute
func (r *Reader) createConstraintFromRelation(rel relationField, tableMap map[string]*models.Table, schema *models.Schema) {
	// Skip array fields (they are the inverse side of the relation)
	if rel.isArray {
		return
	}

	if rel.relationAttr == "" {
		return
	}

	// Parse @relation attribute
	relationRegex := regexp.MustCompile(`@relation\((.*?)\)`)
	matches := relationRegex.FindStringSubmatch(rel.relationAttr)
	if matches == nil {
		return
	}

	relationContent := matches[1]

	// Extract fields and references
	fieldsRegex := regexp.MustCompile(`fields:\s*\[(.*?)\]`)
	referencesRegex := regexp.MustCompile(`references:\s*\[(.*?)\]`)
	nameRegex := regexp.MustCompile(`name:\s*"([^"]+)"`)
	onDeleteRegex := regexp.MustCompile(`onDelete:\s*(\w+)`)
	onUpdateRegex := regexp.MustCompile(`onUpdate:\s*(\w+)`)

	fieldsMatch := fieldsRegex.FindStringSubmatch(relationContent)
	referencesMatch := referencesRegex.FindStringSubmatch(relationContent)

	if fieldsMatch == nil || referencesMatch == nil {
		return
	}

	// Parse field and reference column lists
	fieldCols := r.parseColumnList(fieldsMatch[1])
	refCols := r.parseColumnList(referencesMatch[1])

	if len(fieldCols) == 0 || len(refCols) == 0 {
		return
	}

	// Create FK constraint
	constraintName := fmt.Sprintf("fk_%s_%s", rel.tableName, fieldCols[0])

	// Check for custom name
	if nameMatch := nameRegex.FindStringSubmatch(relationContent); nameMatch != nil {
		constraintName = nameMatch[1]
	}

	constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
	constraint.Schema = "public"
	constraint.Table = rel.tableName
	constraint.Columns = fieldCols
	constraint.ReferencedSchema = "public"
	constraint.ReferencedTable = rel.relatedModel
	constraint.ReferencedColumns = refCols

	// Parse referential actions
	if onDeleteMatch := onDeleteRegex.FindStringSubmatch(relationContent); onDeleteMatch != nil {
		constraint.OnDelete = onDeleteMatch[1]
	}

	if onUpdateMatch := onUpdateRegex.FindStringSubmatch(relationContent); onUpdateMatch != nil {
		constraint.OnUpdate = onUpdateMatch[1]
	}

	// Add constraint to table
	if table, exists := tableMap[rel.tableName]; exists {
		table.Constraints[constraint.Name] = constraint
	}
}

// parseColumnList parses a comma-separated list of column names
func (r *Reader) parseColumnList(list string) []string {
	parts := strings.Split(list, ",")
	result := make([]string, 0)
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		if trimmed != "" {
			result = append(result, trimmed)
		}
	}
	return result
}

// detectImplicitManyToMany detects implicit M2M relationships and creates join tables
func (r *Reader) detectImplicitManyToMany(relations []relationField, tableMap map[string]*models.Table, schema *models.Schema) {
	// Group relations by model pairs
	type modelPair struct {
		model1 string
		model2 string
	}

	pairMap := make(map[modelPair][]relationField)

	for _, rel := range relations {
		if !rel.isArray || rel.relationAttr != "" {
			// Skip non-array fields and explicit relations
			continue
		}

		// Create a normalized pair (alphabetically sorted to avoid duplicates)
		pair := modelPair{}
		if rel.tableName < rel.relatedModel {
			pair.model1 = rel.tableName
			pair.model2 = rel.relatedModel
		} else {
			pair.model1 = rel.relatedModel
			pair.model2 = rel.tableName
		}

		pairMap[pair] = append(pairMap[pair], rel)
	}

	// Check for pairs with arrays on both sides (implicit M2M)
	for pair, rels := range pairMap {
		if len(rels) >= 2 {
			// This is an implicit many-to-many relationship
			r.createImplicitJoinTable(pair.model1, pair.model2, tableMap, schema)
		}
	}
}
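// Example (illustrative): given `tags Tag[]` on Post and `posts Post[]` on Tag,
// with no @relation attribute on either side, both fields group under the same
// (Post, Tag) pair above, so a `_PostToTag` join table is synthesized below.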

// createImplicitJoinTable creates a virtual join table for implicit M2M relations
func (r *Reader) createImplicitJoinTable(model1, model2 string, tableMap map[string]*models.Table, schema *models.Schema) {
	// Prisma naming convention: _Model1ToModel2 (alphabetically sorted)
	joinTableName := fmt.Sprintf("_%sTo%s", model1, model2)

	// Check if join table already exists
	if _, exists := tableMap[joinTableName]; exists {
		return
	}

	// Create join table
	joinTable := models.InitTable(joinTableName, "public")

	// Get primary keys from both tables
	pk1 := r.getPrimaryKeyColumn(tableMap[model1])
	pk2 := r.getPrimaryKeyColumn(tableMap[model2])

	if pk1 == nil || pk2 == nil {
		return // Can't create join table without PKs
	}

	// Create FK columns in join table
	fkCol1Name := fmt.Sprintf("%sId", model1)
	fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
	fkCol1.Type = pk1.Type
	fkCol1.NotNull = true
	joinTable.Columns[fkCol1Name] = fkCol1

	fkCol2Name := fmt.Sprintf("%sId", model2)
	fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
	fkCol2.Type = pk2.Type
	fkCol2.NotNull = true
	joinTable.Columns[fkCol2Name] = fkCol2

	// Create composite primary key
	pkConstraint := models.InitConstraint(
		fmt.Sprintf("pk_%s", joinTableName),
		models.PrimaryKeyConstraint,
	)
	pkConstraint.Schema = "public"
	pkConstraint.Table = joinTableName
	pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
	joinTable.Constraints[pkConstraint.Name] = pkConstraint

	// Mark columns as PK
	fkCol1.IsPrimaryKey = true
	fkCol2.IsPrimaryKey = true

	// Create FK constraints
	fk1 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, model1),
		models.ForeignKeyConstraint,
	)
	fk1.Schema = "public"
	fk1.Table = joinTableName
	fk1.Columns = []string{fkCol1Name}
	fk1.ReferencedSchema = "public"
	fk1.ReferencedTable = model1
	fk1.ReferencedColumns = []string{pk1.Name}
	fk1.OnDelete = "Cascade"
	joinTable.Constraints[fk1.Name] = fk1

	fk2 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, model2),
		models.ForeignKeyConstraint,
	)
	fk2.Schema = "public"
	fk2.Table = joinTableName
	fk2.Columns = []string{fkCol2Name}
	fk2.ReferencedSchema = "public"
	fk2.ReferencedTable = model2
	fk2.ReferencedColumns = []string{pk2.Name}
	fk2.OnDelete = "Cascade"
	joinTable.Constraints[fk2.Name] = fk2

	// Add join table to schema
	schema.Tables = append(schema.Tables, joinTable)
	tableMap[joinTableName] = joinTable
}

// getPrimaryKeyColumn returns the primary key column of a table
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
	if table == nil {
		return nil
	}

	for _, col := range table.Columns {
		if col.IsPrimaryKey {
			return col
		}
	}

	return nil
}
122
pkg/readers/typeorm/README.md
Normal file
@@ -0,0 +1,122 @@
# TypeORM Reader

Reads TypeScript files containing TypeORM entity definitions and extracts database schema information.

## Overview

The TypeORM Reader parses TypeScript source files that define TypeORM entities (classes with TypeORM decorators) and converts them into RelSpec's internal database model representation.

## Features

- Parses TypeORM decorators and entity definitions
- Extracts table, column, and relationship information
- Supports various TypeORM column types and options
- Handles constraints, indexes, and relationships

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/entities/",
	}

	reader := typeorm.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read TypeORM entities and convert to JSON
relspec --input typeorm --in-file entities/ --output json --out-file schema.json

# Convert TypeORM to GORM models
relspec --input typeorm --in-file User.ts --output gorm --out-file models.go
```

## Example TypeORM Entity

```typescript
import {
  Entity,
  PrimaryGeneratedColumn,
  Column,
  CreateDateColumn,
  OneToMany,
  ManyToOne,
  JoinColumn,
} from 'typeorm';

@Entity('users')
export class User {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ type: 'varchar', length: 50, unique: true })
  username: string;

  @Column({ type: 'varchar', length: 100 })
  email: string;

  @CreateDateColumn({ name: 'created_at' })
  createdAt: Date;

  @OneToMany(() => Post, (post) => post.user)
  posts: Post[];
}

@Entity('posts')
export class Post {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ name: 'user_id' })
  userId: number;

  @Column({ type: 'varchar', length: 200 })
  title: string;

  @Column({ type: 'text' })
  content: string;

  @ManyToOne(() => User, (user) => user.posts, { onDelete: 'CASCADE' })
  @JoinColumn({ name: 'user_id' })
  user: User;
}
```

## Supported TypeORM Decorators

- `@Entity()` - Entity/table definition
- `@PrimaryGeneratedColumn()` - Auto-increment primary key
- `@PrimaryColumn()` - Primary key
- `@Column()` - Column definition
- `@CreateDateColumn()` - Auto-set creation timestamp
- `@UpdateDateColumn()` - Auto-update timestamp
- `@OneToMany()` - One-to-many relationship
- `@ManyToOne()` - Many-to-one relationship
- `@JoinColumn()` - Foreign key column
- `@Index()` - Index definition
- `@Unique()` - Unique constraint

## Notes

- Schema name can be specified in `@Entity()` decorator
- Supports both JavaScript and TypeScript entity files
- Relationship metadata is extracted from decorators
785
pkg/readers/typeorm/reader.go
Normal file
@@ -0,0 +1,785 @@
package typeorm

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for TypeORM entity files
type Reader struct {
	options *readers.ReaderOptions
}

// NewReader creates a new TypeORM reader with the given options
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
	}
}

// ReadDatabase reads and parses TypeORM entity files, returning a Database model
func (r *Reader) ReadDatabase() (*models.Database, error) {
	if r.options.FilePath == "" {
		return nil, fmt.Errorf("file path is required for TypeORM reader")
	}

	content, err := os.ReadFile(r.options.FilePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	return r.parseTypeORM(string(content))
}

// ReadSchema reads and parses TypeORM entity files, returning a Schema model
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}

	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in TypeORM entities")
	}

	return db.Schemas[0], nil
}

// ReadTable reads and parses TypeORM entity files, returning a Table model
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}

	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in TypeORM entities")
	}

	return schema.Tables[0], nil
}

// entityInfo stores information about an entity during parsing
type entityInfo struct {
	name       string
	fields     []fieldInfo
	decorators []string
}

// fieldInfo stores information about a field during parsing
type fieldInfo struct {
	name       string
	typeName   string
	decorators []string
}

// parseTypeORM parses TypeORM entity content and returns a Database model
func (r *Reader) parseTypeORM(content string) (*models.Database, error) {
	db := models.InitDatabase("database")
	schema := models.InitSchema("public")

	// Parse entities
	entities := r.extractEntities(content)

	// Convert entities to tables and views
	tableMap := make(map[string]*models.Table)
	for _, entity := range entities {
		// Check if this is a view
		isView := false
		for _, decorator := range entity.decorators {
			if strings.HasPrefix(decorator, "@ViewEntity") {
				isView = true
				break
			}
		}

		if isView {
			view := r.entityToView(entity)
			schema.Views = append(schema.Views, view)
		} else {
			table := r.entityToTable(entity)
			schema.Tables = append(schema.Tables, table)
			tableMap[table.Name] = table
		}
	}

	// Second pass: resolve relationships
	r.resolveRelationships(entities, tableMap, schema)

	db.Schemas = append(db.Schemas, schema)
	return db, nil
}

// extractEntities extracts entity and view definitions from TypeORM content
func (r *Reader) extractEntities(content string) []entityInfo {
	entities := make([]entityInfo, 0)

	// First, extract decorators properly (handling multi-line)
	content = r.normalizeDecorators(content)

	scanner := bufio.NewScanner(strings.NewReader(content))

	entityRegex := regexp.MustCompile(`^export\s+class\s+(\w+)`)
	decoratorRegex := regexp.MustCompile(`^\s*@(\w+)(\([^)]*\))?`)
	fieldRegex := regexp.MustCompile(`^\s*(\w+):\s*([^;]+);`)

	var currentEntity *entityInfo
	var pendingDecorators []string
	inClass := false

	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)

		// Skip empty lines and comments
		if trimmed == "" || strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "import ") {
			continue
		}

		// Check for decorator
		if matches := decoratorRegex.FindStringSubmatch(trimmed); matches != nil {
			decorator := matches[0]
			pendingDecorators = append(pendingDecorators, decorator)
			continue
		}

		// Check for entity/view class
		if matches := entityRegex.FindStringSubmatch(trimmed); matches != nil {
			// Save previous entity if exists
			if currentEntity != nil {
				entities = append(entities, *currentEntity)
			}

			currentEntity = &entityInfo{
				name:       matches[1],
				fields:     make([]fieldInfo, 0),
				decorators: pendingDecorators,
			}
			pendingDecorators = []string{}
			inClass = true
			continue
		}

		// Check for class end
		if inClass && trimmed == "}" {
			if currentEntity != nil {
				entities = append(entities, *currentEntity)
				currentEntity = nil
			}
			inClass = false
			pendingDecorators = []string{}
			continue
		}

		// Check for field definition
		if inClass && currentEntity != nil {
			if matches := fieldRegex.FindStringSubmatch(trimmed); matches != nil {
				fieldName := matches[1]
				fieldType := strings.TrimSpace(matches[2])

				field := fieldInfo{
					name:       fieldName,
					typeName:   fieldType,
					decorators: pendingDecorators,
				}
				currentEntity.fields = append(currentEntity.fields, field)
				pendingDecorators = []string{}
			}
		}
	}

	// Save last entity
	if currentEntity != nil {
		entities = append(entities, *currentEntity)
	}

	return entities
}
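// Example (illustrative): in the README's User entity, the decorator line
// `@Column({ type: 'varchar', length: 50, unique: true })` is buffered in
// pendingDecorators until the following `username: string;` line matches
// fieldRegex, at which point both are recorded together as one fieldInfo.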

// normalizeDecorators combines multi-line decorators into single lines
func (r *Reader) normalizeDecorators(content string) string {
	// Replace multi-line decorators with single-line versions
	// Match @Decorator({ ... }) across multiple lines
	decoratorRegex := regexp.MustCompile(`@(\w+)\s*\(\s*\{([^}]*)\}\s*\)`)

	return decoratorRegex.ReplaceAllStringFunc(content, func(match string) string {
		// Remove newlines and extra spaces from decorator
		match = strings.ReplaceAll(match, "\n", " ")
		match = strings.ReplaceAll(match, "\r", " ")
		// Normalize multiple spaces
		spaceRegex := regexp.MustCompile(`\s+`)
		match = spaceRegex.ReplaceAllString(match, " ")
		return match
	})
}
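// Example (illustrative): a decorator spread over several lines, such as
//
//	@Column({
//	  type: 'varchar',
//	  length: 50,
//	})
//
// collapses to `@Column({ type: 'varchar', length: 50, })`, so the
// line-oriented scanner in extractEntities can match it in a single pass.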
|
||||||
|
// entityToView converts a view entity to a view
|
||||||
|
func (r *Reader) entityToView(entity entityInfo) *models.View {
|
||||||
|
// Parse @ViewEntity decorator options
|
||||||
|
viewName := entity.name
|
||||||
|
schemaName := "public"
|
||||||
|
var expression string
|
||||||
|
|
||||||
|
for _, decorator := range entity.decorators {
|
||||||
|
if strings.HasPrefix(decorator, "@ViewEntity") {
|
||||||
|
// Extract options from @ViewEntity({ ... })
|
||||||
|
options := r.parseViewEntityOptions(decorator)
|
||||||
|
|
||||||
|
// Check for custom view name
|
||||||
|
if name, ok := options["name"]; ok {
|
||||||
|
viewName = name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for schema
|
||||||
|
if schema, ok := options["schema"]; ok {
|
||||||
|
schemaName = schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for expression (SQL definition)
|
||||||
|
if expr, ok := options["expression"]; ok {
|
||||||
|
expression = expr
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
view := models.InitView(viewName, schemaName)
|
||||||
|
view.Definition = expression
|
||||||
|
|
||||||
|
// Add columns from fields (if any are defined in the view class)
|
||||||
|
for _, field := range entity.fields {
|
||||||
|
column := models.InitColumn(field.name, viewName, schemaName)
|
||||||
|
column.Type = r.typeScriptTypeToSQL(field.typeName)
|
||||||
|
view.Columns[column.Name] = column
|
||||||
|
}
|
||||||
|
|
||||||
|
return view
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseViewEntityOptions parses @ViewEntity decorator options
func (r *Reader) parseViewEntityOptions(decorator string) map[string]string {
	options := make(map[string]string)

	// Extract content between parentheses
	start := strings.Index(decorator, "(")
	end := strings.LastIndex(decorator, ")")

	if start == -1 || end == -1 || start >= end {
		return options
	}

	content := decorator[start+1 : end]

	// Skip if empty @ViewEntity()
	if strings.TrimSpace(content) == "" {
		return options
	}

	// Parse name: "value"
	nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
	if matches := nameRegex.FindStringSubmatch(content); matches != nil {
		options["name"] = matches[1]
	}

	// Parse schema: "value"
	schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
	if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
		options["schema"] = matches[1]
	}

	// Parse expression: ` ... ` (can be multi-line, captured as single line after normalization)
	// Look for expression followed by backtick or quote
	expressionRegex := regexp.MustCompile(`expression:\s*` + "`" + `([^` + "`" + `]+)` + "`")
	if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
		options["expression"] = strings.TrimSpace(matches[1])
	} else {
		// Try with regular quotes
		expressionRegex = regexp.MustCompile(`expression:\s*["']([^"']+)["']`)
		if matches := expressionRegex.FindStringSubmatch(content); matches != nil {
			options["expression"] = strings.TrimSpace(matches[1])
		}
	}

	return options
}

// entityToTable converts an entity to a table
func (r *Reader) entityToTable(entity entityInfo) *models.Table {
	// Parse @Entity decorator options
	tableName := entity.name
	schemaName := "public"
	var entityOptions map[string]string

	for _, decorator := range entity.decorators {
		if strings.HasPrefix(decorator, "@Entity") {
			// Extract options from @Entity({ ... })
			entityOptions = r.parseEntityOptions(decorator)

			// Check for custom table name
			if name, ok := entityOptions["name"]; ok {
				tableName = name
			}

			// Check for schema
			if schema, ok := entityOptions["schema"]; ok {
				schemaName = schema
			}
			break
		}
	}

	table := models.InitTable(tableName, schemaName)

	// Store additional metadata from @Entity options
	if entityOptions != nil {
		// Store database name in metadata
		if database, ok := entityOptions["database"]; ok {
			if table.Metadata == nil {
				table.Metadata = make(map[string]any)
			}
			table.Metadata["database"] = database
		}

		// Store engine in metadata
		if engine, ok := entityOptions["engine"]; ok {
			if table.Metadata == nil {
				table.Metadata = make(map[string]any)
			}
			table.Metadata["engine"] = engine
		}

		// Store original class name if different from table name
		if entity.name != tableName {
			if table.Metadata == nil {
				table.Metadata = make(map[string]any)
			}
			table.Metadata["class_name"] = entity.name
		}
	}

	for _, field := range entity.fields {
		// Skip relation fields (they'll be handled in relationship resolution)
		if r.isRelationField(field) {
			continue
		}

		column := r.fieldToColumn(field, table)
		if column != nil {
			table.Columns[column.Name] = column
		}
	}

	return table
}

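// Example (illustrative, matching the regexes below): a decorator such as
//
//	@Entity({ name: "app_users", schema: "app", database: "main", engine: "InnoDB" })
//
// parses to {"name": "app_users", "schema": "app", "database": "main", "engine": "InnoDB"}.
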
// parseEntityOptions parses @Entity decorator options
func (r *Reader) parseEntityOptions(decorator string) map[string]string {
	options := make(map[string]string)

	// Extract content between parentheses
	start := strings.Index(decorator, "(")
	end := strings.LastIndex(decorator, ")")

	if start == -1 || end == -1 || start >= end {
		return options
	}

	content := decorator[start+1 : end]

	// Skip if empty @Entity()
	if strings.TrimSpace(content) == "" {
		return options
	}

	// Parse name: "value" or name: 'value'
	nameRegex := regexp.MustCompile(`name:\s*["']([^"']+)["']`)
	if matches := nameRegex.FindStringSubmatch(content); matches != nil {
		options["name"] = matches[1]
	}

	// Parse schema: "value"
	schemaRegex := regexp.MustCompile(`schema:\s*["']([^"']+)["']`)
	if matches := schemaRegex.FindStringSubmatch(content); matches != nil {
		options["schema"] = matches[1]
	}

	// Parse database: "value"
	databaseRegex := regexp.MustCompile(`database:\s*["']([^"']+)["']`)
	if matches := databaseRegex.FindStringSubmatch(content); matches != nil {
		options["database"] = matches[1]
	}

	// Parse engine: "value"
	engineRegex := regexp.MustCompile(`engine:\s*["']([^"']+)["']`)
	if matches := engineRegex.FindStringSubmatch(content); matches != nil {
		options["engine"] = matches[1]
	}

	return options
}

// isRelationField checks if a field is a relation field
func (r *Reader) isRelationField(field fieldInfo) bool {
	for _, decorator := range field.decorators {
		if strings.Contains(decorator, "@ManyToOne") ||
			strings.Contains(decorator, "@OneToMany") ||
			strings.Contains(decorator, "@ManyToMany") ||
			strings.Contains(decorator, "@OneToOne") {
			return true
		}
	}
	return false
}

// fieldToColumn converts a field to a column
func (r *Reader) fieldToColumn(field fieldInfo, table *models.Table) *models.Column {
	column := models.InitColumn(field.name, table.Name, table.Schema)

	// Map TypeScript type to SQL type
	column.Type = r.typeScriptTypeToSQL(field.typeName)

	// Default to NOT NULL
	column.NotNull = true

	// Parse decorators
	for _, decorator := range field.decorators {
		r.parseColumnDecorator(decorator, column, table)
	}

	return column
}

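// Example (illustrative, from the type map below): "string" maps to "text",
// "number" to "integer", "Date" to "timestamp"; array and nullable forms such
// as "string[]" or "number | null" are normalized before lookup, and unknown
// types fall back to "text".
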
// typeScriptTypeToSQL converts TypeScript types to SQL types
func (r *Reader) typeScriptTypeToSQL(tsType string) string {
	// Remove array brackets and optional markers
	tsType = strings.TrimSuffix(tsType, "[]")
	tsType = strings.TrimSuffix(tsType, " | null")

	typeMap := map[string]string{
		"string":  "text",
		"number":  "integer",
		"boolean": "boolean",
		"Date":    "timestamp",
		"any":     "jsonb",
	}

	for tsPattern, sqlType := range typeMap {
		if strings.Contains(tsType, tsPattern) {
			return sqlType
		}
	}

	// Default to text
	return "text"
}

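// Example (illustrative): @PrimaryGeneratedColumn('uuid') yields a "uuid"
// column defaulting to gen_random_uuid(); @CreateDateColumn() yields a
// NOT NULL "timestamp" column defaulting to now().
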
// parseColumnDecorator parses a column decorator
func (r *Reader) parseColumnDecorator(decorator string, column *models.Column, table *models.Table) {
	// @PrimaryGeneratedColumn
	if strings.HasPrefix(decorator, "@PrimaryGeneratedColumn") {
		column.IsPrimaryKey = true
		column.NotNull = true

		if strings.Contains(decorator, "'uuid'") {
			column.Type = "uuid"
			column.Default = "gen_random_uuid()"
		} else if strings.Contains(decorator, "'increment'") || strings.Contains(decorator, "()") {
			column.AutoIncrement = true
		}
		return
	}

	// @Column
	if strings.HasPrefix(decorator, "@Column") {
		r.parseColumnOptions(decorator, column, table)
		return
	}

	// @CreateDateColumn
	if strings.HasPrefix(decorator, "@CreateDateColumn") {
		column.Type = "timestamp"
		column.Default = "now()"
		column.NotNull = true
		return
	}

	// @UpdateDateColumn
	if strings.HasPrefix(decorator, "@UpdateDateColumn") {
		column.Type = "timestamp"
		column.NotNull = true
		if column.Comment != "" {
			column.Comment += "; auto-update"
		} else {
			column.Comment = "auto-update"
		}
		return
	}
}

// parseColumnOptions parses @Column decorator options
func (r *Reader) parseColumnOptions(decorator string, column *models.Column, table *models.Table) {
	// Extract content between parentheses
	start := strings.Index(decorator, "(")
	end := strings.LastIndex(decorator, ")")

	if start == -1 || end == -1 || start >= end {
		return
	}

	content := decorator[start+1 : end]

	// Check for shorthand type: @Column('text')
	if strings.HasPrefix(content, "'") || strings.HasPrefix(content, "\"") {
		typeStr := strings.Trim(content, "'\"`")
		column.Type = typeStr
		return
	}

	// Parse options object
	if strings.Contains(content, "type:") {
		typeRegex := regexp.MustCompile(`type:\s*['"]([^'"]+)['"]`)
		if matches := typeRegex.FindStringSubmatch(content); matches != nil {
			column.Type = matches[1]
		}
	}

	if strings.Contains(content, "nullable: true") || strings.Contains(content, "nullable:true") {
		column.NotNull = false
	}

	if strings.Contains(content, "unique: true") || strings.Contains(content, "unique:true") {
		uniqueConstraint := models.InitConstraint(
			fmt.Sprintf("uq_%s", column.Name),
			models.UniqueConstraint,
		)
		uniqueConstraint.Schema = table.Schema
		uniqueConstraint.Table = table.Name
		uniqueConstraint.Columns = []string{column.Name}
		table.Constraints[uniqueConstraint.Name] = uniqueConstraint
	}

	if strings.Contains(content, "default:") {
		defaultRegex := regexp.MustCompile(`default:\s*['"]?([^,}'"]+)['"]?`)
		if matches := defaultRegex.FindStringSubmatch(content); matches != nil {
			defaultValue := strings.TrimSpace(matches[1])
			defaultValue = strings.Trim(defaultValue, "'\"")
			column.Default = defaultValue
		}
	}
}

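// Example (illustrative of the naming rules below): a Post entity with a
// @ManyToOne(() => User) field "author" gains an "authorId" column and an
// "fk_Post_author" constraint; a @ManyToMany + @JoinTable pair between User
// and Group produces a "user_group" join table.
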
// resolveRelationships resolves TypeORM relationships
func (r *Reader) resolveRelationships(entities []entityInfo, tableMap map[string]*models.Table, schema *models.Schema) {
	// Track M2M relations that need join tables
	type m2mRelation struct {
		ownerEntity  string
		targetEntity string
		ownerField   string
	}
	m2mRelations := make([]m2mRelation, 0)

	for _, entity := range entities {
		table := tableMap[entity.name]
		if table == nil {
			continue
		}

		for _, field := range entity.fields {
			// Handle @ManyToOne relations
			if r.hasDecorator(field, "@ManyToOne") {
				r.createManyToOneConstraint(field, entity.name, table, tableMap)
			}

			// Track @ManyToMany relations with @JoinTable
			if r.hasDecorator(field, "@ManyToMany") && r.hasDecorator(field, "@JoinTable") {
				targetEntity := r.extractRelationTarget(field)
				if targetEntity != "" {
					m2mRelations = append(m2mRelations, m2mRelation{
						ownerEntity:  entity.name,
						targetEntity: targetEntity,
						ownerField:   field.name,
					})
				}
			}
		}
	}

	// Create join tables for M2M relations
	for _, rel := range m2mRelations {
		r.createManyToManyJoinTable(rel.ownerEntity, rel.targetEntity, tableMap, schema)
	}
}

// hasDecorator checks if a field has a specific decorator
func (r *Reader) hasDecorator(field fieldInfo, decoratorName string) bool {
	for _, decorator := range field.decorators {
		if strings.HasPrefix(decorator, decoratorName) {
			return true
		}
	}
	return false
}

// extractRelationTarget extracts the target entity from a relation decorator
func (r *Reader) extractRelationTarget(field fieldInfo) string {
	// Remove array brackets from type
	targetType := strings.TrimSuffix(field.typeName, "[]")
	targetType = strings.TrimSpace(targetType)
	return targetType
}

// createManyToOneConstraint creates a foreign key constraint for @ManyToOne
func (r *Reader) createManyToOneConstraint(field fieldInfo, entityName string, table *models.Table, tableMap map[string]*models.Table) {
	targetEntity := r.extractRelationTarget(field)
	if targetEntity == "" {
		return
	}

	// Get target table to find its PK
	targetTable := tableMap[targetEntity]
	if targetTable == nil {
		return
	}

	targetPK := r.getPrimaryKeyColumn(targetTable)
	if targetPK == nil {
		return
	}

	// Create FK column
	fkColumnName := fmt.Sprintf("%sId", field.name)
	fkColumn := models.InitColumn(fkColumnName, table.Name, table.Schema)
	fkColumn.Type = targetPK.Type

	// Check if nullable option is set in @ManyToOne decorator
	isNullable := false
	for _, decorator := range field.decorators {
		if strings.Contains(decorator, "nullable: true") || strings.Contains(decorator, "nullable:true") {
			isNullable = true
			break
		}
	}
	fkColumn.NotNull = !isNullable

	table.Columns[fkColumnName] = fkColumn

	// Create FK constraint
	constraint := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", entityName, field.name),
		models.ForeignKeyConstraint,
	)
	constraint.Schema = table.Schema
	constraint.Table = table.Name
	constraint.Columns = []string{fkColumnName}
	constraint.ReferencedSchema = "public"
	constraint.ReferencedTable = targetEntity
	constraint.ReferencedColumns = []string{targetPK.Name}
	constraint.OnDelete = "CASCADE"

	table.Constraints[constraint.Name] = constraint
}

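// Example (illustrative): entities "User" and "Group" produce join table
// "user_group" with composite primary key (userId, groupId) and CASCADE
// foreign keys back to both owning tables.
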
// createManyToManyJoinTable creates a join table for M2M relations
func (r *Reader) createManyToManyJoinTable(entity1, entity2 string, tableMap map[string]*models.Table, schema *models.Schema) {
	// TypeORM naming convention: entity1_entity2_entity1field
	// We'll simplify to entity1_entity2
	joinTableName := fmt.Sprintf("%s_%s", strings.ToLower(entity1), strings.ToLower(entity2))

	// Check if join table already exists
	if _, exists := tableMap[joinTableName]; exists {
		return
	}

	// Get PKs from both tables
	table1 := tableMap[entity1]
	table2 := tableMap[entity2]
	if table1 == nil || table2 == nil {
		return
	}

	pk1 := r.getPrimaryKeyColumn(table1)
	pk2 := r.getPrimaryKeyColumn(table2)
	if pk1 == nil || pk2 == nil {
		return
	}

	// Create join table
	joinTable := models.InitTable(joinTableName, "public")

	// Create FK columns
	fkCol1Name := fmt.Sprintf("%sId", strings.ToLower(entity1))
	fkCol1 := models.InitColumn(fkCol1Name, joinTableName, "public")
	fkCol1.Type = pk1.Type
	fkCol1.NotNull = true
	fkCol1.IsPrimaryKey = true
	joinTable.Columns[fkCol1Name] = fkCol1

	fkCol2Name := fmt.Sprintf("%sId", strings.ToLower(entity2))
	fkCol2 := models.InitColumn(fkCol2Name, joinTableName, "public")
	fkCol2.Type = pk2.Type
	fkCol2.NotNull = true
	fkCol2.IsPrimaryKey = true
	joinTable.Columns[fkCol2Name] = fkCol2

	// Create composite PK constraint
	pkConstraint := models.InitConstraint(
		fmt.Sprintf("pk_%s", joinTableName),
		models.PrimaryKeyConstraint,
	)
	pkConstraint.Schema = "public"
	pkConstraint.Table = joinTableName
	pkConstraint.Columns = []string{fkCol1Name, fkCol2Name}
	joinTable.Constraints[pkConstraint.Name] = pkConstraint

	// Create FK constraints
	fk1 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, entity1),
		models.ForeignKeyConstraint,
	)
	fk1.Schema = "public"
	fk1.Table = joinTableName
	fk1.Columns = []string{fkCol1Name}
	fk1.ReferencedSchema = "public"
	fk1.ReferencedTable = entity1
	fk1.ReferencedColumns = []string{pk1.Name}
	fk1.OnDelete = "CASCADE"
	joinTable.Constraints[fk1.Name] = fk1

	fk2 := models.InitConstraint(
		fmt.Sprintf("fk_%s_%s", joinTableName, entity2),
		models.ForeignKeyConstraint,
	)
	fk2.Schema = "public"
	fk2.Table = joinTableName
	fk2.Columns = []string{fkCol2Name}
	fk2.ReferencedSchema = "public"
	fk2.ReferencedTable = entity2
	fk2.ReferencedColumns = []string{pk2.Name}
	fk2.OnDelete = "CASCADE"
	joinTable.Constraints[fk2.Name] = fk2

	// Add join table to schema
	schema.Tables = append(schema.Tables, joinTable)
	tableMap[joinTableName] = joinTable
}

// getPrimaryKeyColumn returns the primary key column of a table
func (r *Reader) getPrimaryKeyColumn(table *models.Table) *models.Column {
	if table == nil {
		return nil
	}

	for _, col := range table.Columns {
		if col.IsPrimaryKey {
			return col
		}
	}

	return nil
}
159 pkg/readers/yaml/README.md Normal file
@@ -0,0 +1,159 @@

# YAML Reader

Reads database schema definitions from YAML files.

## Overview

The YAML Reader parses YAML files that define database schemas in RelSpec's canonical YAML format and converts them into RelSpec's internal database model representation.

## Features

- Reads RelSpec's standard YAML schema format
- Human-readable alternative to JSON format
- Supports complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences

## Usage

### Basic Example

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
)

func main() {
	options := &readers.ReaderOptions{
		FilePath: "/path/to/schema.yaml",
	}

	reader := yaml.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}

	fmt.Printf("Found %d schemas\n", len(db.Schemas))
}
```

### CLI Example

```bash
# Read YAML schema and convert to GORM models
relspec --input yaml --in-file schema.yaml --output gorm --out-file models.go

# Convert YAML to PostgreSQL DDL
relspec --input yaml --in-file database.yaml --output pgsql --out-file schema.sql

# Transform YAML to JSON
relspec --input yaml --in-file schema.yaml --output json --out-file schema.json
```

## Example YAML Schema

```yaml
name: myapp
database_type: postgresql
schemas:
  - name: public
    tables:
      - name: users
        schema: public
        columns:
          id:
            name: id
            type: bigint
            not_null: true
            is_primary_key: true
            auto_increment: true
            sequence: 1
          username:
            name: username
            type: varchar
            length: 50
            not_null: true
            sequence: 2
          email:
            name: email
            type: varchar
            length: 100
            not_null: true
            sequence: 3
        constraints:
          pk_users:
            name: pk_users
            type: PRIMARY KEY
            columns:
              - id
          uq_users_username:
            name: uq_users_username
            type: UNIQUE
            columns:
              - username
        indexes:
          idx_users_email:
            name: idx_users_email
            columns:
              - email
            unique: false
            type: btree
      - name: posts
        schema: public
        columns:
          id:
            name: id
            type: bigint
            not_null: true
            is_primary_key: true
            sequence: 1
          user_id:
            name: user_id
            type: bigint
            not_null: true
            sequence: 2
          title:
            name: title
            type: varchar
            length: 200
            not_null: true
            sequence: 3
        constraints:
          fk_posts_user_id:
            name: fk_posts_user_id
            type: FOREIGN KEY
            columns:
              - user_id
            referenced_table: users
            referenced_schema: public
            referenced_columns:
              - id
            on_delete: CASCADE
            on_update: NO ACTION
```

## Schema Structure

The YAML format mirrors RelSpec's internal model structure with human-readable syntax:

- Database level: `name`, `database_type`, `schemas`
- Schema level: `name`, `tables`, `views`, `sequences`
- Table level: `name`, `schema`, `columns`, `constraints`, `indexes`, `relationships`
- Column level: `name`, `type`, `length`, `not_null`, `default`, etc.

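Since the reader returns a regular RelSpec model, the structure above can be walked directly in code. A minimal sketch, reusing `db` from the Basic Example (the `Schemas`, `Tables`, and `Columns` fields follow the model usage shown elsewhere in this repository):

```go
// Walk the parsed model: schemas -> tables -> columns.
for _, schema := range db.Schemas {
	for _, table := range schema.Tables {
		fmt.Printf("table %s.%s\n", table.Schema, table.Name)
		for _, column := range table.Columns {
			fmt.Printf("  column %s (%s)\n", column.Name, column.Type)
		}
	}
}
```
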
## Notes

- YAML format is more human-readable than JSON
- Ideal for manual editing and version control
- Comments are supported in YAML
- Preserves complete schema information
- Can be used for configuration and documentation
129 pkg/writers/bun/README.md Normal file
@@ -0,0 +1,129 @@

# Bun Writer

Generates Go source files with Bun model definitions from database schema information.

## Overview

The Bun Writer converts RelSpec's internal database model representation into Go source code with Bun struct definitions, complete with proper tags, relationships, and table configuration.

## Features

- Generates Bun-compatible Go structs
- Creates proper `bun` struct tags
- Adds relationship fields
- Supports both single-file and multi-file output
- Maps SQL types to Go types
- Handles nullable fields with sql.Null* types
- Generates table aliases

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath:  "models.go",
		PackageName: "models",
	}

	writer := bun.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate Bun models from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output bun \
  --out-file models.go \
  --package models

# Convert GORM models to Bun
relspec --input gorm --in-file gorm_models.go --output bun --out-file bun_models.go

# Multi-file output
relspec --input json --in-file schema.json --output bun --out-file models/
```

## Generated Code Example

```go
package models

import (
	"database/sql"
	"time"

	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64          `bun:"id,pk,autoincrement" json:"id"`
	Username  string         `bun:"username,notnull,unique" json:"username"`
	Email     string         `bun:"email,notnull" json:"email"`
	Bio       sql.NullString `bun:"bio" json:"bio,omitempty"`
	CreatedAt time.Time      `bun:"created_at,notnull,default:now()" json:"created_at"`

	// Relationships
	Posts []*Post `bun:"rel:has-many,join:id=user_id" json:"posts,omitempty"`
}

type Post struct {
	bun.BaseModel `bun:"table:posts,alias:p"`

	ID      int64          `bun:"id,pk" json:"id"`
	UserID  int64          `bun:"user_id,notnull" json:"user_id"`
	Title   string         `bun:"title,notnull" json:"title"`
	Content sql.NullString `bun:"content" json:"content,omitempty"`

	// Belongs to
	User *User `bun:"rel:belongs-to,join:user_id=id" json:"user,omitempty"`
}
```

## Supported Bun Tags

- `table` - Table name and alias
- `column` - Column name (auto-derived if not specified)
- `pk` - Primary key
- `autoincrement` - Auto-increment
- `notnull` - NOT NULL constraint
- `unique` - Unique constraint
- `default` - Default value
- `rel` - Relationship definition
- `type` - Explicit SQL type

## Type Mapping

| SQL Type | Go Type | Nullable Type |
|----------|---------|---------------|
| bigint | int64 | sql.NullInt64 |
| integer | int | sql.NullInt32 |
| varchar, text | string | sql.NullString |
| boolean | bool | sql.NullBool |
| timestamp | time.Time | sql.NullTime |
| numeric | float64 | sql.NullFloat64 |

## Notes

- Model names are derived from table names (singularized, PascalCase)
- Table aliases are auto-generated from table names
- Multi-file mode: one file per table named `sql_{schema}_{table}.go` (see the sketch below for enabling it programmatically)
- Generated code is auto-formatted
- JSON tags are automatically added
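
A minimal sketch of forcing multi-file output programmatically; it assumes the `multi_file` metadata flag consumed by `shouldUseMultiFile` in the writer change below:

```go
options := &writers.WriterOptions{
	OutputPath: "models/", // a directory path also triggers multi-file auto-detection
	Metadata: map[string]interface{}{
		"multi_file": true, // explicit override, checked before path-based detection
	},
}

writer := bun.NewWriter(options)
if err := writer.WriteDatabase(db); err != nil {
	panic(err)
}
```
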
@@ -41,12 +41,7 @@ func NewWriter(options *writers.WriterOptions) *Writer {
 // WriteDatabase writes a complete database as Bun models
 func (w *Writer) WriteDatabase(db *models.Database) error {
 	// Check if multi-file mode is enabled
-	multiFile := false
-	if w.options.Metadata != nil {
-		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
-			multiFile = mf
-		}
-	}
+	multiFile := w.shouldUseMultiFile()

 	if multiFile {
 		return w.writeMultiFile(db)
@@ -346,6 +341,41 @@ func (w *Writer) writeOutput(content string) error {
 	return nil
 }

+// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
+func (w *Writer) shouldUseMultiFile() bool {
+	// Check if multi_file is explicitly set in metadata
+	if w.options.Metadata != nil {
+		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
+			return mf
+		}
+	}
+
+	// Auto-detect based on output path
+	if w.options.OutputPath == "" {
+		// No output path means stdout (single file)
+		return false
+	}
+
+	// Check if path ends with .go (explicit file)
+	if strings.HasSuffix(w.options.OutputPath, ".go") {
+		return false
+	}
+
+	// Check if path ends with directory separator
+	if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
+		return true
+	}
+
+	// Check if path exists and is a directory
+	info, err := os.Stat(w.options.OutputPath)
+	if err == nil && info.IsDir() {
+		return true
+	}
+
+	// Default to single file for ambiguous cases
+	return false
+}
+
 // createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
 func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
 	return &models.Database{
161 pkg/writers/dbml/README.md Normal file
@@ -0,0 +1,161 @@

# DBML Writer

Generates Database Markup Language (DBML) files from database schema information.

## Overview

The DBML Writer converts RelSpec's internal database model representation into DBML syntax, suitable for use with dbdiagram.io and other DBML-compatible tools.

## Features

- Generates DBML syntax
- Creates table definitions with columns
- Defines relationships
- Includes indexes
- Adds notes and documentation
- Supports enums

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.dbml",
	}

	writer := dbml.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate DBML from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output dbml \
  --out-file schema.dbml

# Convert GORM models to DBML
relspec --input gorm --in-file models.go --output dbml --out-file database.dbml

# Convert JSON to DBML for visualization
relspec --input json --in-file schema.json --output dbml --out-file diagram.dbml
```

## Generated DBML Example

```dbml
Project MyDatabase {
  database_type: 'PostgreSQL'
}

Table users {
  id bigserial [pk, increment]
  username varchar(50) [not null, unique]
  email varchar(100) [not null]
  bio text [null]
  created_at timestamp [not null, default: `now()`]

  Note: 'Users table'

  indexes {
    email [name: 'idx_users_email']
  }
}

Table posts {
  id bigserial [pk, increment]
  user_id bigint [not null]
  title varchar(200) [not null]
  content text [null]
  created_at timestamp [default: `now()`]

  indexes {
    user_id [name: 'idx_posts_user_id']
    (user_id, created_at) [name: 'idx_posts_user_created']
  }
}

Ref: posts.user_id > users.id [delete: cascade, update: no action]
```

## DBML Features

### Table Definitions

```dbml
Table table_name {
  column_name type [attributes]
}
```

### Column Attributes

- `pk` - Primary key
- `increment` - Auto-increment
- `not null` - NOT NULL constraint
- `null` - Nullable (explicit)
- `unique` - Unique constraint
- `default: value` - Default value
- `note: 'text'` - Column note

### Relationships

```dbml
Ref: table1.column > table2.column
Ref: table1.column < table2.column
Ref: table1.column - table2.column
```

Relationship types:

- `>` - Many-to-one
- `<` - One-to-many
- `-` - One-to-one

Relationship actions:

```dbml
Ref: posts.user_id > users.id [delete: cascade, update: restrict]
```

### Indexes

```dbml
indexes {
  column_name
  (column1, column2) [name: 'idx_name', unique]
}
```

## Type Mapping

| SQL Type | DBML Type |
|----------|-----------|
| bigint | bigint |
| integer | int |
| varchar(n) | varchar(n) |
| text | text |
| boolean | boolean |
| timestamp | timestamp |
| date | date |
| json | json |
| uuid | uuid |

## Notes

- DBML is designed for database visualization
- Can be imported into dbdiagram.io
- Human-readable format
- Schema names can be included in table names
- Comments and notes are preserved
- Ideal for documentation and sharing designs
111 pkg/writers/dctx/README.md Normal file
@@ -0,0 +1,111 @@

# DCTX Writer

Generates Clarion database dictionary (DCTX) files from database schema information.

## Overview

The DCTX Writer converts RelSpec's internal database model representation into Clarion dictionary XML format, used by the Clarion development platform.

## Features

- Generates DCTX XML format
- Creates file (table) definitions
- Defines fields (columns) with Clarion types
- Includes keys (indexes)
- Handles relationships

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "database.dctx",
	}

	writer := dctx.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate DCTX from PostgreSQL database (for Clarion migration)
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output dctx \
  --out-file app.dctx

# Convert GORM models to DCTX
relspec --input gorm --in-file models.go --output dctx --out-file legacy.dctx

# Convert JSON schema to DCTX
relspec --input json --in-file schema.json --output dctx --out-file database.dctx
```

## Type Mapping

Converts standard SQL types to Clarion types:

| SQL Type | Clarion Type | Notes |
|----------|--------------|-------|
| VARCHAR(n) | STRING(n) | Fixed-length string |
| TEXT | STRING | Variable length |
| INTEGER | LONG | 32-bit integer |
| BIGINT | DECIMAL(20,0) | Large integer |
| SMALLINT | SHORT | 16-bit integer |
| NUMERIC(p,s) | DECIMAL(p,s) | Decimal number |
| REAL, FLOAT | REAL | Floating point |
| BOOLEAN | BYTE | 0/1 value |
| DATE | DATE | Date field |
| TIME | TIME | Time field |
| TIMESTAMP | LONG | Unix timestamp |

## DCTX Structure

DCTX files are XML-based with this structure:

```xml
<?xml version="1.0"?>
<dictionary>
  <file name="USERS" driver="TOPSPEED">
    <record>
      <field name="ID" type="LONG" />
      <field name="USERNAME" type="STRING" bytes="50" />
      <field name="EMAIL" type="STRING" bytes="100" />
    </record>
    <key name="KEY_PRIMARY" primary="true">
      <field name="ID" />
    </key>
  </file>
</dictionary>
```

## Features

- File definitions (equivalent to tables)
- Field definitions with Clarion-specific types
- Key definitions (primary and foreign)
- Relationships between files
- Driver specifications (TOPSPEED, SQL, etc.)

## Notes

- DCTX is specific to Clarion development
- Useful for legacy system integration
- Field names are typically uppercase in Clarion
- Supports Clarion-specific attributes
- Can be imported into Clarion IDE
182 pkg/writers/drawdb/README.md Normal file
@@ -0,0 +1,182 @@

# DrawDB Writer

Generates DrawDB-compatible JSON files from database schema information.

## Overview

The DrawDB Writer converts RelSpec's internal database model representation into JSON format compatible with DrawDB, a free online database design tool.

## Features

- Generates DrawDB JSON format
- Creates table and field definitions
- Defines relationships
- Includes visual layout information
- Preserves constraints and indexes

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "diagram.json",
	}

	writer := drawdb.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate DrawDB diagram from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output drawdb \
  --out-file diagram.json

# Convert GORM models to DrawDB for visualization
relspec --input gorm --in-file models.go --output drawdb --out-file design.json

# Convert JSON schema to DrawDB
relspec --input json --in-file schema.json --output drawdb --out-file diagram.json
```

## Generated JSON Example

```json
{
  "version": "1.0",
  "database": "PostgreSQL",
  "tables": [
    {
      "id": "1",
      "name": "users",
      "x": 100,
      "y": 100,
      "fields": [
        {
          "id": "1",
          "name": "id",
          "type": "BIGINT",
          "primary": true,
          "autoIncrement": true,
          "notNull": true
        },
        {
          "id": "2",
          "name": "username",
          "type": "VARCHAR",
          "size": 50,
          "notNull": true,
          "unique": true
        },
        {
          "id": "3",
          "name": "email",
          "type": "VARCHAR",
          "size": 100,
          "notNull": true
        }
      ],
      "indexes": [
        {
          "name": "idx_users_email",
          "fields": ["email"]
        }
      ]
    },
    {
      "id": "2",
      "name": "posts",
      "x": 400,
      "y": 100,
      "fields": [
        {
          "id": "1",
          "name": "id",
          "type": "BIGINT",
          "primary": true
        },
        {
          "id": "2",
          "name": "user_id",
          "type": "BIGINT",
          "notNull": true
        },
        {
          "id": "3",
          "name": "title",
          "type": "VARCHAR",
          "size": 200,
          "notNull": true
        }
      ]
    }
  ],
  "relationships": [
    {
      "id": "1",
      "source": "2",
      "target": "1",
      "sourceField": "user_id",
      "targetField": "id",
      "type": "many-to-one",
      "onDelete": "CASCADE"
    }
  ]
}
```

## DrawDB Features

### Table Properties

- `id` - Unique table identifier
- `name` - Table name
- `x`, `y` - Position in diagram
- `fields` - Array of field definitions
- `indexes` - Array of index definitions

### Field Properties

- `id` - Unique field identifier
- `name` - Field name
- `type` - Data type (BIGINT, VARCHAR, etc.)
- `size` - Length for string types
- `primary` - Primary key flag
- `notNull` - NOT NULL constraint
- `unique` - Unique constraint
- `autoIncrement` - Auto-increment flag
- `default` - Default value

### Relationship Properties

- `id` - Unique relationship identifier
- `source` - Source table ID
- `target` - Target table ID
- `sourceField` - Foreign key field
- `targetField` - Referenced field
- `type` - Relationship type (one-to-one, one-to-many, many-to-one)
- `onDelete` - Delete action
- `onUpdate` - Update action

## Notes

- DrawDB is available at drawdb.vercel.app
- Generated files can be imported for visual editing
- Visual positions (x, y) are auto-generated
- Ideal for creating ERD diagrams
- Supports modern database features
- Free and open-source tool
120 pkg/writers/drizzle/README.md Normal file
@@ -0,0 +1,120 @@

# Drizzle Writer

Generates TypeScript/JavaScript files with Drizzle ORM schema definitions from database schema information.

## Overview

The Drizzle Writer converts RelSpec's internal database model representation into TypeScript source code with Drizzle ORM schema definitions, including tables, columns, relationships, and constraints.

## Features

- Generates Drizzle-compatible TypeScript schema
- Supports PostgreSQL and MySQL schemas
- Creates table definitions with proper column types
- Generates relationship definitions
- Handles constraints and indexes
- Outputs formatted TypeScript code

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.ts",
		Metadata: map[string]interface{}{
			"database_type": "postgresql", // or "mysql"
		},
	}

	writer := drizzle.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate Drizzle schema from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output drizzle \
  --out-file schema.ts

# Convert GORM models to Drizzle
relspec --input gorm --in-file models.go --output drizzle --out-file schema.ts

# Convert JSON schema to Drizzle
relspec --input json --in-file schema.json --output drizzle --out-file db/schema.ts
```

## Generated Code Example

```typescript
import { pgTable, serial, varchar, text, timestamp, integer } from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';

export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  username: varchar('username', { length: 50 }).notNull().unique(),
  email: varchar('email', { length: 100 }).notNull(),
  bio: text('bio'),
  createdAt: timestamp('created_at').notNull().defaultNow(),
});

export const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
  title: varchar('title', { length: 200 }).notNull(),
  content: text('content'),
});

export const usersRelations = relations(users, ({ many }) => ({
  posts: many(posts),
}));

export const postsRelations = relations(posts, ({ one }) => ({
  user: one(users, {
    fields: [posts.userId],
    references: [users.id],
  }),
}));
```

## Supported Column Types

### PostgreSQL

- `serial`, `bigserial` - Auto-increment integers
- `integer`, `bigint`, `smallint` - Integer types
- `varchar`, `text` - String types
- `boolean` - Boolean
- `timestamp`, `date`, `time` - Date/time types
- `json`, `jsonb` - JSON types
- `uuid` - UUID type

### MySQL

- `int`, `bigint`, `smallint` - Integer types
- `varchar`, `text` - String types
- `boolean` - Boolean
- `datetime`, `timestamp` - Date/time types
- `json` - JSON type

## Notes

- Table names and column names are preserved as-is
- Relationships are generated as separate relation definitions
- Constraint actions (CASCADE, etc.) are included in references
- Schema names other than 'public' are supported
- Output is formatted TypeScript code
221 pkg/writers/drizzle/template_data.go Normal file
@@ -0,0 +1,221 @@

package drizzle

import (
	"sort"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// TemplateData represents the data passed to the template for code generation
type TemplateData struct {
	Imports []string
	Enums   []*EnumData
	Tables  []*TableData
}

// EnumData represents an enum in the schema
type EnumData struct {
	Name       string   // Enum name (PascalCase)
	VarName    string   // Variable name for the enum (camelCase)
	Values     []string // Enum values
	ValuesStr  string   // Comma-separated quoted values for pgEnum()
	TypeUnion  string   // TypeScript union type (e.g., "'admin' | 'user' | 'guest'")
	SchemaName string   // Schema name
}

// TableData represents a table in the template
type TableData struct {
	Name              string        // Table variable name (camelCase, e.g., users)
	TableName         string        // Actual database table name (e.g., users)
	TypeName          string        // TypeScript type name (PascalCase, e.g., Users)
	Columns           []*ColumnData // Column definitions
	Indexes           []*IndexData  // Index definitions
	Comment           string        // Table comment
	SchemaName        string        // Schema name
	NeedsSQLTag       bool          // Whether we need to import 'sql' from drizzle-orm
	IndexColumnFields []string      // Column field names used in indexes (for destructuring)
}

// ColumnData represents a column in a table
type ColumnData struct {
	Name           string // Column name in database
	FieldName      string // Field name in TypeScript (camelCase)
	DrizzleChain   string // Complete Drizzle column chain (e.g., "integer('id').primaryKey()")
	TypeScriptType string // TypeScript type for interface (e.g., "string", "number | null")
	IsForeignKey   bool   // Whether this is a foreign key
	ReferencesLine string // The .references() line if FK
	Comment        string // Column comment
}

// IndexData represents an index definition
type IndexData struct {
	Name       string   // Index name
	Columns    []string // Column names
	IsUnique   bool     // Whether it's a unique index
	Definition string   // Complete index definition line
}

// NewTemplateData creates a new TemplateData
func NewTemplateData() *TemplateData {
	return &TemplateData{
		Imports: make([]string, 0),
		Enums:   make([]*EnumData, 0),
		Tables:  make([]*TableData, 0),
	}
}

// AddImport adds an import to the template data (deduplicates automatically)
func (td *TemplateData) AddImport(importLine string) {
	// Check if already exists
	for _, imp := range td.Imports {
		if imp == importLine {
			return
		}
	}
	td.Imports = append(td.Imports, importLine)
}

// AddEnum adds an enum to the template data
func (td *TemplateData) AddEnum(enum *EnumData) {
	td.Enums = append(td.Enums, enum)
}

// AddTable adds a table to the template data
func (td *TemplateData) AddTable(table *TableData) {
	td.Tables = append(td.Tables, table)
}

// FinalizeImports sorts imports
func (td *TemplateData) FinalizeImports() {
	sort.Strings(td.Imports)
}

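// Example (illustrative): an enum {Name: "UserRole", Values: ["admin", "user"]}
// yields ValuesStr "'admin', 'user'" for pgEnum() and TypeUnion
// "'admin' | 'user'" for the exported TypeScript type.
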
// NewEnumData creates EnumData from a models.Enum
|
||||||
|
func NewEnumData(enum *models.Enum, tm *TypeMapper) *EnumData {
|
||||||
|
// Keep enum name as-is (it should already be PascalCase from the source)
|
||||||
|
enumName := enum.Name
|
||||||
|
// Variable name is camelCase version
|
||||||
|
varName := tm.ToCamelCase(enum.Name)
|
||||||
|
|
||||||
|
// Format values as comma-separated quoted strings for pgEnum()
|
||||||
|
quotedValues := make([]string, len(enum.Values))
|
||||||
|
for i, v := range enum.Values {
|
||||||
|
quotedValues[i] = "'" + v + "'"
|
||||||
|
}
|
||||||
|
valuesStr := ""
|
||||||
|
for i, qv := range quotedValues {
|
||||||
|
if i > 0 {
|
||||||
|
valuesStr += ", "
|
||||||
|
}
|
||||||
|
valuesStr += qv
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build TypeScript union type (e.g., "'admin' | 'user' | 'guest'")
|
||||||
|
typeUnion := ""
|
||||||
|
for i, qv := range quotedValues {
|
||||||
|
if i > 0 {
|
||||||
|
typeUnion += " | "
|
||||||
|
}
|
||||||
|
		typeUnion += qv
	}

	return &EnumData{
		Name:       enumName,
		VarName:    varName,
		Values:     enum.Values,
		ValuesStr:  valuesStr,
		TypeUnion:  typeUnion,
		SchemaName: enum.Schema,
	}
}

// NewTableData creates TableData from a models.Table
func NewTableData(table *models.Table, tm *TypeMapper) *TableData {
	tableName := tm.ToCamelCase(table.Name)
	typeName := tm.ToPascalCase(table.Name)

	return &TableData{
		Name:       tableName,
		TableName:  table.Name,
		TypeName:   typeName,
		Columns:    make([]*ColumnData, 0),
		Indexes:    make([]*IndexData, 0),
		Comment:    formatComment(table.Description, table.Comment),
		SchemaName: table.Schema,
	}
}

// AddColumn adds a column to the table data
func (td *TableData) AddColumn(col *ColumnData) {
	td.Columns = append(td.Columns, col)
}

// AddIndex adds an index to the table data
func (td *TableData) AddIndex(idx *IndexData) {
	td.Indexes = append(td.Indexes, idx)
}

// NewColumnData creates ColumnData from a models.Column
func NewColumnData(col *models.Column, table *models.Table, tm *TypeMapper, isEnum bool) *ColumnData {
	fieldName := tm.ToCamelCase(col.Name)
	drizzleChain := tm.BuildColumnChain(col, table, isEnum)

	return &ColumnData{
		Name:         col.Name,
		FieldName:    fieldName,
		DrizzleChain: drizzleChain,
		Comment:      formatComment(col.Description, col.Comment),
	}
}

// NewIndexData creates IndexData from a models.Index
func NewIndexData(index *models.Index, tableVar string, tm *TypeMapper) *IndexData {
	indexName := tm.ToCamelCase(index.Name) + "Idx"

	// Build column references as field names (will be used with destructuring)
	colRefs := make([]string, len(index.Columns))
	for i, colName := range index.Columns {
		// Use just the field name for destructured parameters
		colRefs[i] = tm.ToCamelCase(colName)
	}

	// Build the complete definition
	// Example: index('email_idx').on(email)
	// or: uniqueIndex('unique_email_idx').on(email)
	definition := ""
	if index.Unique {
		definition = "uniqueIndex('" + index.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
	} else {
		definition = "index('" + index.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
	}

	return &IndexData{
		Name:       indexName,
		Columns:    index.Columns,
		IsUnique:   index.Unique,
		Definition: definition,
	}
}

// formatComment combines description and comment into a single comment string
func formatComment(description, comment string) string {
	if description != "" && comment != "" {
		return description + " - " + comment
	}
	if description != "" {
		return description
	}
	return comment
}

// joinStrings joins a slice of strings with a separator
func joinStrings(strs []string, sep string) string {
	result := ""
	for i, s := range strs {
		if i > 0 {
			result += sep
		}
		result += s
	}
	return result
}
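To make the index handling above concrete, a minimal usage sketch (illustrative only, not part of the changeset; the `models.Index` literal is simplified to the fields the constructor actually reads):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tm := drizzle.NewTypeMapper()
	idx := &models.Index{Name: "email_idx", Columns: []string{"email_address"}}
	data := drizzle.NewIndexData(idx, "users", tm)
	fmt.Println(data.Definition) // index('email_idx').on(emailAddress)
}
```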
64
pkg/writers/drizzle/templates.go
Normal file
@@ -0,0 +1,64 @@
package drizzle

import (
	"bytes"
	"text/template"
)

// schemaTemplate defines the template for generating Drizzle schemas
const schemaTemplate = `// Code generated by relspecgo. DO NOT EDIT.
{{range .Imports}}{{.}}
{{end}}
{{if .Enums}}
// Enums
{{range .Enums}}export const {{.VarName}} = pgEnum('{{.Name}}', [{{.ValuesStr}}]);
export type {{.Name}} = {{.TypeUnion}};
{{end}}
{{end}}
{{range .Tables}}// Table: {{.TableName}}{{if .Comment}} - {{.Comment}}{{end}}
export interface {{.TypeName}} {
{{- range $i, $col := .Columns}}
{{$col.FieldName}}: {{$col.TypeScriptType}};{{if $col.Comment}} // {{$col.Comment}}{{end}}
{{- end}}
}

export const {{.Name}} = pgTable('{{.TableName}}', {
{{- range $i, $col := .Columns}}
{{$col.FieldName}}: {{$col.DrizzleChain}},{{if $col.Comment}} // {{$col.Comment}}{{end}}
{{- end}}
}{{if .Indexes}}{{if .IndexColumnFields}}, ({ {{range $i, $field := .IndexColumnFields}}{{if $i}}, {{end}}{{$field}}{{end}} }) => [{{else}}, (table) => [{{end}}
{{- range $i, $idx := .Indexes}}
{{$idx.Definition}},
{{- end}}
]{{end}});

export type New{{.TypeName}} = typeof {{.Name}}.$inferInsert;
{{end}}`

// Templates holds the parsed templates
type Templates struct {
	schemaTmpl *template.Template
}

// NewTemplates creates and parses the templates
func NewTemplates() (*Templates, error) {
	schemaTmpl, err := template.New("schema").Parse(schemaTemplate)
	if err != nil {
		return nil, err
	}

	return &Templates{
		schemaTmpl: schemaTmpl,
	}, nil
}

// GenerateCode executes the template with the given data
func (t *Templates) GenerateCode(data *TemplateData) (string, error) {
	var buf bytes.Buffer
	err := t.schemaTmpl.Execute(&buf, data)
	if err != nil {
		return "", err
	}

	return buf.String(), nil
}
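As a quick orientation for reviewers, a minimal sketch of driving these templates directly (assumes `NewTemplateData` and its `AddEnum`/`AddTable`/`AddImport`/`FinalizeImports` helpers defined elsewhere in this changeset; illustrative, not part of the diff):

```go
package main

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tmpl, err := drizzle.NewTemplates()
	if err != nil {
		log.Fatal(err)
	}

	data := drizzle.NewTemplateData()
	// ...populate via data.AddEnum / data.AddTable / data.AddImport...
	data.FinalizeImports()

	code, err := tmpl.GenerateCode(data)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(code) // the rendered TypeScript schema
}
```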
318
pkg/writers/drizzle/type_mapper.go
Normal file
@@ -0,0 +1,318 @@
package drizzle

import (
	"fmt"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// TypeMapper handles SQL to Drizzle type conversions
type TypeMapper struct{}

// NewTypeMapper creates a new TypeMapper instance
func NewTypeMapper() *TypeMapper {
	return &TypeMapper{}
}

// SQLTypeToDrizzle converts SQL types to Drizzle column type functions
// Returns the Drizzle column constructor (e.g., "integer", "varchar", "text")
func (tm *TypeMapper) SQLTypeToDrizzle(sqlType string) string {
	sqlTypeLower := strings.ToLower(sqlType)

	// PostgreSQL type mapping to Drizzle
	typeMap := map[string]string{
		// Integer types
		"integer":  "integer",
		"int":      "integer",
		"int4":     "integer",
		"smallint": "smallint",
		"int2":     "smallint",
		"bigint":   "bigint",
		"int8":     "bigint",

		// Serial types
		"serial":      "serial",
		"serial4":     "serial",
		"smallserial": "smallserial",
		"serial2":     "smallserial",
		"bigserial":   "bigserial",
		"serial8":     "bigserial",

		// Numeric types
		"numeric":          "numeric",
		"decimal":          "numeric",
		"real":             "real",
		"float4":           "real",
		"double precision": "doublePrecision",
		"float":            "doublePrecision",
		"float8":           "doublePrecision",

		// Character types
		"text":              "text",
		"varchar":           "varchar",
		"character varying": "varchar",
		"char":              "char",
		"character":         "char",

		// Boolean
		"boolean": "boolean",
		"bool":    "boolean",

		// Binary
		"bytea": "bytea",

		// JSON types
		"json":  "json",
		"jsonb": "jsonb",

		// Date/Time types
		"time":        "time",
		"timetz":      "time",
		"timestamp":   "timestamp",
		"timestamptz": "timestamp",
		"date":        "date",
		"interval":    "interval",

		// UUID
		"uuid": "uuid",

		// Geometric types
		"point": "point",
		"line":  "line",
	}

	// Check for exact match first
	if drizzleType, ok := typeMap[sqlTypeLower]; ok {
		return drizzleType
	}

	// Check for partial matches (e.g., "varchar(255)" -> "varchar")
	for sqlPattern, drizzleType := range typeMap {
		if strings.HasPrefix(sqlTypeLower, sqlPattern) {
			return drizzleType
		}
	}

	// Default to text for unknown types
	return "text"
}
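The matching order matters here: the exact lookup runs first, then prefix matching so parameterized types resolve to their base constructor. A quick illustration (hypothetical `main`, calling the exported mapper):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tm := drizzle.NewTypeMapper()
	fmt.Println(tm.SQLTypeToDrizzle("varchar(255)"))     // "varchar" via prefix match
	fmt.Println(tm.SQLTypeToDrizzle("double precision")) // "doublePrecision"
	fmt.Println(tm.SQLTypeToDrizzle("geography"))        // "text" fallback for unknown types
}
```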

// BuildColumnChain builds the complete column definition chain for Drizzle
// Example: integer('id').primaryKey().notNull()
func (tm *TypeMapper) BuildColumnChain(col *models.Column, table *models.Table, isEnum bool) string {
	var parts []string

	// Determine Drizzle column type
	var drizzleType string
	if isEnum {
		// For enum types, use the type name directly
		drizzleType = fmt.Sprintf("pgEnum('%s')", col.Type)
	} else {
		drizzleType = tm.SQLTypeToDrizzle(col.Type)
	}

	// Start with column type and name
	// Note: column name is passed as first argument to the column constructor
	base := fmt.Sprintf("%s('%s')", drizzleType, col.Name)
	parts = append(parts, base)

	// Add column modifiers in order
	modifiers := tm.buildColumnModifiers(col, table)
	if len(modifiers) > 0 {
		parts = append(parts, modifiers...)
	}

	return strings.Join(parts, ".")
}
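For example, a bigint identity primary key produces the following chain (a sketch; the `models.Column` literal only sets the fields `BuildColumnChain` and its modifier helper actually read):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tm := drizzle.NewTypeMapper()
	col := &models.Column{Name: "id", Type: "bigint", IsPrimaryKey: true, AutoIncrement: true}
	fmt.Println(tm.BuildColumnChain(col, &models.Table{Name: "users"}, false))
	// bigint('id').primaryKey().generatedAlwaysAsIdentity()
}
```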

// buildColumnModifiers builds an array of method calls for column modifiers
func (tm *TypeMapper) buildColumnModifiers(col *models.Column, table *models.Table) []string {
	var modifiers []string

	// Primary key
	if col.IsPrimaryKey {
		modifiers = append(modifiers, "primaryKey()")
	}

	// Not null constraint
	if col.NotNull && !col.IsPrimaryKey {
		modifiers = append(modifiers, "notNull()")
	}

	// Unique constraint (check if there's a single-column unique constraint)
	if tm.hasUniqueConstraint(col.Name, table) {
		modifiers = append(modifiers, "unique()")
	}

	// Default value
	if col.AutoIncrement {
		// For auto-increment, use generatedAlwaysAsIdentity()
		modifiers = append(modifiers, "generatedAlwaysAsIdentity()")
	} else if col.Default != nil {
		defaultValue := tm.formatDefaultValue(col.Default)
		if defaultValue != "" {
			modifiers = append(modifiers, fmt.Sprintf("default(%s)", defaultValue))
		}
	}

	return modifiers
}

// formatDefaultValue formats a default value for Drizzle
func (tm *TypeMapper) formatDefaultValue(defaultValue any) string {
	switch v := defaultValue.(type) {
	case string:
		if v == "now()" || v == "CURRENT_TIMESTAMP" {
			return "sql`now()`"
		} else if v == "gen_random_uuid()" || strings.Contains(strings.ToLower(v), "uuid") {
			return "sql`gen_random_uuid()`"
		} else {
			// Try to parse as number first
			// Check if it's a numeric string that should be a number
			if isNumericString(v) {
				return v
			}
			// String literal
			return fmt.Sprintf("'%s'", strings.ReplaceAll(v, "'", "\\'"))
		}
	case bool:
		if v {
			return "true"
		}
		return "false"
	case int, int64, int32, int16, int8:
		return fmt.Sprintf("%v", v)
	case float32, float64:
		return fmt.Sprintf("%v", v)
	default:
		return fmt.Sprintf("%v", v)
	}
}
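Defaults flow through `formatDefaultValue` into a `.default(...)` modifier. A sketch (assuming `Column.Default` is an `any`-typed field, as the nil check above suggests):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tm := drizzle.NewTypeMapper()
	col := &models.Column{Name: "created_at", Type: "timestamptz", NotNull: true, Default: "now()"}
	fmt.Println(tm.BuildColumnChain(col, &models.Table{}, false))
	// timestamp('created_at').notNull().default(sql`now()`)
}
```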

// isNumericString checks if a string represents a number
func isNumericString(s string) bool {
	if s == "" {
		return false
	}
	// Simple check for numeric strings
	for i, c := range s {
		if i == 0 && c == '-' {
			continue // Allow negative sign at start
		}
		if c < '0' || c > '9' {
			if c != '.' {
				return false
			}
		}
	}
	return true
}

// hasUniqueConstraint checks if a column has a unique constraint
func (tm *TypeMapper) hasUniqueConstraint(colName string, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint &&
			len(constraint.Columns) == 1 &&
			constraint.Columns[0] == colName {
			return true
		}
	}
	return false
}

// BuildReferencesChain builds the .references() chain for foreign key columns
func (tm *TypeMapper) BuildReferencesChain(fk *models.Constraint, referencedTable string) string {
	// Example: .references(() => users.id)
	if len(fk.ReferencedColumns) > 0 {
		// Use the referenced table variable name (camelCase)
		refTableVar := tm.ToCamelCase(referencedTable)
		refColumn := fk.ReferencedColumns[0]
		return fmt.Sprintf("references(() => %s.%s)", refTableVar, refColumn)
	}
	return ""
}

// ToCamelCase converts snake_case or PascalCase to camelCase
func (tm *TypeMapper) ToCamelCase(s string) string {
	if s == "" {
		return s
	}

	// Check if it's snake_case
	if strings.Contains(s, "_") {
		parts := strings.Split(s, "_")
		if len(parts) == 0 {
			return s
		}

		// First part stays lowercase
		result := strings.ToLower(parts[0])

		// Capitalize first letter of remaining parts
		for i := 1; i < len(parts); i++ {
			if len(parts[i]) > 0 {
				result += strings.ToUpper(parts[i][:1]) + strings.ToLower(parts[i][1:])
			}
		}

		return result
	}

	// Otherwise, assume it's PascalCase - just lowercase the first letter
	return strings.ToLower(s[:1]) + s[1:]
}

// ToPascalCase converts snake_case to PascalCase
func (tm *TypeMapper) ToPascalCase(s string) string {
	parts := strings.Split(s, "_")
	var result string

	for _, part := range parts {
		if len(part) > 0 {
			result += strings.ToUpper(part[:1]) + strings.ToLower(part[1:])
		}
	}

	return result
}
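Concretely, the two helpers normalize identifiers as follows (hypothetical `main`):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	tm := drizzle.NewTypeMapper()
	fmt.Println(tm.ToCamelCase("user_profile"))  // userProfile
	fmt.Println(tm.ToCamelCase("UserProfile"))   // userProfile (PascalCase branch)
	fmt.Println(tm.ToPascalCase("user_profile")) // UserProfile
}
```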

// DrizzleTypeToTypeScript converts Drizzle column types to TypeScript types
func (tm *TypeMapper) DrizzleTypeToTypeScript(drizzleType string, isEnum bool, enumName string) string {
	if isEnum {
		return enumName
	}

	typeMap := map[string]string{
		"integer":         "number",
		"bigint":          "number",
		"smallint":        "number",
		"serial":          "number",
		"bigserial":       "number",
		"smallserial":     "number",
		"numeric":         "number",
		"real":            "number",
		"doublePrecision": "number",
		"text":            "string",
		"varchar":         "string",
		"char":            "string",
		"boolean":         "boolean",
		"bytea":           "Buffer",
		"json":            "any",
		"jsonb":           "any",
		"timestamp":       "Date",
		"date":            "Date",
		"time":            "Date",
		"interval":        "string",
		"uuid":            "string",
		"point":           "{ x: number; y: number }",
		"line":            "{ a: number; b: number; c: number }",
	}

	if tsType, ok := typeMap[drizzleType]; ok {
		return tsType
	}

	// Default to any for unknown types
	return "any"
}
543
pkg/writers/drizzle/writer.go
Normal file
@@ -0,0 +1,543 @@
package drizzle

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

// Writer implements the writers.Writer interface for Drizzle ORM
type Writer struct {
	options    *writers.WriterOptions
	typeMapper *TypeMapper
	templates  *Templates
}

// NewWriter creates a new Drizzle writer with the given options
func NewWriter(options *writers.WriterOptions) *Writer {
	w := &Writer{
		options:    options,
		typeMapper: NewTypeMapper(),
	}

	// Initialize templates
	tmpl, err := NewTemplates()
	if err != nil {
		// Should not happen with embedded templates
		panic(fmt.Sprintf("failed to initialize templates: %v", err))
	}
	w.templates = tmpl

	return w
}

// WriteDatabase writes a complete database as Drizzle schema
func (w *Writer) WriteDatabase(db *models.Database) error {
	// Check if multi-file mode is enabled
	multiFile := w.shouldUseMultiFile()

	if multiFile {
		return w.writeMultiFile(db)
	}

	return w.writeSingleFile(db)
}

// WriteSchema writes a schema as Drizzle schema
func (w *Writer) WriteSchema(schema *models.Schema) error {
	// Create a temporary database with just this schema
	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}

	return w.WriteDatabase(db)
}

// WriteTable writes a single table as a Drizzle schema
func (w *Writer) WriteTable(table *models.Table) error {
	// Create a temporary schema and database
	schema := models.InitSchema(table.Schema)
	schema.Tables = []*models.Table{table}

	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}

	return w.WriteDatabase(db)
}

// writeSingleFile writes all tables to a single file
func (w *Writer) writeSingleFile(db *models.Database) error {
	templateData := NewTemplateData()

	// Build enum map for quick lookup
	enumMap := w.buildEnumMap(db)

	// Process all schemas
	for _, schema := range db.Schemas {
		// Add enums
		for _, enum := range schema.Enums {
			enumData := NewEnumData(enum, w.typeMapper)
			templateData.AddEnum(enumData)
		}

		// Add tables
		for _, table := range schema.Tables {
			tableData := w.buildTableData(table, schema, db, enumMap)
			templateData.AddTable(tableData)
		}
	}

	// Add imports
	w.addImports(templateData, db)

	// Finalize imports
	templateData.FinalizeImports()

	// Generate code
	code, err := w.templates.GenerateCode(templateData)
	if err != nil {
		return fmt.Errorf("failed to generate code: %w", err)
	}

	// Write output
	return w.writeOutput(code)
}

// writeMultiFile writes each table to a separate file
func (w *Writer) writeMultiFile(db *models.Database) error {
	// Ensure output path is a directory
	if w.options.OutputPath == "" {
		return fmt.Errorf("output path is required for multi-file mode")
	}

	// Create output directory if it doesn't exist
	if err := os.MkdirAll(w.options.OutputPath, 0755); err != nil {
		return fmt.Errorf("failed to create output directory: %w", err)
	}

	// Build enum map for quick lookup
	enumMap := w.buildEnumMap(db)

	// Process all schemas
	for _, schema := range db.Schemas {
		// Write enums file if there are any
		if len(schema.Enums) > 0 {
			if err := w.writeEnumsFile(schema); err != nil {
				return err
			}
		}

		// Write each table to a separate file
		for _, table := range schema.Tables {
			if err := w.writeTableFile(table, schema, db, enumMap); err != nil {
				return err
			}
		}
	}

	return nil
}
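A minimal end-to-end sketch of multi-file mode (illustrative; a real `db` would be populated by one of the readers rather than left empty after `InitDatabase`):

```go
package main

import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	db := models.InitDatabase("mydb")                     // normally filled in by a reader
	opts := &writers.WriterOptions{OutputPath: "schema/"} // trailing "/" => multi-file mode
	w := drizzle.NewWriter(opts)
	if err := w.WriteDatabase(db); err != nil {
		log.Fatal(err)
	}
	// Writes schema/enums.ts (when enums exist) plus one schema/{table}.ts per table.
}
```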

// writeEnumsFile writes all enums to a separate file
func (w *Writer) writeEnumsFile(schema *models.Schema) error {
	templateData := NewTemplateData()

	// Add enums
	for _, enum := range schema.Enums {
		enumData := NewEnumData(enum, w.typeMapper)
		templateData.AddEnum(enumData)
	}

	// Add imports for enums
	templateData.AddImport("import { pgEnum } from 'drizzle-orm/pg-core';")

	// Generate code
	code, err := w.templates.GenerateCode(templateData)
	if err != nil {
		return fmt.Errorf("failed to generate enums code: %w", err)
	}

	// Write to enums.ts file
	filename := filepath.Join(w.options.OutputPath, "enums.ts")
	return os.WriteFile(filename, []byte(code), 0644)
}

// writeTableFile writes a single table to its own file
func (w *Writer) writeTableFile(table *models.Table, schema *models.Schema, db *models.Database, enumMap map[string]bool) error {
	templateData := NewTemplateData()

	// Build table data
	tableData := w.buildTableData(table, schema, db, enumMap)
	templateData.AddTable(tableData)

	// Add imports
	w.addImports(templateData, db)

	// If there are enums, add import from enums file
	if len(schema.Enums) > 0 && w.tableUsesEnum(table, enumMap) {
		// Import enum definitions from enums.ts
		enumNames := w.getTableEnumNames(table, schema, enumMap)
		if len(enumNames) > 0 {
			importLine := fmt.Sprintf("import { %s } from './enums';", strings.Join(enumNames, ", "))
			templateData.AddImport(importLine)
		}
	}

	// Finalize imports
	templateData.FinalizeImports()

	// Generate code
	code, err := w.templates.GenerateCode(templateData)
	if err != nil {
		return fmt.Errorf("failed to generate code for table %s: %w", table.Name, err)
	}

	// Generate filename: {tableName}.ts
	filename := filepath.Join(w.options.OutputPath, table.Name+".ts")
	return os.WriteFile(filename, []byte(code), 0644)
}

// buildTableData builds TableData from a models.Table
func (w *Writer) buildTableData(table *models.Table, schema *models.Schema, db *models.Database, enumMap map[string]bool) *TableData {
	tableData := NewTableData(table, w.typeMapper)

	// Add columns
	for _, colName := range w.getSortedColumnNames(table) {
		col := table.Columns[colName]

		// Check if this column uses an enum
		isEnum := enumMap[col.Type]

		columnData := NewColumnData(col, table, w.typeMapper, isEnum)

		// Set TypeScript type
		drizzleType := w.typeMapper.SQLTypeToDrizzle(col.Type)
		enumName := ""
		if isEnum {
			// For enums, use the enum type name
			enumName = col.Type
		}
		baseType := w.typeMapper.DrizzleTypeToTypeScript(drizzleType, isEnum, enumName)

		// Add null union if column is nullable
		if !col.NotNull && !col.IsPrimaryKey {
			columnData.TypeScriptType = baseType + " | null"
		} else {
			columnData.TypeScriptType = baseType
		}

		// Check if this column is a foreign key
		if fk := w.getForeignKeyForColumn(col.Name, table); fk != nil {
			columnData.IsForeignKey = true
			refTableName := fk.ReferencedTable
			refChain := w.typeMapper.BuildReferencesChain(fk, refTableName)
			if refChain != "" {
				columnData.ReferencesLine = "." + refChain
				// Append to the drizzle chain
				columnData.DrizzleChain += columnData.ReferencesLine
			}
		}

		tableData.AddColumn(columnData)
	}

	// Collect all column field names that are used in indexes
	indexColumnFields := make(map[string]bool)

	// Add indexes (excluding single-column unique indexes, which are handled inline)
	for _, index := range table.Indexes {
		// Skip single-column unique indexes (handled by .unique() modifier)
		if index.Unique && len(index.Columns) == 1 {
			continue
		}

		// Track which columns are used in indexes
		for _, colName := range index.Columns {
			// Find the field name for this column
			if col, exists := table.Columns[colName]; exists {
				fieldName := w.typeMapper.ToCamelCase(col.Name)
				indexColumnFields[fieldName] = true
			}
		}

		indexData := NewIndexData(index, tableData.Name, w.typeMapper)
		tableData.AddIndex(indexData)
	}

	// Add multi-column unique constraints as unique indexes
	for _, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint && len(constraint.Columns) > 1 {
			// Create a unique index for this constraint
			indexData := &IndexData{
				Name:     w.typeMapper.ToCamelCase(constraint.Name) + "Idx",
				Columns:  constraint.Columns,
				IsUnique: true,
			}

			// Track which columns are used in indexes
			for _, colName := range constraint.Columns {
				if col, exists := table.Columns[colName]; exists {
					fieldName := w.typeMapper.ToCamelCase(col.Name)
					indexColumnFields[fieldName] = true
				}
			}

			// Build column references as field names (for destructuring)
			colRefs := make([]string, len(constraint.Columns))
			for i, colName := range constraint.Columns {
				if col, exists := table.Columns[colName]; exists {
					colRefs[i] = w.typeMapper.ToCamelCase(col.Name)
				} else {
					colRefs[i] = w.typeMapper.ToCamelCase(colName)
				}
			}

			indexData.Definition = "uniqueIndex('" + constraint.Name + "').on(" + joinStrings(colRefs, ", ") + ")"
			tableData.AddIndex(indexData)
		}
	}

	// Convert index column fields map to sorted slice
	if len(indexColumnFields) > 0 {
		fields := make([]string, 0, len(indexColumnFields))
		for field := range indexColumnFields {
			fields = append(fields, field)
		}
		// Sort for consistent output
		sortStrings(fields)
		tableData.IndexColumnFields = fields
	}

	return tableData
}

// sortStrings sorts a slice of strings in place
func sortStrings(strs []string) {
	for i := 0; i < len(strs); i++ {
		for j := i + 1; j < len(strs); j++ {
			if strs[i] > strs[j] {
				strs[i], strs[j] = strs[j], strs[i]
			}
		}
	}
}

// addImports adds the necessary imports to the template data
func (w *Writer) addImports(templateData *TemplateData, db *models.Database) {
	// Determine which Drizzle imports we need
	needsPgTable := len(templateData.Tables) > 0
	needsPgEnum := len(templateData.Enums) > 0
	needsIndex := false
	needsUniqueIndex := false
	needsSQL := false

	// Check what we need based on tables
	for _, table := range templateData.Tables {
		for _, index := range table.Indexes {
			if index.IsUnique {
				needsUniqueIndex = true
			} else {
				needsIndex = true
			}
		}

		// Check if any column uses SQL default values
		for _, col := range table.Columns {
			if strings.Contains(col.DrizzleChain, "sql`") {
				needsSQL = true
			}
		}
	}

	// Build the import statement
	imports := make([]string, 0)

	if needsPgTable {
		imports = append(imports, "pgTable")
	}
	if needsPgEnum {
		imports = append(imports, "pgEnum")
	}

	// Add column types - for now, add common ones
	// TODO: Could be optimized to only include used types
	columnTypes := []string{
		"integer", "bigint", "smallint",
		"serial", "bigserial", "smallserial",
		"text", "varchar", "char",
		"boolean", "numeric", "real", "doublePrecision",
		"timestamp", "date", "time", "interval",
		"json", "jsonb", "uuid", "bytea",
	}
	imports = append(imports, columnTypes...)

	if needsIndex {
		imports = append(imports, "index")
	}
	if needsUniqueIndex {
		imports = append(imports, "uniqueIndex")
	}

	importLine := "import { " + strings.Join(imports, ", ") + " } from 'drizzle-orm/pg-core';"
	templateData.AddImport(importLine)

	// Add SQL import if needed
	if needsSQL {
		templateData.AddImport("import { sql } from 'drizzle-orm';")
	}
}

// buildEnumMap builds a map of enum type names for quick lookup
func (w *Writer) buildEnumMap(db *models.Database) map[string]bool {
	enumMap := make(map[string]bool)

	for _, schema := range db.Schemas {
		for _, enum := range schema.Enums {
			enumMap[enum.Name] = true
			// Also add lowercase version for case-insensitive lookup
			enumMap[strings.ToLower(enum.Name)] = true
		}
	}

	return enumMap
}

// tableUsesEnum checks if a table uses any enum types
func (w *Writer) tableUsesEnum(table *models.Table, enumMap map[string]bool) bool {
	for _, col := range table.Columns {
		if enumMap[col.Type] || enumMap[strings.ToLower(col.Type)] {
			return true
		}
	}
	return false
}

// getTableEnumNames returns the list of enum variable names used by a table
func (w *Writer) getTableEnumNames(table *models.Table, schema *models.Schema, enumMap map[string]bool) []string {
	enumNames := make([]string, 0)
	seen := make(map[string]bool)

	for _, col := range table.Columns {
		if enumMap[col.Type] || enumMap[strings.ToLower(col.Type)] {
			// Find the enum in schema
			for _, enum := range schema.Enums {
				if strings.EqualFold(enum.Name, col.Type) {
					varName := w.typeMapper.ToCamelCase(enum.Name)
					if !seen[varName] {
						enumNames = append(enumNames, varName)
						seen[varName] = true
					}
					break
				}
			}
		}
	}

	return enumNames
}

// getSortedColumnNames returns column names sorted by sequence or name
func (w *Writer) getSortedColumnNames(table *models.Table) []string {
	// Convert map to slice
	columns := make([]*models.Column, 0, len(table.Columns))
	for _, col := range table.Columns {
		columns = append(columns, col)
	}

	// Sort by sequence, then by primary key, then by name
	// (Similar to GORM writer)
	sortColumns := func(i, j int) bool {
		// Sort by sequence if both have it
		if columns[i].Sequence > 0 && columns[j].Sequence > 0 {
			return columns[i].Sequence < columns[j].Sequence
		}

		// Put primary keys first
		if columns[i].IsPrimaryKey != columns[j].IsPrimaryKey {
			return columns[i].IsPrimaryKey
		}

		// Otherwise sort alphabetically
		return columns[i].Name < columns[j].Name
	}

	// Create a custom sorter
	for i := 0; i < len(columns); i++ {
		for j := i + 1; j < len(columns); j++ {
			if !sortColumns(i, j) {
				columns[i], columns[j] = columns[j], columns[i]
			}
		}
	}

	// Extract names
	names := make([]string, len(columns))
	for i, col := range columns {
		names[i] = col.Name
	}

	return names
}

// getForeignKeyForColumn returns the foreign key constraint for a column, if any
func (w *Writer) getForeignKeyForColumn(columnName string, table *models.Table) *models.Constraint {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			for _, col := range constraint.Columns {
				if col == columnName {
					return constraint
				}
			}
		}
	}
	return nil
}

// writeOutput writes the content to file or stdout
func (w *Writer) writeOutput(content string) error {
	if w.options.OutputPath != "" {
		return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
	}

	// Print to stdout
	fmt.Print(content)
	return nil
}

// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
func (w *Writer) shouldUseMultiFile() bool {
	// Check if multi_file is explicitly set in metadata
	if w.options.Metadata != nil {
		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
			return mf
		}
	}

	// Auto-detect based on output path
	if w.options.OutputPath == "" {
		// No output path means stdout (single file)
		return false
	}

	// Check if path ends with .ts (explicit file)
	if strings.HasSuffix(w.options.OutputPath, ".ts") {
		return false
	}

	// Check if path ends with directory separator
	if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
		return true
	}

	// Check if path exists and is a directory
	info, err := os.Stat(w.options.OutputPath)
	if err == nil && info.IsDir() {
		return true
	}

	// Default to single file for ambiguous cases
	return false
}
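The detection order in `shouldUseMultiFile` is: explicit `multi_file` metadata, then stdout, then a `.ts` extension, then a trailing separator, then an `os.Stat` directory check. Explicit metadata therefore overrides any path heuristic; a sketch:

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
)

func main() {
	// "out" is ambiguous (no extension, may not exist yet), but the explicit
	// metadata flag forces multi-file mode regardless of the path checks.
	opts := &writers.WriterOptions{
		OutputPath: "out",
		Metadata:   map[string]interface{}{"multi_file": true},
	}
	_ = drizzle.NewWriter(opts)
}
```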
176
pkg/writers/gorm/README.md
Normal file
@@ -0,0 +1,176 @@
# GORM Writer

Generates Go source files with GORM model definitions from database schema information.

## Overview

The GORM Writer converts RelSpec's internal database model representation into Go source code with GORM struct definitions, complete with proper tags, relationships, and methods.

## Features

- Generates GORM-compatible Go structs
- Creates proper `gorm` struct tags
- Generates `TableName()` methods
- Adds relationship fields (belongs-to, has-many)
- Supports both single-file and multi-file output
- Auto-generates helper methods (optional)
- Maps SQL types to Go types
- Handles nullable fields with custom sql_types

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
)

func main() {
	// Assume db is a *models.Database from a reader
	options := &writers.WriterOptions{
		OutputPath:  "models.go",
		PackageName: "models",
	}

	writer := gorm.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate GORM models from PostgreSQL database (single file)
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output gorm \
  --out-file models.go \
  --package models

# Generate GORM models with multi-file output (one file per table)
relspec --input json \
  --in-file schema.json \
  --output gorm \
  --out-file models/ \
  --package models

# Convert DBML to GORM models
relspec --input dbml --in-file schema.dbml --output gorm --out-file models.go
```

## Output Modes

### Single File Mode

Generates all models in one file:

```bash
relspec --input pgsql --conn "..." --output gorm --out-file models.go
```

### Multi-File Mode

Generates one file per table (auto-detected when output is a directory):

```bash
relspec --input pgsql --conn "..." --output gorm --out-file models/
```

Files are named: `sql_{schema}_{table}.go`

## Generated Code Example

```go
package models

import (
	"time"

	sql_types "git.warky.dev/wdevs/sql_types"
)

type ModelUser struct {
	ID        int64     `gorm:"column:id;type:bigint;primaryKey;autoIncrement" json:"id"`
	Username  string    `gorm:"column:username;type:varchar(50);not null;uniqueIndex" json:"username"`
	Email     string    `gorm:"column:email;type:varchar(100);not null" json:"email"`
	CreatedAt time.Time `gorm:"column:created_at;type:timestamp;not null;default:now()" json:"created_at"`

	// Relationships
	Pos []*ModelPost `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE" json:"pos,omitempty"`
}

func (ModelUser) TableName() string {
	return "public.users"
}

type ModelPost struct {
	ID      int64               `gorm:"column:id;type:bigint;primaryKey" json:"id"`
	UserID  int64               `gorm:"column:user_id;type:bigint;not null" json:"user_id"`
	Title   string              `gorm:"column:title;type:varchar(200);not null" json:"title"`
	Content sql_types.SqlString `gorm:"column:content;type:text" json:"content,omitempty"`

	// Belongs to
	Use *ModelUser `gorm:"foreignKey:UserID;references:ID" json:"use,omitempty"`
}

func (ModelPost) TableName() string {
	return "public.posts"
}
```

## Writer Options

### Metadata Options

Configure the writer behavior using metadata in `WriterOptions`:

```go
options := &writers.WriterOptions{
	OutputPath:  "models.go",
	PackageName: "models",
	Metadata: map[string]interface{}{
		"multi_file":          true, // Enable multi-file mode
		"populate_refs":       true, // Populate RefDatabase/RefSchema
		"generate_get_id_str": true, // Generate GetIDStr() methods
	},
}
```

## Type Mapping

| SQL Type | Go Type | Notes |
|----------|---------|-------|
| bigint, int8 | int64 | - |
| integer, int, int4 | int | - |
| smallint, int2 | int16 | - |
| varchar, text | string | Not nullable |
| varchar, text (nullable) | sql_types.SqlString | Nullable |
| boolean, bool | bool | - |
| timestamp, timestamptz | time.Time | - |
| numeric, decimal | float64 | - |
| uuid | string | - |
| json, jsonb | string | - |

## Relationship Generation

The writer automatically generates relationship fields (a usage sketch follows this list):

- **Belongs-to**: Generated for tables with foreign keys
- **Has-many**: Generated for tables referenced by foreign keys
- Relationship field names use 3-letter prefixes
- Includes proper `gorm` tags with `foreignKey` and `references`
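The generated relationship fields plug straight into GORM's eager loading; a usage sketch (assumes a configured `*gorm.DB` handle named `db` and the generated models shown above):

```go
var users []ModelUser
// Eager-load the has-many side declared on ModelUser.
if err := db.Preload("Pos").Find(&users).Error; err != nil {
	log.Fatal(err)
}
```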

## Notes

- Model names are prefixed with "Model" (e.g., `ModelUser`)
- Nullable columns use `sql_types.SqlString`, `sql_types.SqlInt64`, etc.
- Generated code is auto-formatted with `go fmt`
- JSON tags are automatically added
- Supports schema-qualified table names in `TableName()` method
@@ -41,12 +41,7 @@ func NewWriter(options *writers.WriterOptions) *Writer {
// WriteDatabase writes a complete database as GORM models
func (w *Writer) WriteDatabase(db *models.Database) error {
	// Check if multi-file mode is enabled
	multiFile := false
	multiFile := w.shouldUseMultiFile()
	if w.options.Metadata != nil {
		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
			multiFile = mf
		}
	}

	if multiFile {
		return w.writeMultiFile(db)
@@ -340,6 +335,41 @@ func (w *Writer) writeOutput(content string) error {
	return nil
}

// shouldUseMultiFile determines whether to use multi-file mode based on metadata or output path
func (w *Writer) shouldUseMultiFile() bool {
	// Check if multi_file is explicitly set in metadata
	if w.options.Metadata != nil {
		if mf, ok := w.options.Metadata["multi_file"].(bool); ok {
			return mf
		}
	}

	// Auto-detect based on output path
	if w.options.OutputPath == "" {
		// No output path means stdout (single file)
		return false
	}

	// Check if path ends with .go (explicit file)
	if strings.HasSuffix(w.options.OutputPath, ".go") {
		return false
	}

	// Check if path ends with directory separator
	if strings.HasSuffix(w.options.OutputPath, "/") || strings.HasSuffix(w.options.OutputPath, "\\") {
		return true
	}

	// Check if path exists and is a directory
	info, err := os.Stat(w.options.OutputPath)
	if err == nil && info.IsDir() {
		return true
	}

	// Default to single file for ambiguous cases
	return false
}

// createDatabaseRef creates a shallow copy of database without schemas to avoid circular references
func (w *Writer) createDatabaseRef(db *models.Database) *models.Database {
	return &models.Database{
272
pkg/writers/graphql/README.md
Normal file
@@ -0,0 +1,272 @@
# GraphQL Schema Writer

The GraphQL writer converts RelSpec's internal database model into GraphQL Schema Definition Language (SDL) files.

## Features

- **Table to Type mapping**: Database tables become GraphQL types
- **Column to Field mapping**: Table columns become type fields
- **Enum support**: Database enums are preserved
- **Custom scalar declarations**: Automatically declares DateTime, JSON, Date scalars
- **Implicit relationships**: Generates relationship fields from foreign keys
- **Many-to-many support**: Handles junction tables intelligently
- **Clean output**: Proper formatting, field ordering, and comments

## Type Mappings

### SQL to GraphQL

| SQL Type | GraphQL Type | Notes |
|----------|--------------|-------|
| bigint, integer, serial (PK) | ID | Primary keys map to ID |
| bigint, integer, int | Int | |
| text, varchar, char | String | |
| uuid (PK) | ID | UUID primary keys also map to ID |
| uuid | String | Non-PK UUIDs map to String |
| double precision, numeric, float | Float | |
| boolean | Boolean | |
| timestamp, timestamptz | DateTime | Custom scalar |
| jsonb, json | JSON | Custom scalar |
| date | Date | Custom scalar |
| Enum types | Enum | Preserves enum name |
| Arrays (e.g., text[]) | [Type] | Mapped to GraphQL lists |

## Relationship Handling

The writer intelligently generates relationship fields based on foreign key constraints:

### Forward Relationships (FK on this table)

```sql
-- Post table has authorId FK to User.id
CREATE TABLE post (
    id bigint PRIMARY KEY,
    title text NOT NULL,
    author_id bigint NOT NULL REFERENCES user(id)
);
```

```graphql
type Post {
  id: ID!
  title: String!
  author: User! # Generated from authorId FK
}
```

### Reverse Relationships (FK on other table)

```graphql
type User {
  id: ID!
  email: String!
  posts: [Post!]! # Reverse relationship (Post has FK to User)
}
```

### Many-to-Many Relationships

Junction tables (tables with only PKs and FKs) are automatically detected and hidden:

```sql
CREATE TABLE post_tag (
    post_id bigint NOT NULL REFERENCES post(id),
    tag_id bigint NOT NULL REFERENCES tag(id),
    PRIMARY KEY (post_id, tag_id)
);
```

```graphql
type Post {
  id: ID!
  tags: [Tag!]! # Many-to-many through PostTag junction table
}

type Tag {
  id: ID!
  posts: [Post!]! # Reverse many-to-many
}

# Note: PostTag junction table is NOT included in output
```

## Usage

### Basic Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
)

opts := &writers.WriterOptions{
	OutputPath: "schema.graphql",
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

### With Metadata Options

```go
opts := &writers.WriterOptions{
	OutputPath: "schema.graphql",
	Metadata: map[string]any{
		"includeScalarDeclarations": true, // Include scalar declarations
		"includeComments":           true, // Include field/table comments
	},
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

### Write to Stdout

```go
opts := &writers.WriterOptions{
	OutputPath: "", // Empty path writes to stdout
}

writer := graphql.NewWriter(opts)
err := writer.WriteDatabase(db)
```

## CLI Usage

```bash
# Convert PostgreSQL database to GraphQL
relspec convert --from pgsql \
  --from-conn "postgres://user:pass@localhost:5432/mydb" \
  --to graphql --to-path schema.graphql

# Convert GORM models to GraphQL
relspec convert --from gorm --from-path ./models \
  --to graphql --to-path schema.graphql

# Convert JSON to GraphQL
relspec convert --from json --from-path schema.json \
  --to graphql --to-path schema.graphql
```

## Output Format

The generated GraphQL schema follows this structure:

1. **Header comment** (if enabled)
2. **Custom scalar declarations** (if any custom scalars are used)
3. **Enum definitions** (alphabetically sorted)
4. **Type definitions** (with fields ordered: ID first, then scalars alphabetically, then relationships)

### Example Output

```graphql
# Generated GraphQL Schema
# Database: myapp

scalar DateTime
scalar JSON
scalar Date

enum Role {
  ADMIN
  USER
  MODERATOR
}

type User {
  id: ID!
  createdAt: DateTime!
  email: String!
  name: String!
  role: Role!

  posts: [Post!]!
  profile: Profile
}

type Post {
  id: ID!
  content: String
  published: Boolean!
  publishedAt: Date
  title: String!

  author: User!
  tags: [Tag!]!
}

type Tag {
  id: ID!
  name: String!

  posts: [Post!]!
}
```

## Metadata Options

| Option | Type | Description | Default |
|--------|------|-------------|---------|
| `includeScalarDeclarations` | bool | Include `scalar DateTime`, etc. declarations | true |
| `includeComments` | bool | Include table/field descriptions as comments | true |
| `preservePKType` | bool | Use Int/String for PKs instead of ID | false |

## Field Naming Conventions

- **FK columns**: Foreign key columns like `authorId` are removed from the output; instead, a relationship field `author` is generated
- **Relationship pluralization**: Reverse one-to-many relationships are pluralized (e.g., `posts`, `tags`)
- **CamelCase**: Field names are kept in their original casing from the database

## Junction Table Detection

A table is considered a junction table if it:

1. Has exactly 2 foreign key constraints
2. All columns are either primary keys or foreign keys
3. Has a composite primary key on the FK columns

Junction tables are automatically hidden from the GraphQL output, and many-to-many relationship fields are generated on the related types instead. A sketch of the detection predicate follows.
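A minimal sketch of such a predicate (illustrative only; the writer's actual `isJoinTable` implementation is not shown in this changeset, and criterion 3 is approximated here by requiring every column to be part of the primary key):

```go
func isJoinTable(t *models.Table) bool {
	fks := 0
	for _, c := range t.Constraints {
		if c.Type == models.ForeignKeyConstraint {
			fks++
		}
	}
	if fks != 2 {
		return false // criterion 1: exactly two foreign keys
	}
	for _, col := range t.Columns {
		if !col.IsPrimaryKey {
			return false // criteria 2 and 3 (approximated): all columns in the composite PK
		}
	}
	return true
}
```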

## Limitations

- All tables in all schemas are flattened into a single GraphQL schema
- No support for GraphQL-specific features like directives, interfaces, or unions
- Nullable vs non-nullable is determined solely by the `NOT NULL` constraint

## Example Conversion

**Input** (Database Schema):

```sql
CREATE TABLE user (
    id bigint PRIMARY KEY,
    email text NOT NULL,
    created_at timestamp NOT NULL
);

CREATE TABLE post (
    id bigint PRIMARY KEY,
    title text NOT NULL,
    author_id bigint NOT NULL REFERENCES user(id)
);
```

**Output** (GraphQL Schema):

```graphql
scalar DateTime

type User {
  id: ID!
  createdAt: DateTime!
  email: String!

  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!

  author: User!
}
```
178
pkg/writers/graphql/relationships.go
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
package graphql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (w *Writer) generateRelationFields(table *models.Table, db *models.Database, schema *models.Schema) []string {
|
||||||
|
var fields []string
|
||||||
|
|
||||||
|
// 1. Forward relationships (this table has FK)
|
||||||
|
for _, constraint := range table.Constraints {
|
||||||
|
if constraint.Type != models.ForeignKeyConstraint {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the related table
|
||||||
|
relatedTable := w.findTable(db, constraint.ReferencedSchema, constraint.ReferencedTable)
|
||||||
|
if relatedTable == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
		// Generate field name (remove "Id" suffix from FK column if present)
		fieldName := w.relationFieldName(constraint.Columns[0])

		// Determine nullability from FK column
		nullable := true
		for _, colName := range constraint.Columns {
			if col, exists := table.Columns[colName]; exists {
				if col.NotNull {
					nullable = false
					break
				}
			}
		}

		// Format: fieldName: RelatedType! or fieldName: RelatedType
		gqlType := relatedTable.Name
		if !nullable {
			gqlType += "!"
		}

		fields = append(fields, fmt.Sprintf("  %s: %s", fieldName, gqlType))
	}

	// 2. Reverse relationships (other tables reference this table)
	for _, otherSchema := range db.Schemas {
		for _, otherTable := range otherSchema.Tables {
			if otherTable.Name == table.Name && otherSchema.Name == schema.Name {
				continue
			}

			// Skip join tables for many-to-many
			if w.isJoinTable(otherTable) {
				// Check if this is a many-to-many through this join table
				if m2mField := w.getManyToManyField(table, otherTable, db); m2mField != "" {
					fields = append(fields, m2mField)
				}
				continue
			}

			for _, constraint := range otherTable.Constraints {
				if constraint.Type == models.ForeignKeyConstraint &&
					constraint.ReferencedTable == table.Name &&
					constraint.ReferencedSchema == schema.Name {
					// Add reverse relationship field (array)
					fieldName := w.pluralize(w.camelCase(otherTable.Name))
					fields = append(fields, fmt.Sprintf("  %s: [%s!]!", fieldName, otherTable.Name))
				}
			}
		}
	}

	return fields
}

func (w *Writer) getManyToManyField(table *models.Table, joinTable *models.Table, db *models.Database) string {
	// Find the two FK constraints in the join table
	var fk1, fk2 *models.Constraint
	for _, constraint := range joinTable.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			if fk1 == nil {
				fk1 = constraint
			} else {
				fk2 = constraint
			}
		}
	}

	if fk1 == nil || fk2 == nil {
		return ""
	}

	// Determine which FK points to our table and which to the other table
	var targetConstraint *models.Constraint
	if fk1.ReferencedTable == table.Name {
		targetConstraint = fk2
	} else if fk2.ReferencedTable == table.Name {
		targetConstraint = fk1
	} else {
		return "" // This join table doesn't involve our table
	}

	// Find the target table
	targetTable := w.findTable(db, targetConstraint.ReferencedSchema, targetConstraint.ReferencedTable)
	if targetTable == nil {
		return ""
	}

	// Generate many-to-many field
	fieldName := w.pluralize(w.camelCase(targetTable.Name))
	return fmt.Sprintf("  %s: [%s!]!", fieldName, targetTable.Name)
}

func (w *Writer) findTable(db *models.Database, schemaName, tableName string) *models.Table {
	for _, schema := range db.Schemas {
		if schema.Name != schemaName {
			continue
		}
		for _, table := range schema.Tables {
			if table.Name == tableName {
				return table
			}
		}
	}
	return nil
}

func (w *Writer) relationFieldName(fkColumnName string) string {
	// Remove "Id" or "_id" suffix
	name := fkColumnName
	if strings.HasSuffix(name, "Id") {
		name = name[:len(name)-2]
	} else if strings.HasSuffix(name, "_id") {
		name = name[:len(name)-3]
	}

	return w.camelCase(name)
}

func (w *Writer) camelCase(s string) string {
	// If already camelCase or PascalCase, convert to camelCase
	if s == "" {
		return s
	}

	// Convert first character to lowercase
	return strings.ToLower(string(s[0])) + s[1:]
}

func (w *Writer) pluralize(s string) string {
	// Simple pluralization rules
	if s == "" {
		return s
	}

	// Already plural
	if strings.HasSuffix(s, "s") {
		return s
	}

	// Words ending in 'y' → 'ies'
	if strings.HasSuffix(s, "y") {
		return s[:len(s)-1] + "ies"
	}

	// Words ending in 'x', 'z', 'ch', 'sh' → add 'es'
	if strings.HasSuffix(s, "x") || strings.HasSuffix(s, "z") ||
		strings.HasSuffix(s, "ch") || strings.HasSuffix(s, "sh") {
		return s + "es"
	}

	// Default: add 's'
	return s + "s"
}
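The naming helpers above fully determine how a relationship renders in SDL: the FK column names the forward field, and the referencing table's pluralized camelCase name becomes the reverse list field. A minimal standalone sketch of the effective rules (an illustrative re-implementation, not part of the diff; note that a trailing-`s` check in the original's `es` branch could never fire, since already-plural names return early):

```go
package main

import (
	"fmt"
	"strings"
)

// pluralizeDemo mirrors the suffix rules of pluralize above.
func pluralizeDemo(s string) string {
	switch {
	case s == "" || strings.HasSuffix(s, "s"):
		return s // empty or already plural
	case strings.HasSuffix(s, "y"):
		return s[:len(s)-1] + "ies" // category -> categories
	case strings.HasSuffix(s, "x") || strings.HasSuffix(s, "z") ||
		strings.HasSuffix(s, "ch") || strings.HasSuffix(s, "sh"):
		return s + "es" // box -> boxes
	default:
		return s + "s" // post -> posts
	}
}

func main() {
	// A Post.authorId FK therefore renders as "author: User!" on Post,
	// and the reverse side on User as "posts: [Post!]!".
	fmt.Println(pluralizeDemo("category"), pluralizeDemo("box"), pluralizeDemo("post"))
}
```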
148
pkg/writers/graphql/type_mapping.go
Normal file
@@ -0,0 +1,148 @@
package graphql

import (
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

func (w *Writer) sqlTypeToGraphQL(sqlType string, column *models.Column, table *models.Table, schema *models.Schema) string {
	// Check if this is a primary key → ID type
	if column.IsPrimaryKey {
		// Check metadata for explicit type preference
		if w.options.Metadata != nil {
			if preserveType, ok := w.options.Metadata["preservePKType"].(bool); ok && preserveType {
				// Use Int or String based on SQL type
				if w.isIntegerType(sqlType) {
					return "Int"
				}
				return "String"
			}
		}
		return "ID"
	}

	// Map SQL types to custom scalars
	if scalar := w.sqlTypeToCustomScalar(sqlType); scalar != "" {
		return scalar
	}

	// Check if it's an enum
	if w.isEnumType(sqlType, schema) {
		return sqlType
	}

	// Standard type mappings
	baseType := strings.Split(sqlType, "(")[0] // Remove length/precision
	baseType = strings.TrimSpace(baseType)

	// Handle array types
	if strings.HasSuffix(baseType, "[]") {
		elemType := strings.TrimSuffix(baseType, "[]")
		gqlType := w.mapBaseTypeToGraphQL(elemType)
		return "[" + gqlType + "]"
	}

	return w.mapBaseTypeToGraphQL(baseType)
}

func (w *Writer) mapBaseTypeToGraphQL(baseType string) string {
	typeMap := map[string]string{
		// Text types
		"text":      "String",
		"varchar":   "String",
		"char":      "String",
		"character": "String",
		"bpchar":    "String",
		"name":      "String",

		// UUID
		"uuid": "ID",

		// Integer types
		"integer":     "Int",
		"int":         "Int",
		"int2":        "Int",
		"int4":        "Int",
		"int8":        "Int",
		"bigint":      "Int",
		"smallint":    "Int",
		"serial":      "Int",
		"bigserial":   "Int",
		"smallserial": "Int",

		// Float types
		"double precision": "Float",
		"float":            "Float",
		"float4":           "Float",
		"float8":           "Float",
		"real":             "Float",
		"numeric":          "Float",
		"decimal":          "Float",
		"money":            "Float",

		// Boolean
		"boolean": "Boolean",
		"bool":    "Boolean",
	}

	if gqlType, ok := typeMap[baseType]; ok {
		return gqlType
	}

	// Default: capitalize first letter
	if len(baseType) > 0 {
		return strings.ToUpper(string(baseType[0])) + baseType[1:]
	}

	return "String"
}

func (w *Writer) sqlTypeToCustomScalar(sqlType string) string {
	scalarMap := map[string]string{
		"timestamp":                "DateTime",
		"timestamptz":              "DateTime",
		"timestamp with time zone": "DateTime",
		"jsonb":                    "JSON",
		"json":                     "JSON",
		"date":                     "Date",
	}

	baseType := strings.Split(sqlType, "(")[0]
	baseType = strings.TrimSpace(baseType)

	if scalar, ok := scalarMap[baseType]; ok {
		return scalar
	}

	return ""
}

func (w *Writer) isIntegerType(sqlType string) bool {
	intTypes := map[string]bool{
		"integer":     true,
		"int":         true,
		"int2":        true,
		"int4":        true,
		"int8":        true,
		"bigint":      true,
		"smallint":    true,
		"serial":      true,
		"bigserial":   true,
		"smallserial": true,
	}

	baseType := strings.Split(sqlType, "(")[0]
	baseType = strings.TrimSpace(baseType)

	return intTypes[baseType]
}

func (w *Writer) isEnumType(sqlType string, schema *models.Schema) bool {
	for _, enum := range schema.Enums {
		if enum.Name == sqlType {
			return true
		}
	}
	return false
}
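The lookup order above is: primary key, custom scalar, enum, then the base-type map, with a capitalize-first-letter fallback. A quick test-style sketch of the expected results (a hypothetical test derived from the maps above, not part of the diff):

```go
package graphql

import "testing"

// TestTypeMappingExamples is an illustrative sketch of the mapping rules.
func TestTypeMappingExamples(t *testing.T) {
	w := NewWriter(nil)

	cases := map[string]string{
		"varchar": "String",
		"int8":    "Int",
		"numeric": "Float",
		"citext":  "Citext", // unmapped base types fall back to capitalization
	}
	for in, want := range cases {
		if got := w.mapBaseTypeToGraphQL(in); got != want {
			t.Errorf("mapBaseTypeToGraphQL(%q) = %q, want %q", in, got, want)
		}
	}

	// Length/precision is stripped before the scalar lookup.
	if got := w.sqlTypeToCustomScalar("timestamp(6)"); got != "DateTime" {
		t.Errorf("sqlTypeToCustomScalar(\"timestamp(6)\") = %q, want DateTime", got)
	}
}
```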
272
pkg/writers/graphql/writer.go
Normal file
@@ -0,0 +1,272 @@
package graphql

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

type Writer struct {
	options *writers.WriterOptions
}

func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

func (w *Writer) WriteDatabase(db *models.Database) error {
	content := w.databaseToGraphQL(db)

	if w.options.OutputPath != "" {
		return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
	}

	fmt.Print(content)
	return nil
}

func (w *Writer) WriteSchema(schema *models.Schema) error {
	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}
	return w.WriteDatabase(db)
}

func (w *Writer) WriteTable(table *models.Table) error {
	schema := models.InitSchema(table.Schema)
	schema.Tables = []*models.Table{table}
	db := models.InitDatabase(schema.Name)
	db.Schemas = []*models.Schema{schema}
	return w.WriteDatabase(db)
}

func (w *Writer) databaseToGraphQL(db *models.Database) string {
	var sb strings.Builder

	// Header comment
	if w.shouldIncludeComments() {
		sb.WriteString("# Generated GraphQL Schema\n")
		if db.Name != "" {
			sb.WriteString(fmt.Sprintf("# Database: %s\n", db.Name))
		}
		sb.WriteString("\n")
	}

	// Custom scalar declarations
	if w.shouldIncludeScalarDeclarations() {
		scalars := w.collectCustomScalars(db)
		if len(scalars) > 0 {
			for _, scalar := range scalars {
				sb.WriteString(fmt.Sprintf("scalar %s\n", scalar))
			}
			sb.WriteString("\n")
		}
	}

	// Enum definitions
	for _, schema := range db.Schemas {
		for _, enum := range schema.Enums {
			sb.WriteString(w.enumToGraphQL(enum))
			sb.WriteString("\n")
		}
	}

	// Type definitions
	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			// Skip join tables (tables with only PK+FK columns)
			if w.isJoinTable(table) {
				continue
			}

			sb.WriteString(w.tableToGraphQL(table, db, schema))
			sb.WriteString("\n")
		}
	}

	return sb.String()
}

func (w *Writer) shouldIncludeComments() bool {
	if w.options.Metadata != nil {
		if include, ok := w.options.Metadata["includeComments"].(bool); ok {
			return include
		}
	}
	return true // Default to true
}

func (w *Writer) shouldIncludeScalarDeclarations() bool {
	if w.options.Metadata != nil {
		if include, ok := w.options.Metadata["includeScalarDeclarations"].(bool); ok {
			return include
		}
	}
	return true // Default to true
}

func (w *Writer) collectCustomScalars(db *models.Database) []string {
	scalarsNeeded := make(map[string]bool)

	for _, schema := range db.Schemas {
		for _, table := range schema.Tables {
			for _, col := range table.Columns {
				if scalar := w.sqlTypeToCustomScalar(col.Type); scalar != "" {
					scalarsNeeded[scalar] = true
				}
			}
		}
	}

	// Convert to sorted slice
	scalars := make([]string, 0, len(scalarsNeeded))
	for scalar := range scalarsNeeded {
		scalars = append(scalars, scalar)
	}
	sort.Strings(scalars)

	return scalars
}

func (w *Writer) isJoinTable(table *models.Table) bool {
	// A join table typically has:
	// 1. Exactly 2 FK constraints
	// 2. Composite primary key on those FK columns
	// 3. No other columns

	fkCount := 0
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			fkCount++
		}
	}

	if fkCount != 2 {
		return false
	}

	// Check if all columns are either PKs or FKs
	for _, col := range table.Columns {
		isFKColumn := false
		for _, constraint := range table.Constraints {
			if constraint.Type == models.ForeignKeyConstraint {
				for _, fkCol := range constraint.Columns {
					if fkCol == col.Name {
						isFKColumn = true
						break
					}
				}
			}
		}

		if !isFKColumn && !col.IsPrimaryKey {
			// Found a column that's neither PK nor FK
			return false
		}
	}

	return true
}

func (w *Writer) enumToGraphQL(enum *models.Enum) string {
	var sb strings.Builder

	sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
	for _, value := range enum.Values {
		sb.WriteString(fmt.Sprintf("  %s\n", value))
	}
	sb.WriteString("}\n")

	return sb.String()
}

func (w *Writer) tableToGraphQL(table *models.Table, db *models.Database, schema *models.Schema) string {
	var sb strings.Builder

	// Type name
	typeName := table.Name

	// Description comment
	if w.shouldIncludeComments() && (table.Description != "" || table.Comment != "") {
		desc := table.Description
		if desc == "" {
			desc = table.Comment
		}
		sb.WriteString(fmt.Sprintf("# %s\n", desc))
	}

	sb.WriteString(fmt.Sprintf("type %s {\n", typeName))

	// Collect and categorize fields
	var idFields, scalarFields, relationFields []string

	for _, column := range table.Columns {
		// Skip FK columns (they become relation fields)
		if w.isForeignKeyColumn(column, table) {
			continue
		}

		gqlType := w.sqlTypeToGraphQL(column.Type, column, table, schema)
		if gqlType == "" {
			continue // Skip if type couldn't be mapped
		}

		// Determine nullability
		if column.NotNull {
			gqlType += "!"
		}

		field := fmt.Sprintf("  %s: %s", column.Name, gqlType)

		if column.IsPrimaryKey {
			idFields = append(idFields, field)
		} else {
			scalarFields = append(scalarFields, field)
		}
	}

	// Add relation fields
	relationFields = w.generateRelationFields(table, db, schema)

	// Write fields in order: ID, scalars (sorted), relations (sorted)
	for _, field := range idFields {
		sb.WriteString(field + "\n")
	}

	sort.Strings(scalarFields)
	for _, field := range scalarFields {
		sb.WriteString(field + "\n")
	}

	if len(relationFields) > 0 {
		if len(scalarFields) > 0 || len(idFields) > 0 {
			sb.WriteString("\n") // Blank line before relations
		}
		sort.Strings(relationFields)
		for _, field := range relationFields {
			sb.WriteString(field + "\n")
		}
	}

	sb.WriteString("}\n")

	return sb.String()
}

func (w *Writer) isForeignKeyColumn(column *models.Column, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			for _, fkCol := range constraint.Columns {
				if fkCol == column.Name {
					return true
				}
			}
		}
	}
	return false
}
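Putting the writer to work mirrors the repo's other writers; a minimal sketch (the metadata keys are the ones read by shouldIncludeComments and shouldIncludeScalarDeclarations above; the database value would normally come from a reader):

```go
package main

import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
)

func main() {
	// Normally produced by a reader; an empty database keeps the sketch short.
	db := models.InitDatabase("example")

	opts := &writers.WriterOptions{
		OutputPath: "schema.graphql", // leave empty to print to stdout
		Metadata: map[string]any{
			"includeComments":           true,
			"includeScalarDeclarations": true,
		},
	}

	if err := graphql.NewWriter(opts).WriteDatabase(db); err != nil {
		log.Fatal(err)
	}
}
```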
412
pkg/writers/graphql/writer_test.go
Normal file
@@ -0,0 +1,412 @@
package graphql

import (
	"strings"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

func TestWriter_WriteTable_Simple(t *testing.T) {
	table := models.InitTable("User", "public")

	idCol := models.InitColumn("id", "User", "public")
	idCol.Type = "bigint"
	idCol.IsPrimaryKey = true
	idCol.NotNull = true
	table.Columns["id"] = idCol

	nameCol := models.InitColumn("name", "User", "public")
	nameCol.Type = "text"
	nameCol.NotNull = true
	table.Columns["name"] = nameCol

	emailCol := models.InitColumn("email", "User", "public")
	emailCol.Type = "text"
	emailCol.NotNull = false
	table.Columns["email"] = emailCol

	opts := &writers.WriterOptions{
		OutputPath: "",
	}

	writer := NewWriter(opts)
	schema := models.InitSchema("public")
	schema.Tables = []*models.Table{table}
	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	output := writer.databaseToGraphQL(db)

	// Verify output contains type definition
	if !strings.Contains(output, "type User {") {
		t.Error("Expected 'type User {' in output")
	}

	// Verify fields
	if !strings.Contains(output, "id: ID!") {
		t.Error("Expected 'id: ID!' in output")
	}

	if !strings.Contains(output, "name: String!") {
		t.Error("Expected 'name: String!' in output")
	}

	if !strings.Contains(output, "email: String") {
		t.Error("Expected 'email: String' in output")
	}

	// Ensure email is not followed by ! (nullable)
	if strings.Contains(output, "email: String!") {
		t.Error("Did not expect 'email: String!' (should be nullable)")
	}
}

func TestWriter_WriteDatabase_WithEnum(t *testing.T) {
	schema := models.InitSchema("public")

	// Create enum
	roleEnum := &models.Enum{
		Name:   "Role",
		Schema: "public",
		Values: []string{"ADMIN", "USER", "GUEST"},
	}
	schema.Enums = []*models.Enum{roleEnum}

	// Create table with enum field
	table := models.InitTable("User", "public")

	idCol := models.InitColumn("id", "User", "public")
	idCol.Type = "bigint"
	idCol.IsPrimaryKey = true
	idCol.NotNull = true
	table.Columns["id"] = idCol

	roleCol := models.InitColumn("role", "User", "public")
	roleCol.Type = "Role"
	roleCol.NotNull = true
	table.Columns["role"] = roleCol

	schema.Tables = []*models.Table{table}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// Verify enum definition
	if !strings.Contains(output, "enum Role {") {
		t.Error("Expected 'enum Role {' in output")
	}

	if !strings.Contains(output, "ADMIN") {
		t.Error("Expected 'ADMIN' enum value in output")
	}

	// Verify enum usage in type
	if !strings.Contains(output, "role: Role!") {
		t.Error("Expected 'role: Role!' in output")
	}
}

func TestWriter_WriteDatabase_WithRelations(t *testing.T) {
	schema := models.InitSchema("public")

	// Create User table
	userTable := models.InitTable("User", "public")
	userIdCol := models.InitColumn("id", "User", "public")
	userIdCol.Type = "bigint"
	userIdCol.IsPrimaryKey = true
	userIdCol.NotNull = true
	userTable.Columns["id"] = userIdCol

	userNameCol := models.InitColumn("name", "User", "public")
	userNameCol.Type = "text"
	userNameCol.NotNull = true
	userTable.Columns["name"] = userNameCol

	// Create Post table with FK to User
	postTable := models.InitTable("Post", "public")

	postIdCol := models.InitColumn("id", "Post", "public")
	postIdCol.Type = "bigint"
	postIdCol.IsPrimaryKey = true
	postIdCol.NotNull = true
	postTable.Columns["id"] = postIdCol

	titleCol := models.InitColumn("title", "Post", "public")
	titleCol.Type = "text"
	titleCol.NotNull = true
	postTable.Columns["title"] = titleCol

	authorIdCol := models.InitColumn("authorId", "Post", "public")
	authorIdCol.Type = "bigint"
	authorIdCol.NotNull = true
	postTable.Columns["authorId"] = authorIdCol

	// Add FK constraint
	fkConstraint := models.InitConstraint("fk_post_author", models.ForeignKeyConstraint)
	fkConstraint.Schema = "public"
	fkConstraint.Table = "Post"
	fkConstraint.Columns = []string{"authorId"}
	fkConstraint.ReferencedSchema = "public"
	fkConstraint.ReferencedTable = "User"
	fkConstraint.ReferencedColumns = []string{"id"}
	postTable.Constraints["fk_post_author"] = fkConstraint

	schema.Tables = []*models.Table{userTable, postTable}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// Verify Post has author field (forward relationship)
	if !strings.Contains(output, "author: User!") {
		t.Error("Expected 'author: User!' in Post type")
	}

	// Verify authorId FK column is NOT in the output
	if strings.Contains(output, "authorId:") {
		t.Error("Did not expect 'authorId:' field in output (FK columns should be hidden)")
	}

	// Verify User has posts field (reverse relationship)
	if !strings.Contains(output, "posts: [Post!]!") {
		t.Error("Expected 'posts: [Post!]!' in User type")
	}
}

func TestWriter_WriteDatabase_CustomScalars(t *testing.T) {
	schema := models.InitSchema("public")

	table := models.InitTable("Event", "public")

	idCol := models.InitColumn("id", "Event", "public")
	idCol.Type = "bigint"
	idCol.IsPrimaryKey = true
	idCol.NotNull = true
	table.Columns["id"] = idCol

	createdAtCol := models.InitColumn("createdAt", "Event", "public")
	createdAtCol.Type = "timestamp"
	createdAtCol.NotNull = true
	table.Columns["createdAt"] = createdAtCol

	metadataCol := models.InitColumn("metadata", "Event", "public")
	metadataCol.Type = "jsonb"
	metadataCol.NotNull = false
	table.Columns["metadata"] = metadataCol

	dateCol := models.InitColumn("eventDate", "Event", "public")
	dateCol.Type = "date"
	dateCol.NotNull = false
	table.Columns["eventDate"] = dateCol

	schema.Tables = []*models.Table{table}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// Verify scalar declarations
	if !strings.Contains(output, "scalar DateTime") {
		t.Error("Expected 'scalar DateTime' declaration")
	}

	if !strings.Contains(output, "scalar JSON") {
		t.Error("Expected 'scalar JSON' declaration")
	}

	if !strings.Contains(output, "scalar Date") {
		t.Error("Expected 'scalar Date' declaration")
	}

	// Verify field types
	if !strings.Contains(output, "createdAt: DateTime!") {
		t.Error("Expected 'createdAt: DateTime!' in output")
	}

	if !strings.Contains(output, "metadata: JSON") {
		t.Error("Expected 'metadata: JSON' in output")
	}

	if !strings.Contains(output, "eventDate: Date") {
		t.Error("Expected 'eventDate: Date' in output")
	}
}

func TestWriter_WriteDatabase_ManyToMany(t *testing.T) {
	schema := models.InitSchema("public")

	// Create Post table
	postTable := models.InitTable("Post", "public")
	postIdCol := models.InitColumn("id", "Post", "public")
	postIdCol.Type = "bigint"
	postIdCol.IsPrimaryKey = true
	postIdCol.NotNull = true
	postTable.Columns["id"] = postIdCol

	titleCol := models.InitColumn("title", "Post", "public")
	titleCol.Type = "text"
	titleCol.NotNull = true
	postTable.Columns["title"] = titleCol

	// Create Tag table
	tagTable := models.InitTable("Tag", "public")
	tagIdCol := models.InitColumn("id", "Tag", "public")
	tagIdCol.Type = "bigint"
	tagIdCol.IsPrimaryKey = true
	tagIdCol.NotNull = true
	tagTable.Columns["id"] = tagIdCol

	nameCol := models.InitColumn("name", "Tag", "public")
	nameCol.Type = "text"
	nameCol.NotNull = true
	tagTable.Columns["name"] = nameCol

	// Create PostTag join table
	joinTable := models.InitTable("PostTag", "public")

	postIdJoinCol := models.InitColumn("postId", "PostTag", "public")
	postIdJoinCol.Type = "bigint"
	postIdJoinCol.NotNull = true
	postIdJoinCol.IsPrimaryKey = true
	joinTable.Columns["postId"] = postIdJoinCol

	tagIdJoinCol := models.InitColumn("tagId", "PostTag", "public")
	tagIdJoinCol.Type = "bigint"
	tagIdJoinCol.NotNull = true
	tagIdJoinCol.IsPrimaryKey = true
	joinTable.Columns["tagId"] = tagIdJoinCol

	// Add composite PK constraint
	pkConstraint := models.InitConstraint("pk_posttag", models.PrimaryKeyConstraint)
	pkConstraint.Schema = "public"
	pkConstraint.Table = "PostTag"
	pkConstraint.Columns = []string{"postId", "tagId"}
	joinTable.Constraints["pk_posttag"] = pkConstraint

	// Add FK to Post
	fk1 := models.InitConstraint("fk_posttag_post", models.ForeignKeyConstraint)
	fk1.Schema = "public"
	fk1.Table = "PostTag"
	fk1.Columns = []string{"postId"}
	fk1.ReferencedSchema = "public"
	fk1.ReferencedTable = "Post"
	fk1.ReferencedColumns = []string{"id"}
	joinTable.Constraints["fk_posttag_post"] = fk1

	// Add FK to Tag
	fk2 := models.InitConstraint("fk_posttag_tag", models.ForeignKeyConstraint)
	fk2.Schema = "public"
	fk2.Table = "PostTag"
	fk2.Columns = []string{"tagId"}
	fk2.ReferencedSchema = "public"
	fk2.ReferencedTable = "Tag"
	fk2.ReferencedColumns = []string{"id"}
	joinTable.Constraints["fk_posttag_tag"] = fk2

	schema.Tables = []*models.Table{postTable, tagTable, joinTable}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// Verify join table is NOT in output
	if strings.Contains(output, "type PostTag") {
		t.Error("Did not expect 'type PostTag' (join tables should be hidden)")
	}

	// Verify Post has tags field
	if !strings.Contains(output, "tags: [Tag!]!") {
		t.Error("Expected 'tags: [Tag!]!' in Post type")
	}

	// Verify Tag has posts field
	if !strings.Contains(output, "posts: [Post!]!") {
		t.Error("Expected 'posts: [Post!]!' in Tag type")
	}
}

func TestWriter_WriteDatabase_UUIDType(t *testing.T) {
	schema := models.InitSchema("public")

	table := models.InitTable("User", "public")

	idCol := models.InitColumn("id", "User", "public")
	idCol.Type = "uuid"
	idCol.IsPrimaryKey = true
	idCol.NotNull = true
	table.Columns["id"] = idCol

	schema.Tables = []*models.Table{table}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// UUID primary keys should still map to ID
	if !strings.Contains(output, "id: ID!") {
		t.Error("Expected 'id: ID!' for UUID primary key")
	}
}

func TestWriter_Metadata_NoScalarDeclarations(t *testing.T) {
	schema := models.InitSchema("public")

	table := models.InitTable("Event", "public")

	idCol := models.InitColumn("id", "Event", "public")
	idCol.Type = "bigint"
	idCol.IsPrimaryKey = true
	table.Columns["id"] = idCol

	createdAtCol := models.InitColumn("createdAt", "Event", "public")
	createdAtCol.Type = "timestamp"
	createdAtCol.NotNull = true
	table.Columns["createdAt"] = createdAtCol

	schema.Tables = []*models.Table{table}

	db := models.InitDatabase("test")
	db.Schemas = []*models.Schema{schema}

	opts := &writers.WriterOptions{
		Metadata: map[string]any{
			"includeScalarDeclarations": false,
		},
	}
	writer := NewWriter(opts)

	output := writer.databaseToGraphQL(db)

	// Verify no scalar declarations
	if strings.Contains(output, "scalar DateTime") {
		t.Error("Did not expect 'scalar DateTime' with includeScalarDeclarations=false")
	}

	// But field should still use DateTime
	if !strings.Contains(output, "createdAt: DateTime!") {
		t.Error("Expected 'createdAt: DateTime!' in output")
	}
}
277
pkg/writers/json/README.md
Normal file
@@ -0,0 +1,277 @@
# JSON Writer

Generates database schema definitions in JSON format.

## Overview

The JSON Writer converts RelSpec's internal database model representation into JSON format, providing a complete, structured representation of the database schema.

## Features

- Generates RelSpec's canonical JSON schema format
- Complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences
- Pretty-printed, human-readable output
- Suitable for version control
- Ideal interchange format

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/json"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.json",
	}

	writer := json.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Export PostgreSQL database to JSON
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output json \
  --out-file schema.json

# Convert GORM models to JSON
relspec --input gorm --in-file models.go --output json --out-file schema.json

# Convert DBML to JSON
relspec --input dbml --in-file diagram.dbml --output json --out-file schema.json
```

## Generated JSON Example

```json
{
  "name": "myapp",
  "description": "",
  "database_type": "postgresql",
  "database_version": "",
  "source_format": "pgsql",
  "schemas": [
    {
      "name": "public",
      "description": "",
      "tables": [
        {
          "name": "users",
          "schema": "public",
          "description": "",
          "columns": {
            "id": {
              "name": "id",
              "table": "users",
              "schema": "public",
              "type": "bigint",
              "length": 0,
              "precision": 0,
              "scale": 0,
              "not_null": true,
              "is_primary_key": true,
              "auto_increment": true,
              "default": "",
              "sequence": 1
            },
            "username": {
              "name": "username",
              "table": "users",
              "schema": "public",
              "type": "varchar",
              "length": 50,
              "not_null": true,
              "is_primary_key": false,
              "auto_increment": false,
              "sequence": 2
            },
            "email": {
              "name": "email",
              "table": "users",
              "schema": "public",
              "type": "varchar",
              "length": 100,
              "not_null": true,
              "sequence": 3
            }
          },
          "constraints": {
            "pk_users": {
              "name": "pk_users",
              "type": "PRIMARY KEY",
              "table": "users",
              "schema": "public",
              "columns": ["id"]
            },
            "uq_users_username": {
              "name": "uq_users_username",
              "type": "UNIQUE",
              "table": "users",
              "schema": "public",
              "columns": ["username"]
            }
          },
          "indexes": {
            "idx_users_email": {
              "name": "idx_users_email",
              "table": "users",
              "schema": "public",
              "columns": ["email"],
              "unique": false,
              "type": "btree"
            }
          },
          "relationships": {}
        },
        {
          "name": "posts",
          "schema": "public",
          "columns": {
            "id": {
              "name": "id",
              "type": "bigint",
              "not_null": true,
              "is_primary_key": true,
              "sequence": 1
            },
            "user_id": {
              "name": "user_id",
              "type": "bigint",
              "not_null": true,
              "sequence": 2
            },
            "title": {
              "name": "title",
              "type": "varchar",
              "length": 200,
              "not_null": true,
              "sequence": 3
            },
            "content": {
              "name": "content",
              "type": "text",
              "not_null": false,
              "sequence": 4
            }
          },
          "constraints": {
            "fk_posts_user_id": {
              "name": "fk_posts_user_id",
              "type": "FOREIGN KEY",
              "table": "posts",
              "schema": "public",
              "columns": ["user_id"],
              "referenced_table": "users",
              "referenced_schema": "public",
              "referenced_columns": ["id"],
              "on_delete": "CASCADE",
              "on_update": "NO ACTION"
            }
          },
          "indexes": {
            "idx_posts_user_id": {
              "name": "idx_posts_user_id",
              "columns": ["user_id"],
              "unique": false,
              "type": "btree"
            }
          }
        }
      ],
      "views": [],
      "sequences": []
    }
  ]
}
```

## Schema Structure

The JSON format includes:

### Database Level
- `name` - Database name
- `description` - Database description
- `database_type` - Database system type
- `database_version` - Version information
- `source_format` - Original source format
- `schemas` - Array of schema objects

### Schema Level
- `name` - Schema name
- `description` - Schema description
- `tables` - Array of table objects
- `views` - Array of view objects
- `sequences` - Array of sequence objects

### Table Level
- `name` - Table name
- `schema` - Schema name
- `description` - Table description
- `columns` - Map of column objects
- `constraints` - Map of constraint objects
- `indexes` - Map of index objects
- `relationships` - Map of relationship objects

### Column Level
- `name` - Column name
- `type` - Data type
- `length` - Type length
- `precision`, `scale` - Numeric precision
- `not_null` - NOT NULL flag
- `is_primary_key` - Primary key flag
- `auto_increment` - Auto-increment flag
- `default` - Default value
- `sequence` - Column order

### Constraint Level
- `name` - Constraint name
- `type` - Constraint type (PRIMARY KEY, FOREIGN KEY, UNIQUE, CHECK)
- `columns` - Constrained columns
- `referenced_table`, `referenced_schema` - FK references
- `referenced_columns` - Referenced columns
- `on_delete`, `on_update` - FK actions

### Index Level
- `name` - Index name
- `columns` - Indexed columns
- `unique` - Unique flag
- `type` - Index type
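Because the output is plain JSON, it can be consumed without RelSpec at all; a minimal sketch using only the standard library (the struct mirrors just the fields needed here, and the file name is assumed):

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Partial mirror of the document structure above; the full model lives in pkg/models.
type dbDoc struct {
	Name    string `json:"name"`
	Schemas []struct {
		Name   string `json:"name"`
		Tables []struct {
			Name string `json:"name"`
		} `json:"tables"`
	} `json:"schemas"`
}

func main() {
	raw, err := os.ReadFile("schema.json")
	if err != nil {
		panic(err)
	}

	var doc dbDoc
	if err := json.Unmarshal(raw, &doc); err != nil {
		panic(err)
	}

	// List every table, schema-qualified.
	for _, s := range doc.Schemas {
		for _, t := range s.Tables {
			fmt.Printf("%s.%s\n", s.Name, t.Name)
		}
	}
}
```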
## Use Cases

- **Version Control** - Track schema changes in git
- **Documentation** - Human-readable schema documentation
- **Interchange** - Standard format for tool integration
- **Backup** - Schema backup without database access
- **Testing** - Test data for schema validation
- **API** - Schema information for APIs

## Notes

- Output is pretty-printed with 2-space indentation
- Preserves all schema metadata
- Can be round-tripped (read and write) without loss
- Database-agnostic format
- Ideal for automation and tooling
195
pkg/writers/pgsql/README.md
Normal file
@@ -0,0 +1,195 @@
# PostgreSQL Writer

Generates PostgreSQL DDL (Data Definition Language) SQL scripts from database schema information.

## Overview

The PostgreSQL Writer converts RelSpec's internal database model representation into PostgreSQL-compatible SQL DDL scripts, including CREATE TABLE statements, constraints, indexes, views, and sequences.

## Features

- Generates complete PostgreSQL DDL
- Creates schemas, tables, columns
- Defines constraints (PK, FK, unique, check)
- Creates indexes
- Generates views and sequences
- Supports migration scripts
- Includes audit triggers (optional)
- Handles PostgreSQL-specific data types

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.sql",
	}

	writer := pgsql.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate PostgreSQL DDL from JSON schema
relspec --input json \
  --in-file schema.json \
  --output pgsql \
  --out-file schema.sql

# Convert GORM models to PostgreSQL DDL
relspec --input gorm \
  --in-file models.go \
  --output pgsql \
  --out-file create_tables.sql

# Export live database schema to SQL
relspec --input pgsql \
  --conn "postgres://localhost/source_db" \
  --output pgsql \
  --out-file backup_schema.sql
```

## Generated SQL Example

```sql
-- Schema: public

CREATE SCHEMA IF NOT EXISTS public;

-- Table: public.users

CREATE TABLE IF NOT EXISTS public.users (
    id BIGSERIAL PRIMARY KEY,
    username VARCHAR(50) NOT NULL,
    email VARCHAR(100) NOT NULL,
    bio TEXT,
    created_at TIMESTAMP NOT NULL DEFAULT NOW()
);

-- Constraints for public.users

ALTER TABLE public.users
    ADD CONSTRAINT uq_users_username UNIQUE (username);

-- Indexes for public.users

CREATE INDEX idx_users_email ON public.users (email);

-- Table: public.posts

CREATE TABLE IF NOT EXISTS public.posts (
    id BIGSERIAL PRIMARY KEY,
    user_id BIGINT NOT NULL,
    title VARCHAR(200) NOT NULL,
    content TEXT,
    created_at TIMESTAMP DEFAULT NOW()
);

-- Foreign Keys for public.posts

ALTER TABLE public.posts
    ADD CONSTRAINT fk_posts_user_id
    FOREIGN KEY (user_id)
    REFERENCES public.users (id)
    ON DELETE CASCADE
    ON UPDATE NO ACTION;

-- Indexes for public.posts

CREATE INDEX idx_posts_user_id ON public.posts (user_id);
```

## Writer Options

### Metadata Options

```go
options := &writers.WriterOptions{
	OutputPath: "schema.sql",
	Metadata: map[string]interface{}{
		"include_drop":   true,  // Include DROP statements
		"include_audit":  true,  // Include audit triggers
		"if_not_exists":  true,  // Use IF NOT EXISTS
		"migration_mode": false, // Generate migration script
	},
}
```

## Features

### Full DDL Generation

Generates complete database structure:
- CREATE SCHEMA statements
- CREATE TABLE with all columns and types
- PRIMARY KEY constraints
- FOREIGN KEY constraints with actions
- UNIQUE constraints
- CHECK constraints
- CREATE INDEX statements
- CREATE VIEW statements
- CREATE SEQUENCE statements

### Migration Mode

When `migration_mode` is enabled, generates migration scripts with:
- Version tracking
- Up/down migrations
- Transactional DDL
- Rollback support

### Audit Triggers

When `include_audit` is enabled, adds:
- Created/updated timestamp triggers
- Audit logging functionality
- Change tracking

## PostgreSQL-Specific Features

- Serial types (SERIAL, BIGSERIAL)
- Advanced types (UUID, JSONB, ARRAY)
- Schema-qualified names
- Constraint actions (CASCADE, RESTRICT, SET NULL)
- Partial indexes
- Function-based indexes
- Check constraints with expressions

## Data Types

Supports all PostgreSQL data types:
- Integer types: SMALLINT, INTEGER, BIGINT, SERIAL, BIGSERIAL
- Numeric types: NUMERIC, DECIMAL, REAL, DOUBLE PRECISION
- String types: VARCHAR, CHAR, TEXT
- Date/Time: DATE, TIME, TIMESTAMP, TIMESTAMPTZ, INTERVAL
- Boolean: BOOLEAN
- Binary: BYTEA
- JSON: JSON, JSONB
- UUID: UUID
- Network: INET, CIDR, MACADDR
- Special: ARRAY, HSTORE

## Notes

- Generated SQL is formatted and readable
- Comments are preserved from source schema
- Schema names are fully qualified
- Default values are properly quoted
- Constraint names follow PostgreSQL conventions
- Compatible with PostgreSQL 12+
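A sketch combining the metadata switches documented above for an initial migration (option keys as listed under Writer Options; the output path and database name are illustrative):

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
)

func main() {
	db := models.InitDatabase("mydb") // normally produced by a reader

	options := &writers.WriterOptions{
		OutputPath: "migrations/0001_init.sql",
		Metadata: map[string]interface{}{
			"migration_mode": true,  // emit a migration-style script
			"include_drop":   false, // no DROP statements in the first migration
			"if_not_exists":  true,  // idempotent CREATE statements
		},
	}

	writer := pgsql.NewWriter(options)
	if err := writer.WriteDatabase(db); err != nil {
		panic(err)
	}
}
```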
135
pkg/writers/prisma/README.md
Normal file
@@ -0,0 +1,135 @@
# Prisma Writer

Generates Prisma schema files from database schema information.

## Overview

The Prisma Writer converts RelSpec's internal database model representation into Prisma schema language (`.prisma` files), complete with models, fields, relationships, and attributes.

## Features

- Generates Prisma schema syntax
- Creates model definitions with proper field types
- Adds Prisma attributes (@id, @unique, @default, etc.)
- Generates relationship fields
- Includes datasource and generator configurations
- Maps table/column names with @map and @@map

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.prisma",
		Metadata: map[string]interface{}{
			"datasource_provider": "postgresql",
		},
	}

	writer := prisma.NewWriter(options)
	err := writer.WriteDatabase(db)
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate Prisma schema from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output prisma \
  --out-file schema.prisma

# Convert GORM models to Prisma
relspec --input gorm --in-file models.go --output prisma --out-file schema.prisma

# Convert JSON to Prisma schema
relspec --input json --in-file database.json --output prisma --out-file prisma/schema.prisma
```

## Generated Code Example

```prisma
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

generator client {
  provider = "prisma-client-js"
}

model User {
  id        Int      @id @default(autoincrement())
  username  String   @unique @db.VarChar(50)
  email     String   @db.VarChar(100)
  bio       String?  @db.Text
  createdAt DateTime @default(now()) @map("created_at")

  posts Post[]

  @@map("users")
}

model Post {
  id      Int     @id @default(autoincrement())
  userId  Int     @map("user_id")
  title   String  @db.VarChar(200)
  content String? @db.Text

  user User @relation(fields: [userId], references: [id], onDelete: Cascade)

  @@map("posts")
}
```

## Supported Prisma Attributes

### Field Attributes
- `@id` - Primary key
- `@unique` - Unique constraint
- `@default()` - Default value
- `@map()` - Column name mapping
- `@db.*` - Database-specific types
- `@relation()` - Relationship definition

### Model Attributes
- `@@map()` - Table name mapping
- `@@unique()` - Composite unique constraints
- `@@index()` - Index definitions
- `@@id()` - Composite primary keys

## Type Mapping

| SQL Type | Prisma Type | Database Type |
|----------|-------------|---------------|
| bigint | BigInt | - |
| integer | Int | - |
| varchar(n) | String | @db.VarChar(n) |
| text | String | @db.Text |
| boolean | Boolean | - |
| timestamp | DateTime | @db.Timestamp |
| uuid | String | @db.Uuid |
| json | Json | - |

## Notes

- Model names are PascalCase (e.g., `User`, `Post`)
- Field names are camelCase with `@map` for snake_case columns
- Table names use `@@map` when different from model name
- Nullable fields are marked with `?`
- Relationship fields are automatically generated
- Datasource provider defaults to `postgresql`
551
pkg/writers/prisma/writer.go
Normal file
@@ -0,0 +1,551 @@
package prisma

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

// Writer implements the writers.Writer interface for Prisma schema format
type Writer struct {
	options *writers.WriterOptions
}

// NewWriter creates a new Prisma writer with the given options
func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

// WriteDatabase writes a Database model to Prisma schema format
func (w *Writer) WriteDatabase(db *models.Database) error {
	content := w.databaseToPrisma(db)

	if w.options.OutputPath != "" {
		return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
	}

	fmt.Print(content)
	return nil
}

// WriteSchema writes a Schema model to Prisma schema format
func (w *Writer) WriteSchema(schema *models.Schema) error {
	// Create temporary database for schema
	db := models.InitDatabase("database")
	db.Schemas = []*models.Schema{schema}

	return w.WriteDatabase(db)
}

// WriteTable writes a Table model to Prisma schema format
func (w *Writer) WriteTable(table *models.Table) error {
	// Create temporary schema and database for table
	schema := models.InitSchema(table.Schema)
	schema.Tables = []*models.Table{table}

	return w.WriteSchema(schema)
}

// databaseToPrisma converts a Database to Prisma schema format string
func (w *Writer) databaseToPrisma(db *models.Database) string {
	var sb strings.Builder

	// Write datasource block
	sb.WriteString(w.generateDatasource(db))
	sb.WriteString("\n")

	// Write generator block
	sb.WriteString(w.generateGenerator())
	sb.WriteString("\n")

	// Process all schemas (typically just one in Prisma)
	for _, schema := range db.Schemas {
		// Write enums
		if len(schema.Enums) > 0 {
			for _, enum := range schema.Enums {
				sb.WriteString(w.enumToPrisma(enum))
				sb.WriteString("\n")
			}
		}

		// Identify join tables for implicit M2M
		joinTables := w.identifyJoinTables(schema)

		// Write models (excluding join tables)
		for _, table := range schema.Tables {
			if joinTables[table.Name] {
				continue // Skip join tables
			}
			sb.WriteString(w.tableToPrisma(table, schema, joinTables))
			sb.WriteString("\n")
		}
	}

	return sb.String()
}

// generateDatasource generates the datasource block
func (w *Writer) generateDatasource(db *models.Database) string {
	provider := "postgresql"

	// Map database type to Prisma provider
	switch db.DatabaseType {
	case models.PostgresqlDatabaseType:
		provider = "postgresql"
	case models.MSSQLDatabaseType:
		provider = "sqlserver"
	case models.SqlLiteDatabaseType:
		provider = "sqlite"
	case "mysql":
		provider = "mysql"
	}

	return fmt.Sprintf(`datasource db {
  provider = "%s"
  url      = env("DATABASE_URL")
}
`, provider)
}

// generateGenerator generates the generator block
func (w *Writer) generateGenerator() string {
	return `generator client {
  provider = "prisma-client-js"
}
`
}

// enumToPrisma converts an Enum to Prisma enum block
func (w *Writer) enumToPrisma(enum *models.Enum) string {
	var sb strings.Builder

	sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
	for _, value := range enum.Values {
		sb.WriteString(fmt.Sprintf("  %s\n", value))
	}
	sb.WriteString("}\n")

	return sb.String()
}

// identifyJoinTables identifies tables that are join tables for M2M relations
func (w *Writer) identifyJoinTables(schema *models.Schema) map[string]bool {
	joinTables := make(map[string]bool)

	for _, table := range schema.Tables {
		// Check if this is a join table:
		// 1. Starts with _ (Prisma convention)
		// 2. Has exactly 2 FK constraints
		// 3. Has composite PK with those 2 columns
		// 4. Has no other columns except the FK columns

		if !strings.HasPrefix(table.Name, "_") {
			continue
		}

		fks := table.GetForeignKeys()
		if len(fks) != 2 {
			continue
		}

		// Check if columns are only the FK columns
		if len(table.Columns) != 2 {
			continue
		}

		// Check if both FK columns are part of PK
		pkCols := 0
		for _, col := range table.Columns {
			if col.IsPrimaryKey {
				pkCols++
			}
		}

		if pkCols == 2 {
			joinTables[table.Name] = true
		}
	}

	return joinTables
}
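For reference, a table that passes all four checks looks like this (an illustrative fragment using the same model helpers as the tests elsewhere in this diff; the `_PostToTag` name and constraint names are hypothetical):

```go
// Illustrative fragment: a Prisma-style implicit M2M join table.
joinTable := models.InitTable("_PostToTag", "public")

postID := models.InitColumn("postId", "_PostToTag", "public")
postID.Type = "bigint"
postID.IsPrimaryKey = true // both columns form the composite PK
joinTable.Columns["postId"] = postID

tagID := models.InitColumn("tagId", "_PostToTag", "public")
tagID.Type = "bigint"
tagID.IsPrimaryKey = true
joinTable.Columns["tagId"] = tagID

// Exactly two FK constraints, one per side of the relation.
fkPost := models.InitConstraint("fk_pt_post", models.ForeignKeyConstraint)
fkPost.Columns = []string{"postId"}
fkPost.ReferencedTable = "Post"
joinTable.Constraints["fk_pt_post"] = fkPost

fkTag := models.InitConstraint("fk_pt_tag", models.ForeignKeyConstraint)
fkTag.Columns = []string{"tagId"}
fkTag.ReferencedTable = "Tag"
joinTable.Constraints["fk_pt_tag"] = fkTag

// identifyJoinTables(schema) now marks "_PostToTag",
// so no model block is emitted for it.
```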
|
|
||||||
|
// tableToPrisma converts a Table to a Prisma model block
func (w *Writer) tableToPrisma(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
	var sb strings.Builder

	sb.WriteString(fmt.Sprintf("model %s {\n", table.Name))

	// Collect columns to write
	columns := make([]*models.Column, 0, len(table.Columns))
	for _, col := range table.Columns {
		columns = append(columns, col)
	}

	// Sort columns for consistent output
	sort.Slice(columns, func(i, j int) bool {
		return columns[i].Name < columns[j].Name
	})

	// Write scalar fields
	for _, col := range columns {
		// Skip FK columns here; they are emitted together with their relation fields
		if w.isRelationColumn(col, table) {
			continue
		}

		sb.WriteString(w.columnToField(col, table, schema))
	}

	// Write relation fields
	sb.WriteString(w.generateRelationFields(table, schema, joinTables))

	// Write block attributes (@@id, @@unique, @@index)
	sb.WriteString(w.generateBlockAttributes(table))

	sb.WriteString("}\n")

	return sb.String()
}
// columnToField converts a Column to a Prisma field definition
func (w *Writer) columnToField(col *models.Column, table *models.Table, schema *models.Schema) string {
	var sb strings.Builder

	// Field name
	sb.WriteString(fmt.Sprintf(" %s", col.Name))

	// Field type
	prismaType := w.sqlTypeToPrisma(col.Type, schema)
	sb.WriteString(fmt.Sprintf(" %s", prismaType))

	// Optional modifier
	if !col.NotNull && !col.IsPrimaryKey {
		sb.WriteString("?")
	}

	// Field attributes
	attributes := w.generateFieldAttributes(col, table)
	if attributes != "" {
		sb.WriteString(" ")
		sb.WriteString(attributes)
	}

	sb.WriteString("\n")

	return sb.String()
}
// sqlTypeToPrisma converts SQL types to Prisma types
func (w *Writer) sqlTypeToPrisma(sqlType string, schema *models.Schema) string {
	// Check if it's an enum
	for _, enum := range schema.Enums {
		if strings.EqualFold(sqlType, enum.Name) {
			return enum.Name
		}
	}

	// Standard type mapping
	typeMap := map[string]string{
		"text":              "String",
		"varchar":           "String",
		"character varying": "String",
		"char":              "String",
		"boolean":           "Boolean",
		"bool":              "Boolean",
		"integer":           "Int",
		"int":               "Int",
		"int4":              "Int",
		"bigint":            "BigInt",
		"int8":              "BigInt",
		"double precision":  "Float",
		"float":             "Float",
		"float8":            "Float",
		"decimal":           "Decimal",
		"numeric":           "Decimal",
		"timestamp":         "DateTime",
		"timestamptz":       "DateTime",
		"date":              "DateTime",
		"jsonb":             "Json",
		"json":              "Json",
		"bytea":             "Bytes",
	}

	lower := strings.ToLower(sqlType)

	// Prefer an exact match so that e.g. "int8" maps to BigInt rather than Int
	if prismaType, ok := typeMap[lower]; ok {
		return prismaType
	}

	// Fall back to substring matching with the longest patterns first so that
	// overlapping patterns ("bigint" vs "int") resolve deterministically;
	// ranging over the map directly would make the result depend on Go's
	// random map iteration order
	patterns := make([]string, 0, len(typeMap))
	for sqlPattern := range typeMap {
		patterns = append(patterns, sqlPattern)
	}
	sort.Slice(patterns, func(i, j int) bool {
		return len(patterns[i]) > len(patterns[j])
	})
	for _, sqlPattern := range patterns {
		if strings.Contains(lower, sqlPattern) {
			return typeMap[sqlPattern]
		}
	}

	// Default to String for unknown types
	return "String"
}
// generateFieldAttributes generates field attributes like @id, @unique, @default
func (w *Writer) generateFieldAttributes(col *models.Column, table *models.Table) string {
	attrs := make([]string, 0)

	// @id
	if col.IsPrimaryKey {
		// Check if this is part of a composite key
		pkCount := 0
		for _, c := range table.Columns {
			if c.IsPrimaryKey {
				pkCount++
			}
		}
		if pkCount == 1 {
			attrs = append(attrs, "@id")
		}
	}

	// @unique
	if w.hasUniqueConstraint(col.Name, table) {
		attrs = append(attrs, "@unique")
	}

	// @default
	if col.AutoIncrement {
		attrs = append(attrs, "@default(autoincrement())")
	} else if col.Default != nil {
		defaultAttr := w.formatDefaultValue(col.Default)
		if defaultAttr != "" {
			attrs = append(attrs, fmt.Sprintf("@default(%s)", defaultAttr))
		}
	}

	// @updatedAt (check comment)
	if strings.Contains(col.Comment, "@updatedAt") {
		attrs = append(attrs, "@updatedAt")
	}

	return strings.Join(attrs, " ")
}
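// As a sketch: for a hypothetical auto-incrementing integer primary key
// named "id", the attributes built above combine with columnToField into
//
//	id Int @id @default(autoincrement())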
// formatDefaultValue formats a default value for Prisma
func (w *Writer) formatDefaultValue(defaultValue any) string {
	switch v := defaultValue.(type) {
	case string:
		if v == "now()" {
			return "now()"
		} else if v == "gen_random_uuid()" {
			return "uuid()"
		} else if strings.Contains(strings.ToLower(v), "uuid") {
			return "uuid()"
		}
		// String literal
		return fmt.Sprintf(`"%s"`, v)
	case bool:
		if v {
			return "true"
		}
		return "false"
	case int, int64, int32:
		return fmt.Sprintf("%v", v)
	default:
		return fmt.Sprintf("%v", v)
	}
}

// hasUniqueConstraint checks if a column has a unique constraint
func (w *Writer) hasUniqueConstraint(colName string, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint &&
			len(constraint.Columns) == 1 &&
			constraint.Columns[0] == colName {
			return true
		}
	}
	return false
}

// isRelationColumn checks if a column is a FK column
func (w *Writer) isRelationColumn(col *models.Column, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			for _, fkCol := range constraint.Columns {
				if fkCol == col.Name {
					return true
				}
			}
		}
	}
	return false
}

// generateRelationFields generates relation fields and their FK columns
func (w *Writer) generateRelationFields(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
	var sb strings.Builder

	// Get all FK constraints
	fks := table.GetForeignKeys()

	for _, fk := range fks {
		// Generate the FK scalar field
		for _, fkCol := range fk.Columns {
			if col, exists := table.Columns[fkCol]; exists {
				sb.WriteString(w.columnToField(col, table, schema))
			}
		}

		// Generate the relation field
		relationType := fk.ReferencedTable
		isOptional := false

		// Check if FK column is nullable
		for _, fkCol := range fk.Columns {
			if col, exists := table.Columns[fkCol]; exists {
				if !col.NotNull {
					isOptional = true
				}
			}
		}

		relationName := relationType
		if strings.HasSuffix(strings.ToLower(relationName), "s") {
			relationName = relationName[:len(relationName)-1]
		}

		sb.WriteString(fmt.Sprintf(" %s %s", strings.ToLower(relationName), relationType))

		if isOptional {
			sb.WriteString("?")
		}

		// @relation attribute
		relationAttr := w.generateRelationAttribute(fk)
		if relationAttr != "" {
			sb.WriteString(" ")
			sb.WriteString(relationAttr)
		}

		sb.WriteString("\n")
	}

	// Generate inverse relations (arrays) for tables that reference this one
	sb.WriteString(w.generateInverseRelations(table, schema, joinTables))

	return sb.String()
}

// generateRelationAttribute generates the @relation(...) attribute
func (w *Writer) generateRelationAttribute(fk *models.Constraint) string {
	parts := make([]string, 0)

	// fields
	fieldsStr := strings.Join(fk.Columns, ", ")
	parts = append(parts, fmt.Sprintf("fields: [%s]", fieldsStr))

	// references
	referencesStr := strings.Join(fk.ReferencedColumns, ", ")
	parts = append(parts, fmt.Sprintf("references: [%s]", referencesStr))

	// onDelete
	if fk.OnDelete != "" {
		parts = append(parts, fmt.Sprintf("onDelete: %s", fk.OnDelete))
	}

	// onUpdate
	if fk.OnUpdate != "" {
		parts = append(parts, fmt.Sprintf("onUpdate: %s", fk.OnUpdate))
	}

	return fmt.Sprintf("@relation(%s)", strings.Join(parts, ", "))
}
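// As a sketch: a hypothetical posts.user_id column referencing users.id with
// ON DELETE CASCADE comes out of the two helpers above as
//
//	user_id Int
//	user users @relation(fields: [user_id], references: [id], onDelete: CASCADE)
//
// Note that OnDelete/OnUpdate values are passed through verbatim; they are
// not mapped onto Prisma's referential action names (Cascade, SetNull, ...).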
// generateInverseRelations generates array fields for reverse relationships
func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
	var sb strings.Builder

	// Find all tables that have FKs pointing to this table
	for _, otherTable := range schema.Tables {
		if otherTable.Name == table.Name {
			continue
		}

		// Check if this is a join table
		if joinTables[otherTable.Name] {
			// Handle implicit M2M
			if w.isJoinTableFor(otherTable, table.Name) {
				// Find the other side of the M2M
				for _, fk := range otherTable.GetForeignKeys() {
					if fk.ReferencedTable != table.Name {
						// This is the other side
						otherSide := fk.ReferencedTable
						sb.WriteString(fmt.Sprintf(" %ss %s[]\n",
							strings.ToLower(otherSide), otherSide))
						break
					}
				}
			}
			continue
		}

		// Regular one-to-many inverse relation
		for _, fk := range otherTable.GetForeignKeys() {
			if fk.ReferencedTable == table.Name {
				// This table is referenced by otherTable
				pluralName := otherTable.Name
				if !strings.HasSuffix(pluralName, "s") {
					pluralName += "s"
				}

				sb.WriteString(fmt.Sprintf(" %s %s[]\n",
					strings.ToLower(pluralName), otherTable.Name))
			}
		}
	}

	return sb.String()
}

// isJoinTableFor checks if a table is a join table involving the specified model
func (w *Writer) isJoinTableFor(joinTable *models.Table, modelName string) bool {
	for _, fk := range joinTable.GetForeignKeys() {
		if fk.ReferencedTable == modelName {
			return true
		}
	}
	return false
}

// generateBlockAttributes generates block-level attributes like @@id, @@unique, @@index
func (w *Writer) generateBlockAttributes(table *models.Table) string {
	var sb strings.Builder

	// @@id for composite primary key
	pkCols := make([]string, 0)
	for _, col := range table.Columns {
		if col.IsPrimaryKey {
			pkCols = append(pkCols, col.Name)
		}
	}

	if len(pkCols) > 1 {
		sort.Strings(pkCols)
		sb.WriteString(fmt.Sprintf(" @@id([%s])\n", strings.Join(pkCols, ", ")))
	}

	// @@unique for multi-column unique constraints
	for _, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint && len(constraint.Columns) > 1 {
			sb.WriteString(fmt.Sprintf(" @@unique([%s])\n", strings.Join(constraint.Columns, ", ")))
		}
	}

	// @@index for indexes
	for _, index := range table.Indexes {
		if !index.Unique { // Unique indexes are handled by @@unique
			sb.WriteString(fmt.Sprintf(" @@index([%s])\n", strings.Join(index.Columns, ", ")))
		}
	}

	return sb.String()
}
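// As a sketch: a hypothetical table with a composite key over user_id and
// role_id plus a non-unique index on created_at yields block attributes like
//
//	@@id([role_id, user_id])
//	@@index([created_at])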
169
pkg/writers/typeorm/README.md
Normal file
@@ -0,0 +1,169 @@
# TypeORM Writer

Generates TypeScript files with TypeORM entity definitions from database schema information.

## Overview

The TypeORM Writer converts RelSpec's internal database model representation into TypeScript source code with TypeORM entity classes, including proper decorators, relationships, and column configurations.

## Features

- Generates TypeORM-compatible TypeScript entities
- Emits the appropriate decorators (@Entity, @Column, etc.)
- Adds relationship decorators (@OneToMany, @ManyToOne, @JoinColumn)
- Handles column types and options
- Supports constraints and indexes
- Outputs formatted TypeScript code

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "entities/",
	}

	writer := typeorm.NewWriter(options)
	err := writer.WriteDatabase(db) // db is a *models.Database produced by one of the readers
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Generate TypeORM entities from PostgreSQL database
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output typeorm \
  --out-file entities/

# Convert GORM models to TypeORM
relspec --input gorm --in-file models.go --output typeorm --out-file src/entities/

# Convert JSON to TypeORM entities
relspec --input json --in-file schema.json --output typeorm --out-file entities/
```

## Generated Code Example

```typescript
import {
  Entity,
  PrimaryGeneratedColumn,
  Column,
  CreateDateColumn,
  OneToMany,
  ManyToOne,
  JoinColumn,
  Index,
} from 'typeorm';
import { Post } from './Post';

@Entity('users')
export class User {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ type: 'varchar', length: 50, unique: true })
  @Index()
  username: string;

  @Column({ type: 'varchar', length: 100 })
  email: string;

  @Column({ type: 'text', nullable: true })
  bio: string | null;

  @CreateDateColumn({ name: 'created_at' })
  createdAt: Date;

  @OneToMany(() => Post, (post) => post.user)
  posts: Post[];
}

@Entity('posts')
export class Post {
  @PrimaryGeneratedColumn('increment')
  id: number;

  @Column({ name: 'user_id' })
  userId: number;

  @Column({ type: 'varchar', length: 200 })
  title: string;

  @Column({ type: 'text', nullable: true })
  content: string | null;

  @ManyToOne(() => User, (user) => user.posts, { onDelete: 'CASCADE' })
  @JoinColumn({ name: 'user_id' })
  user: User;
}
```

## Supported TypeORM Decorators

### Entity Decorators

- `@Entity()` - Define entity/table
- `@PrimaryGeneratedColumn()` - Auto-increment primary key
- `@PrimaryColumn()` - Primary key
- `@Column()` - Column definition
- `@CreateDateColumn()` - Auto-set creation timestamp
- `@UpdateDateColumn()` - Auto-update timestamp

### Relationship Decorators

- `@OneToMany()` - One-to-many relationship
- `@ManyToOne()` - Many-to-one relationship
- `@JoinColumn()` - Foreign key column specification

### Constraint Decorators

- `@Index()` - Create index
- `@Unique()` - Unique constraint

## Column Options

```typescript
@Column({
  type: 'varchar',     // Column type
  length: 255,         // Length for varchar/char
  nullable: true,      // Allow NULL
  unique: true,        // Unique constraint
  default: 'value',    // Default value
  name: 'column_name', // Database column name
})
```

## Type Mapping

| SQL Type  | TypeScript Type | TypeORM Type |
|-----------|-----------------|--------------|
| bigint    | number          | 'bigint'     |
| integer   | number          | 'int'        |
| varchar   | string          | 'varchar'    |
| text      | string          | 'text'       |
| boolean   | boolean         | 'boolean'    |
| timestamp | Date            | 'timestamp'  |
| json      | object          | 'json'       |
| uuid      | string          | 'uuid'       |

## Notes

- Entity class names are PascalCase
- One file per entity (named after the entity)
- Relationship imports are auto-generated
- Nullable columns use a TypeScript union with `null`
- Foreign key actions (CASCADE, etc.) are included
- Schema names can be specified in the `@Entity()` decorator
631
pkg/writers/typeorm/writer.go
Normal file
@@ -0,0 +1,631 @@
package typeorm

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
)

// Writer implements the writers.Writer interface for TypeORM entity format
type Writer struct {
	options *writers.WriterOptions
}

// NewWriter creates a new TypeORM writer with the given options
func NewWriter(options *writers.WriterOptions) *Writer {
	return &Writer{
		options: options,
	}
}

// WriteDatabase writes a Database model to TypeORM entity format
func (w *Writer) WriteDatabase(db *models.Database) error {
	content := w.databaseToTypeORM(db)

	if w.options.OutputPath != "" {
		return os.WriteFile(w.options.OutputPath, []byte(content), 0644)
	}

	fmt.Print(content)
	return nil
}

// WriteSchema writes a Schema model to TypeORM entity format
func (w *Writer) WriteSchema(schema *models.Schema) error {
	db := models.InitDatabase("database")
	db.Schemas = []*models.Schema{schema}

	return w.WriteDatabase(db)
}

// WriteTable writes a Table model to TypeORM entity format
func (w *Writer) WriteTable(table *models.Table) error {
	schema := models.InitSchema(table.Schema)
	schema.Tables = []*models.Table{table}

	return w.WriteSchema(schema)
}

// databaseToTypeORM converts a Database to TypeORM entity format string
func (w *Writer) databaseToTypeORM(db *models.Database) string {
	var sb strings.Builder

	// Generate imports
	sb.WriteString(w.generateImports(db))
	sb.WriteString("\n")

	// Process all schemas
	for _, schema := range db.Schemas {
		// Identify join tables
		joinTables := w.identifyJoinTables(schema)

		// Write entities (excluding join tables)
		for _, table := range schema.Tables {
			if joinTables[table.Name] {
				continue
			}
			sb.WriteString(w.tableToEntity(table, schema, joinTables))
			sb.WriteString("\n")
		}

		// Write view entities
		for _, view := range schema.Views {
			sb.WriteString(w.viewToEntity(view))
			sb.WriteString("\n")
		}
	}

	return sb.String()
}

// generateImports generates the TypeORM import statement
func (w *Writer) generateImports(db *models.Database) string {
	imports := make([]string, 0)

	// Always include basic decorators
	imports = append(imports, "Entity", "PrimaryGeneratedColumn", "Column")

	// Check if we need relation decorators
	needsManyToOne := false
	needsOneToMany := false
	needsManyToMany := false
	needsJoinTable := false
	needsCreateDate := false
	needsUpdateDate := false
	needsViewEntity := false

	for _, schema := range db.Schemas {
		// Check for views
		if len(schema.Views) > 0 {
			needsViewEntity = true
		}

		for _, table := range schema.Tables {
			// Check for timestamp columns
			for _, col := range table.Columns {
				if col.Default == "now()" {
					needsCreateDate = true
				}
				if strings.Contains(col.Comment, "auto-update") {
					needsUpdateDate = true
				}
			}

			// Check for relations
			for _, constraint := range table.Constraints {
				if constraint.Type == models.ForeignKeyConstraint {
					needsManyToOne = true
				}
			}
		}
	}

	// OneToMany is the inverse of ManyToOne
	if needsManyToOne {
		needsOneToMany = true
	}

	// Check for M2M (join tables indicate M2M relations)
	joinTables := make(map[string]bool)
	for _, schema := range db.Schemas {
		jt := w.identifyJoinTables(schema)
		for name := range jt {
			joinTables[name] = true
			needsManyToMany = true
			needsJoinTable = true
		}
	}

	if needsManyToOne {
		imports = append(imports, "ManyToOne")
	}
	if needsOneToMany {
		imports = append(imports, "OneToMany")
	}
	if needsManyToMany {
		imports = append(imports, "ManyToMany")
	}
	if needsJoinTable {
		imports = append(imports, "JoinTable")
	}
	if needsCreateDate {
		imports = append(imports, "CreateDateColumn")
	}
	if needsUpdateDate {
		imports = append(imports, "UpdateDateColumn")
	}
	if needsViewEntity {
		imports = append(imports, "ViewEntity")
	}

	return fmt.Sprintf("import { %s } from 'typeorm';\n", strings.Join(imports, ", "))
}
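// As a sketch: for a hypothetical schema with FK relations and at least one
// view, the generated import line looks like
//
//	import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, OneToMany, ViewEntity } from 'typeorm';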
// identifyJoinTables identifies tables that are join tables for M2M relations
func (w *Writer) identifyJoinTables(schema *models.Schema) map[string]bool {
	joinTables := make(map[string]bool)

	for _, table := range schema.Tables {
		// Check if this is a join table:
		// 1. Has exactly 2 FK constraints
		// 2. Has composite PK with those 2 columns
		// 3. Has no other columns except the FK columns

		fks := table.GetForeignKeys()
		if len(fks) != 2 {
			continue
		}

		// Check if columns are only the FK columns
		if len(table.Columns) != 2 {
			continue
		}

		// Check if both FK columns are part of PK
		pkCols := 0
		for _, col := range table.Columns {
			if col.IsPrimaryKey {
				pkCols++
			}
		}

		if pkCols == 2 {
			joinTables[table.Name] = true
		}
	}

	return joinTables
}

// tableToEntity converts a Table to a TypeORM entity class
func (w *Writer) tableToEntity(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
	var sb strings.Builder

	// Generate @Entity decorator with options
	entityOptions := w.buildEntityOptions(table)
	sb.WriteString(fmt.Sprintf("@Entity({\n%s\n})\n", entityOptions))

	// Get class name (from metadata if different from table name)
	className := table.Name
	if table.Metadata != nil {
		if classNameVal, ok := table.Metadata["class_name"]; ok {
			if classNameStr, ok := classNameVal.(string); ok {
				className = classNameStr
			}
		}
	}

	sb.WriteString(fmt.Sprintf("export class %s {\n", className))

	// Collect and sort columns
	columns := make([]*models.Column, 0, len(table.Columns))
	for _, col := range table.Columns {
		// Skip FK columns (they'll be represented as relations)
		if w.isForeignKeyColumn(col, table) {
			continue
		}
		columns = append(columns, col)
	}

	sort.Slice(columns, func(i, j int) bool {
		// Put PK first, then alphabetical
		if columns[i].IsPrimaryKey && !columns[j].IsPrimaryKey {
			return true
		}
		if !columns[i].IsPrimaryKey && columns[j].IsPrimaryKey {
			return false
		}
		return columns[i].Name < columns[j].Name
	})

	// Write scalar fields
	for _, col := range columns {
		sb.WriteString(w.columnToField(col, table))
		sb.WriteString("\n")
	}

	// Write relation fields
	sb.WriteString(w.generateRelationFields(table, schema, joinTables))

	sb.WriteString("}\n")

	return sb.String()
}

// viewToEntity converts a View to a TypeORM @ViewEntity class
func (w *Writer) viewToEntity(view *models.View) string {
	var sb strings.Builder

	// Generate @ViewEntity decorator with expression
	sb.WriteString("@ViewEntity({\n")
	if view.Definition != "" {
		// Format the SQL expression with proper indentation
		sb.WriteString(" expression: `\n")
		sb.WriteString(" ")
		sb.WriteString(view.Definition)
		sb.WriteString("\n `,\n")
	}
	sb.WriteString("})\n")

	// Generate class
	sb.WriteString(fmt.Sprintf("export class %s {\n", view.Name))

	// Generate field definitions (without decorators for view fields)
	columns := make([]*models.Column, 0, len(view.Columns))
	for _, col := range view.Columns {
		columns = append(columns, col)
	}
	sort.Slice(columns, func(i, j int) bool {
		return columns[i].Name < columns[j].Name
	})

	for _, col := range columns {
		tsType := w.sqlTypeToTypeScript(col.Type)
		sb.WriteString(fmt.Sprintf(" %s: %s;\n", col.Name, tsType))
	}

	sb.WriteString("}\n")

	return sb.String()
}
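// As a sketch: a hypothetical "active_users" view with a stored definition
// renders roughly as
//
//	@ViewEntity({
//	  expression: `
//	    SELECT id, username FROM users WHERE is_active = true
//	  `,
//	})
//	export class active_users {
//	  id: number;
//	  username: string;
//	}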
// columnToField converts a Column to a TypeORM field
func (w *Writer) columnToField(col *models.Column, table *models.Table) string {
	var sb strings.Builder

	// Generate decorator
	if col.IsPrimaryKey {
		if col.AutoIncrement {
			sb.WriteString(" @PrimaryGeneratedColumn('increment')\n")
		} else if col.Type == "uuid" || strings.Contains(fmt.Sprint(col.Default), "uuid") {
			sb.WriteString(" @PrimaryGeneratedColumn('uuid')\n")
		} else {
			sb.WriteString(" @PrimaryGeneratedColumn()\n")
		}
	} else if col.Default == "now()" {
		sb.WriteString(" @CreateDateColumn()\n")
	} else if strings.Contains(col.Comment, "auto-update") {
		sb.WriteString(" @UpdateDateColumn()\n")
	} else {
		// Regular @Column decorator
		options := w.buildColumnOptions(col, table)
		if options != "" {
			sb.WriteString(fmt.Sprintf(" @Column({ %s })\n", options))
		} else {
			sb.WriteString(" @Column()\n")
		}
	}

	// Generate field declaration
	tsType := w.sqlTypeToTypeScript(col.Type)
	nullable := ""
	if !col.NotNull {
		nullable = " | null"
	}

	sb.WriteString(fmt.Sprintf(" %s: %s%s;", col.Name, tsType, nullable))

	return sb.String()
}
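// As a sketch: a hypothetical nullable "bio" text column becomes
//
//	@Column({ type: 'text', nullable: true })
//	bio: string | null;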
// buildColumnOptions builds the options object for @Column decorator
func (w *Writer) buildColumnOptions(col *models.Column, table *models.Table) string {
	options := make([]string, 0)

	// Type (if not default)
	if w.needsExplicitType(col.Type) {
		options = append(options, fmt.Sprintf("type: '%s'", col.Type))
	}

	// Nullable
	if !col.NotNull {
		options = append(options, "nullable: true")
	}

	// Unique
	if w.hasUniqueConstraint(col.Name, table) {
		options = append(options, "unique: true")
	}

	// Default
	if col.Default != nil && col.Default != "now()" {
		defaultStr := fmt.Sprint(col.Default)
		if defaultStr != "" {
			options = append(options, fmt.Sprintf("default: '%s'", defaultStr))
		}
	}

	return strings.Join(options, ", ")
}

// needsExplicitType checks if a SQL type needs explicit type declaration
func (w *Writer) needsExplicitType(sqlType string) bool {
	// Types that don't map cleanly to TypeScript types need explicit declaration
	explicitTypes := []string{"text", "uuid", "jsonb", "bigint"}
	for _, t := range explicitTypes {
		if strings.Contains(sqlType, t) {
			return true
		}
	}
	return false
}

// hasUniqueConstraint checks if a column has a unique constraint
func (w *Writer) hasUniqueConstraint(colName string, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint &&
			len(constraint.Columns) == 1 &&
			constraint.Columns[0] == colName {
			return true
		}
	}
	return false
}

// sqlTypeToTypeScript converts SQL types to TypeScript types
func (w *Writer) sqlTypeToTypeScript(sqlType string) string {
	typeMap := map[string]string{
		"text":              "string",
		"varchar":           "string",
		"character varying": "string",
		"char":              "string",
		"uuid":              "string",
		"boolean":           "boolean",
		"bool":              "boolean",
		"integer":           "number",
		"int":               "number",
		"bigint":            "number",
		"double precision":  "number",
		"float":             "number",
		"decimal":           "number",
		"numeric":           "number",
		"timestamp":         "Date",
		"timestamptz":       "Date",
		"date":              "Date",
		"jsonb":             "any",
		"json":              "any",
	}

	for sqlPattern, tsType := range typeMap {
		if strings.Contains(strings.ToLower(sqlType), sqlPattern) {
			return tsType
		}
	}

	return "any"
}

// isForeignKeyColumn checks if a column is a FK column
func (w *Writer) isForeignKeyColumn(col *models.Column, table *models.Table) bool {
	for _, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			for _, fkCol := range constraint.Columns {
				if fkCol == col.Name {
					return true
				}
			}
		}
	}
	return false
}

// generateRelationFields generates relation fields for a table
func (w *Writer) generateRelationFields(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
	var sb strings.Builder

	// Get all FK constraints
	fks := table.GetForeignKeys()

	// Generate @ManyToOne fields
	for _, fk := range fks {
		relatedTable := fk.ReferencedTable
		fieldName := strings.ToLower(relatedTable)

		// Determine if nullable
		isNullable := false
		for _, fkCol := range fk.Columns {
			if col, exists := table.Columns[fkCol]; exists {
				if !col.NotNull {
					isNullable = true
				}
			}
		}

		nullable := ""
		if isNullable {
			nullable = " | null"
		}

		// Find inverse field name if possible
		inverseField := w.findInverseFieldName(table.Name, relatedTable, schema)

		if inverseField != "" {
			sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, %s => %s.%s)\n",
				relatedTable, strings.ToLower(relatedTable), strings.ToLower(relatedTable), inverseField))
		} else {
			if isNullable {
				sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, { nullable: true })\n", relatedTable))
			} else {
				sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s)\n", relatedTable))
			}
		}

		sb.WriteString(fmt.Sprintf(" %s: %s%s;\n", fieldName, relatedTable, nullable))
		sb.WriteString("\n")
	}

	// Generate @OneToMany fields (inverse of FKs pointing to this table)
	w.generateInverseRelations(table, schema, joinTables, &sb)

	// Generate @ManyToMany fields
	w.generateManyToManyRelations(table, schema, joinTables, &sb)

	return sb.String()
}

// findInverseFieldName finds the inverse field name for a relation
func (w *Writer) findInverseFieldName(fromTable, toTable string, schema *models.Schema) string {
	// Look for tables that have FKs pointing back to fromTable
	for _, table := range schema.Tables {
		if table.Name != toTable {
			continue
		}

		for _, constraint := range table.Constraints {
			if constraint.Type == models.ForeignKeyConstraint && constraint.ReferencedTable == fromTable {
				// Found an inverse relation; use the pluralized form of fromTable
				return w.pluralize(strings.ToLower(fromTable))
			}
		}
	}

	return ""
}

// generateInverseRelations generates @OneToMany fields
func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool, sb *strings.Builder) {
	for _, otherTable := range schema.Tables {
		if otherTable.Name == table.Name || joinTables[otherTable.Name] {
			continue
		}

		for _, fk := range otherTable.GetForeignKeys() {
			if fk.ReferencedTable == table.Name {
				// This table is referenced by otherTable
				fieldName := w.pluralize(strings.ToLower(otherTable.Name))
				inverseName := strings.ToLower(table.Name)

				fmt.Fprintf(sb, " @OneToMany(() => %s, %s => %s.%s)\n",
					otherTable.Name, strings.ToLower(otherTable.Name), strings.ToLower(otherTable.Name), inverseName)
				fmt.Fprintf(sb, " %s: %s[];\n", fieldName, otherTable.Name)
				sb.WriteString("\n")
			}
		}
	}
}
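// As a sketch: if a hypothetical Post table carries an FK to User, the User
// entity gains the inverse pair
//
//	@OneToMany(() => Post, post => post.user)
//	posts: Post[];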
// generateManyToManyRelations generates @ManyToMany fields
func (w *Writer) generateManyToManyRelations(table *models.Table, schema *models.Schema, joinTables map[string]bool, sb *strings.Builder) {
	for joinTableName := range joinTables {
		joinTable := w.findTable(joinTableName, schema)
		if joinTable == nil {
			continue
		}

		fks := joinTable.GetForeignKeys()
		if len(fks) != 2 {
			continue
		}

		// Check if this table is part of the M2M relation
		var thisTableFK *models.Constraint
		var otherTableFK *models.Constraint

		for i, fk := range fks {
			if fk.ReferencedTable == table.Name {
				thisTableFK = fk
				if i == 0 {
					otherTableFK = fks[1]
				} else {
					otherTableFK = fks[0]
				}
			}
		}

		if thisTableFK == nil {
			continue
		}

		// Determine which side owns the relation (has @JoinTable);
		// the alphabetically first entity is made the owner
		isOwner := table.Name < otherTableFK.ReferencedTable

		otherTable := otherTableFK.ReferencedTable
		fieldName := w.pluralize(strings.ToLower(otherTable))
		inverseName := w.pluralize(strings.ToLower(table.Name))

		fmt.Fprintf(sb, " @ManyToMany(() => %s, %s => %s.%s)\n",
			otherTable, strings.ToLower(otherTable), strings.ToLower(otherTable), inverseName)
		if isOwner {
			sb.WriteString(" @JoinTable()\n")
		}

		fmt.Fprintf(sb, " %s: %s[];\n", fieldName, otherTable)
		sb.WriteString("\n")
	}
}
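// As a sketch: for a hypothetical join table linking Post and Tag, the
// alphabetically first entity (Post) owns the relation and receives
//
//	@ManyToMany(() => Tag, tag => tag.posts)
//	@JoinTable()
//	tags: Tag[];
//
// while Tag gets the same @ManyToMany declaration without @JoinTable().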
// findTable finds a table by name in a schema
func (w *Writer) findTable(name string, schema *models.Schema) *models.Table {
	for _, table := range schema.Tables {
		if table.Name == name {
			return table
		}
	}
	return nil
}

// buildEntityOptions builds the options object for @Entity decorator
func (w *Writer) buildEntityOptions(table *models.Table) string {
	options := make([]string, 0)

	// Always include table name
	options = append(options, fmt.Sprintf(" name: \"%s\"", table.Name))

	// Always include schema
	options = append(options, fmt.Sprintf(" schema: \"%s\"", table.Schema))

	// Database name from metadata
	if table.Metadata != nil {
		if database, ok := table.Metadata["database"]; ok {
			if databaseStr, ok := database.(string); ok {
				options = append(options, fmt.Sprintf(" database: \"%s\"", databaseStr))
			}
		}

		// Engine from metadata
		if engine, ok := table.Metadata["engine"]; ok {
			if engineStr, ok := engine.(string); ok {
				options = append(options, fmt.Sprintf(" engine: \"%s\"", engineStr))
			}
		}
	}

	return strings.Join(options, ",\n")
}

// pluralize adds 's' to make a word plural (simple version)
func (w *Writer) pluralize(word string) string {
	if strings.HasSuffix(word, "s") {
		return word
	}
	return word + "s"
}
212
pkg/writers/yaml/README.md
Normal file
@@ -0,0 +1,212 @@
# YAML Writer

Generates database schema definitions in YAML format.

## Overview

The YAML Writer converts RelSpec's internal database model representation into YAML format, providing a human-readable, structured representation of the database schema.

## Features

- Generates RelSpec's canonical YAML schema format
- Human-readable alternative to JSON
- Complete schema representation including:
  - Databases and schemas
  - Tables, columns, and data types
  - Constraints (PK, FK, unique, check)
  - Indexes
  - Relationships
  - Views and sequences
- Supports comments
- Ideal for manual editing and configuration

## Usage

### Basic Example

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)

func main() {
	options := &writers.WriterOptions{
		OutputPath: "schema.yaml",
	}

	writer := yaml.NewWriter(options)
	err := writer.WriteDatabase(db) // db is a *models.Database produced by one of the readers
	if err != nil {
		panic(err)
	}
}
```

### CLI Examples

```bash
# Export PostgreSQL database to YAML
relspec --input pgsql \
  --conn "postgres://localhost/mydb" \
  --output yaml \
  --out-file schema.yaml

# Convert GORM models to YAML
relspec --input gorm --in-file models.go --output yaml --out-file schema.yaml

# Convert JSON to YAML
relspec --input json --in-file schema.json --output yaml --out-file schema.yaml
```

## Generated YAML Example

```yaml
name: myapp
database_type: postgresql
source_format: pgsql
schemas:
  - name: public
    tables:
      - name: users
        schema: public
        columns:
          id:
            name: id
            table: users
            schema: public
            type: bigint
            not_null: true
            is_primary_key: true
            auto_increment: true
            sequence: 1
          username:
            name: username
            table: users
            schema: public
            type: varchar
            length: 50
            not_null: true
            sequence: 2
          email:
            name: email
            table: users
            schema: public
            type: varchar
            length: 100
            not_null: true
            sequence: 3
        constraints:
          pk_users:
            name: pk_users
            type: PRIMARY KEY
            table: users
            schema: public
            columns:
              - id
          uq_users_username:
            name: uq_users_username
            type: UNIQUE
            table: users
            schema: public
            columns:
              - username
        indexes:
          idx_users_email:
            name: idx_users_email
            table: users
            schema: public
            columns:
              - email
            unique: false
            type: btree

      - name: posts
        schema: public
        columns:
          id:
            name: id
            type: bigint
            not_null: true
            is_primary_key: true
            sequence: 1
          user_id:
            name: user_id
            type: bigint
            not_null: true
            sequence: 2
          title:
            name: title
            type: varchar
            length: 200
            not_null: true
            sequence: 3
          content:
            name: content
            type: text
            not_null: false
            sequence: 4
        constraints:
          fk_posts_user_id:
            name: fk_posts_user_id
            type: FOREIGN KEY
            table: posts
            schema: public
            columns:
              - user_id
            referenced_table: users
            referenced_schema: public
            referenced_columns:
              - id
            on_delete: CASCADE
            on_update: NO ACTION
        indexes:
          idx_posts_user_id:
            name: idx_posts_user_id
            columns:
              - user_id
            unique: false
            type: btree
    views: []
    sequences: []
```

## Schema Structure

The YAML format mirrors the JSON structure with human-readable syntax:

- Database level: `name`, `database_type`, `source_format`, `schemas`
- Schema level: `name`, `tables`, `views`, `sequences`
- Table level: `name`, `schema`, `columns`, `constraints`, `indexes`
- Column level: `name`, `type`, `length`, `not_null`, etc.
- Constraint level: `name`, `type`, `columns`, foreign key details
- Index level: `name`, `columns`, `unique`, `type`

## Advantages Over JSON

- More human-readable
- Easier to edit manually
- Supports comments
- Less verbose (no braces/brackets)
- Better for configuration files
- Natural indentation

## Use Cases

- **Configuration** - Schema as configuration
- **Documentation** - Human-readable schema docs
- **Version Control** - Easier to read diffs
- **Manual Editing** - Easier to modify by hand
- **Code Generation** - Template-friendly format

## Notes

- Output is properly indented (2 spaces)
- Preserves all schema metadata
- Can be round-tripped with the YAML reader
- Compatible with YAML 1.2
- More readable than JSON for large schemas
- Ideal for documentation and manual workflows
60
tests/assets/bun/complex.go
Normal file
@@ -0,0 +1,60 @@
package models

import (
	"time"

	"github.com/uptrace/bun"
)

// ModelUser represents a user in the system
type ModelUser struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64     `bun:"id,pk,autoincrement,type:bigint"`
	Username  string    `bun:"username,notnull,type:varchar(100),unique:idx_username"`
	Email     string    `bun:"email,notnull,type:varchar(255),unique"`
	Password  string    `bun:"password,notnull,type:varchar(255)"`
	FirstName *string   `bun:"first_name,type:varchar(100)"`
	LastName  *string   `bun:"last_name,type:varchar(100)"`
	Bio       *string   `bun:"bio,type:text"`
	IsActive  bool      `bun:"is_active,type:boolean"`
	CreatedAt time.Time `bun:"created_at,type:timestamp"`
	UpdatedAt time.Time `bun:"updated_at,type:timestamp"`

	Posts []*ModelPost `bun:"rel:has-many,join:id=user_id"`
}

// ModelPost represents a blog post
type ModelPost struct {
	bun.BaseModel `bun:"table:posts,alias:p"`

	ID          int64      `bun:"id,pk,autoincrement,type:bigint"`
	UserID      int64      `bun:"user_id,notnull,type:bigint"`
	Title       string     `bun:"title,notnull,type:varchar(255)"`
	Slug        string     `bun:"slug,notnull,type:varchar(255),unique:idx_slug"`
	Content     string     `bun:"content,notnull,type:text"`
	Excerpt     *string    `bun:"excerpt,type:text"`
	Published   bool       `bun:"published,type:boolean"`
	ViewCount   int64      `bun:"view_count,type:bigint"`
	PublishedAt *time.Time `bun:"published_at,type:timestamp,nullzero"`
	CreatedAt   time.Time  `bun:"created_at,type:timestamp"`
	UpdatedAt   time.Time  `bun:"updated_at,type:timestamp"`

	User     *ModelUser      `bun:"rel:belongs-to,join:user_id=id"`
	Comments []*ModelComment `bun:"rel:has-many,join:id=post_id"`
}

// ModelComment represents a comment on a post
type ModelComment struct {
	bun.BaseModel `bun:"table:comments,alias:c"`

	ID        int64     `bun:"id,pk,autoincrement,type:bigint"`
	PostID    int64     `bun:"post_id,notnull,type:bigint"`
	UserID    *int64    `bun:"user_id,type:bigint"`
	Content   string    `bun:"content,notnull,type:text"`
	CreatedAt time.Time `bun:"created_at,type:timestamp"`
	UpdatedAt time.Time `bun:"updated_at,type:timestamp"`

	Post *ModelPost `bun:"rel:belongs-to,join:post_id=id"`
	User *ModelUser `bun:"rel:belongs-to,join:user_id=id"`
}
18
tests/assets/bun/simple.go
Normal file
@@ -0,0 +1,18 @@
package models

import (
	"time"

	"github.com/uptrace/bun"
)

type User struct {
	bun.BaseModel `bun:"table:users,alias:u"`

	ID        int64     `bun:"id,pk,autoincrement,type:bigint"`
	Email     string    `bun:"email,notnull,type:varchar(255),unique"`
	Name      string    `bun:"name,type:text"`
	Age       *int      `bun:"age,type:integer"`
	IsActive  bool      `bun:"is_active,type:boolean"`
	CreatedAt time.Time `bun:"created_at,type:timestamp,default:now()"`
}
156
tests/assets/drizzle/schema-updated.ts
Normal file
@@ -0,0 +1,156 @@
// Code generated by relspecgo. DO NOT EDIT.
import { pgTable, pgEnum, integer, bigint, smallint, serial, bigserial, smallserial, text, varchar, char, boolean, numeric, real, doublePrecision, timestamp, date, time, interval, json, jsonb, uuid, bytea } from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';


// Enums
export const userRole = pgEnum('UserRole', ['admin', 'user', 'moderator', 'guest']);
export const orderStatus = pgEnum('OrderStatus', ['pending', 'processing', 'shipped', 'delivered', 'cancelled']);


// Table: users
export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  email: varchar('email').notNull().unique(),
  isActive: boolean('is_active').notNull().default(true),
  lastLoginAt: timestamp('last_login_at'),
  passwordHash: varchar('password_hash').notNull(),
  profile: jsonb('profile'),
  role: pgEnum('UserRole')('role').notNull(),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
  username: varchar('username').notNull().unique(),
});

// Types for users
export type Users = typeof users.$inferSelect;
export type NewUsers = typeof users.$inferInsert;

// Table: profiles
export const profiles = pgTable('profiles', {
  id: serial('id').primaryKey(),
  avatarUrl: varchar('avatar_url'),
  bio: text('bio'),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  dateOfBirth: date('date_of_birth'),
  firstName: varchar('first_name'),
  lastName: varchar('last_name'),
  phoneNumber: varchar('phone_number'),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
  userId: integer('user_id').notNull().unique().references(() => users.id),
});

// Types for profiles
export type Profiles = typeof profiles.$inferSelect;
export type NewProfiles = typeof profiles.$inferInsert;

// Table: posts
export const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  authorId: integer('author_id').notNull().references(() => users.id),
  content: text('content').notNull(),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  excerpt: text('excerpt'),
  featuredImage: varchar('featured_image'),
  isPublished: boolean('is_published').notNull().default(false),
  publishedAt: timestamp('published_at'),
  slug: varchar('slug').notNull().unique(),
  title: varchar('title').notNull(),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
  viewCount: integer('view_count').notNull().default(0),
});

// Types for posts
export type Posts = typeof posts.$inferSelect;
export type NewPosts = typeof posts.$inferInsert;

// Table: comments
export const comments = pgTable('comments', {
  id: serial('id').primaryKey(),
  authorId: integer('author_id').notNull().references(() => users.id),
  content: text('content').notNull(),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  isApproved: boolean('is_approved').notNull().default(false),
  parentId: integer('parent_id').references(() => comments.id),
  postId: integer('post_id').notNull().references(() => posts.id),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
});

// Types for comments
export type Comments = typeof comments.$inferSelect;
export type NewComments = typeof comments.$inferInsert;

// Table: categories
export const categories = pgTable('categories', {
  id: serial('id').primaryKey(),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  description: text('description'),
  name: varchar('name').notNull().unique(),
  parentId: integer('parent_id').references(() => categories.id),
  slug: varchar('slug').notNull().unique(),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
});

// Types for categories
export type Categories = typeof categories.$inferSelect;
export type NewCategories = typeof categories.$inferInsert;

// Table: post_categories
export const postCategories = pgTable('post_categories', {
  categoryId: integer('category_id').notNull().references(() => categories.id),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  postId: integer('post_id').notNull().references(() => posts.id),
});

// Types for post_categories
export type PostCategories = typeof postCategories.$inferSelect;
export type NewPostCategories = typeof postCategories.$inferInsert;

// Table: tags
export const tags = pgTable('tags', {
  id: serial('id').primaryKey(),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  name: varchar('name').notNull().unique(),
  slug: varchar('slug').notNull().unique(),
});

// Types for tags
export type Tags = typeof tags.$inferSelect;
export type NewTags = typeof tags.$inferInsert;

// Table: post_tags
export const postTags = pgTable('post_tags', {
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  postId: integer('post_id').notNull().references(() => posts.id),
  tagId: integer('tag_id').notNull().references(() => tags.id),
});

// Types for post_tags
export type PostTags = typeof postTags.$inferSelect;
export type NewPostTags = typeof postTags.$inferInsert;

// Table: orders
export const orders = pgTable('orders', {
  id: serial('id').primaryKey(),
  billingAddress: jsonb('billing_address').notNull(),
  completedAt: timestamp('completed_at'),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  currency: varchar('currency').notNull().default('USD'),
  notes: text('notes'),
  orderNumber: varchar('order_number').notNull().unique(),
  shippingAddress: jsonb('shipping_address').notNull(),
  status: pgEnum('OrderStatus')('status').notNull().default('pending'),
  totalAmount: numeric('total_amount').notNull(),
  updatedAt: timestamp('updated_at').notNull().default(sql`now()`),
  userId: integer('user_id').notNull().references(() => users.id),
});

// Types for orders
export type Orders = typeof orders.$inferSelect;
export type NewOrders = typeof orders.$inferInsert;

// Table: sessions
export const sessions = pgTable('sessions', {
  id: uuid('id').primaryKey().default(sql`gen_random_uuid()`),
  createdAt: timestamp('created_at').notNull().default(sql`now()`),
  expiresAt: timestamp('expires_at').notNull(),
  ipAddress: varchar('ip_address'),
  token: varchar('token').notNull().unique(),
  userAgent: text('user_agent'),
  userId: integer('user_id').notNull().references(() => users.id),
});

// Types for sessions
export type Sessions = typeof sessions.$inferSelect;
export type NewSessions = typeof sessions.$inferInsert;
90
tests/assets/drizzle/schema.ts
Normal file
@@ -0,0 +1,90 @@
// Code generated by relspecgo. DO NOT EDIT.
import { pgTable, pgEnum, integer, bigint, smallint, serial, bigserial, smallserial, text, varchar, char, boolean, numeric, real, doublePrecision, timestamp, date, time, interval, json, jsonb, uuid, bytea } from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';

// Enums
export const role = pgEnum('Role', ['USER', 'ADMIN']);
export type Role = 'USER' | 'ADMIN';

// Table: User
export interface User {
  id: number;
  email: string;
  name: string | null;
  profile: string | null;
  role: Role;
}

export const user = pgTable('User', {
  id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
  email: text('email').notNull().unique(),
  name: text('name'),
  profile: text('profile'),
  role: pgEnum('Role')('role').notNull().default('USER'),
});

export type NewUser = typeof user.$inferInsert;

// Table: Profile
export interface Profile {
  id: number;
  bio: string;
  user: string;
  userId: number;
}

export const profile = pgTable('Profile', {
  id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
  bio: text('bio').notNull(),
  user: text('user').notNull(),
  userId: integer('userId').notNull().unique().references(() => user.id),
});

export type NewProfile = typeof profile.$inferInsert;

// Table: Post
export interface Post {
  id: number;
  author: string;
  authorId: number;
  createdAt: Date;
  published: boolean;
  title: string;
  updatedAt: Date; // @updatedAt
}

export const post = pgTable('Post', {
  id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
  author: text('author').notNull(),
  authorId: integer('authorId').notNull().references(() => user.id),
  createdAt: timestamp('createdAt').notNull().default(sql`now()`),
  published: boolean('published').notNull().default(false),
  title: text('title').notNull(),
  updatedAt: timestamp('updatedAt').notNull(), // @updatedAt
});

export type NewPost = typeof post.$inferInsert;

// Table: Category
export interface Category {
  id: number;
  name: string;
}

export const category = pgTable('Category', {
  id: integer('id').primaryKey().generatedAlwaysAsIdentity(),
  name: text('name').notNull(),
});

export type NewCategory = typeof category.$inferInsert;

// Table: _CategoryToPost
export interface Categorytopost {
  categoryId: number;
  postId: number;
}

export const Categorytopost = pgTable('_CategoryToPost', {
  categoryId: integer('CategoryId').primaryKey().references(() => category.id),
  postId: integer('PostId').primaryKey().references(() => post.id),
});

export type NewCategorytopost = typeof Categorytopost.$inferInsert;
65
tests/assets/gorm/complex.go
Normal file
@@ -0,0 +1,65 @@
package models

import (
	"time"
)

// ModelUser represents a user in the system
type ModelUser struct {
	ID        int64     `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
	Username  string    `gorm:"column:username;type:varchar(100);not null;uniqueIndex:idx_username"`
	Email     string    `gorm:"column:email;type:varchar(255);not null;uniqueIndex"`
	Password  string    `gorm:"column:password;type:varchar(255);not null"`
	FirstName *string   `gorm:"column:first_name;type:varchar(100)"`
	LastName  *string   `gorm:"column:last_name;type:varchar(100)"`
	Bio       *string   `gorm:"column:bio;type:text"`
	IsActive  bool      `gorm:"column:is_active;type:boolean;default:true"`
	CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
	UpdatedAt time.Time `gorm:"column:updated_at;type:timestamp;default:now()"`

	Posts    []*ModelPost    `gorm:"foreignKey:UserID;association_foreignkey:ID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
	Comments []*ModelComment `gorm:"foreignKey:UserID;association_foreignkey:ID;constraint:OnDelete:SET NULL"`
}

func (ModelUser) TableName() string {
	return "users"
}

// ModelPost represents a blog post
type ModelPost struct {
	ID          int64      `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
	UserID      int64      `gorm:"column:user_id;type:bigint;not null;index:idx_user_id"`
	Title       string     `gorm:"column:title;type:varchar(255);not null"`
	Slug        string     `gorm:"column:slug;type:varchar(255);not null;uniqueIndex:idx_slug"`
	Content     string     `gorm:"column:content;type:text;not null"`
	Excerpt     *string    `gorm:"column:excerpt;type:text"`
	Published   bool       `gorm:"column:published;type:boolean;default:false"`
	ViewCount   int64      `gorm:"column:view_count;type:bigint;default:0"`
	PublishedAt *time.Time `gorm:"column:published_at;type:timestamp"`
	CreatedAt   time.Time  `gorm:"column:created_at;type:timestamp;default:now()"`
	UpdatedAt   time.Time  `gorm:"column:updated_at;type:timestamp;default:now()"`

	User     *ModelUser      `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
	Comments []*ModelComment `gorm:"foreignKey:PostID;association_foreignkey:ID;constraint:OnDelete:CASCADE"`
}

func (ModelPost) TableName() string {
	return "posts"
}

// ModelComment represents a comment on a post
type ModelComment struct {
	ID        int64     `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
	PostID    int64     `gorm:"column:post_id;type:bigint;not null;index:idx_post_id"`
	UserID    *int64    `gorm:"column:user_id;type:bigint;index:idx_user_id"`
	Content   string    `gorm:"column:content;type:text;not null"`
	CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
	UpdatedAt time.Time `gorm:"column:updated_at;type:timestamp;default:now()"`

	Post *ModelPost `gorm:"foreignKey:PostID;references:ID;constraint:OnDelete:CASCADE"`
	User *ModelUser `gorm:"foreignKey:UserID;references:ID;constraint:OnDelete:SET NULL"`
}

func (ModelComment) TableName() string {
	return "comments"
}
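For orientation, here is a minimal sketch of how these fixture models could be exercised with GORM. Everything beyond the ModelUser/ModelPost/ModelComment types above is an assumption for illustration: the Postgres DSN, the test name, and the seed data are invented, and the fixtures themselves ship no database configuration.

package models

// Hypothetical smoke test for the fixture models above (not part of the
// repository). The DSN is a placeholder assumption.

import (
	"testing"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

func TestModelsRoundTrip(t *testing.T) {
	dsn := "host=localhost user=postgres dbname=relspec_test sslmode=disable" // assumed
	db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
	if err != nil {
		t.Skipf("no database available: %v", err)
	}

	// Create the users/posts/comments tables from the struct tags above.
	if err := db.AutoMigrate(&ModelUser{}, &ModelPost{}, &ModelComment{}); err != nil {
		t.Fatalf("migrate: %v", err)
	}

	u := ModelUser{Username: "alice", Email: "alice@example.com", Password: "secret"}
	if err := db.Create(&u).Error; err != nil {
		t.Fatalf("create: %v", err)
	}

	var got ModelUser
	if err := db.Preload("Posts").First(&got, u.ID).Error; err != nil {
		t.Fatalf("query: %v", err)
	}
	if got.Email != u.Email {
		t.Errorf("got email %q, want %q", got.Email, u.Email)
	}
}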
18
tests/assets/gorm/simple.go
Normal file
@@ -0,0 +1,18 @@
package models

import (
	"time"
)

type User struct {
	ID        int64     `gorm:"column:id;primaryKey;autoIncrement;type:bigint"`
	Email     string    `gorm:"column:email;type:varchar(255);not null"`
	Name      string    `gorm:"column:name;type:text"`
	Age       *int      `gorm:"column:age;type:integer"`
	IsActive  bool      `gorm:"column:is_active;type:boolean"`
	CreatedAt time.Time `gorm:"column:created_at;type:timestamp;default:now()"`
}

func (User) TableName() string {
	return "users"
}
46
tests/assets/graphql/complex.graphql
Normal file
@@ -0,0 +1,46 @@
# Complex GraphQL schema with multiple features

scalar DateTime
scalar JSON
scalar Date

enum Role {
  USER
  ADMIN
  MODERATOR
}

type User {
  id: ID!
  email: String!
  name: String!
  role: Role!
  createdAt: DateTime!
  posts: [Post!]!
  profile: Profile
}

type Profile {
  id: ID!
  bio: String
  avatar: String
  metadata: JSON
  user: User!
}

type Post {
  id: ID!
  title: String!
  slug: String!
  content: String
  published: Boolean!
  publishedAt: Date
  author: User!
  tags: [Tag!]!
}

type Tag {
  id: ID!
  name: String!
  posts: [Post!]!
}
13
tests/assets/graphql/custom_scalars.graphql
Normal file
@@ -0,0 +1,13 @@
# GraphQL schema with custom scalars

scalar DateTime
scalar JSON
scalar Date

type User {
  id: ID!
  email: String!
  createdAt: DateTime!
  metadata: JSON
  birthDate: Date
}
13
tests/assets/graphql/enums.graphql
Normal file
@@ -0,0 +1,13 @@
# GraphQL schema with enums

enum Role {
  ADMIN
  USER
  GUEST
}

type User {
  id: ID!
  email: String!
  role: Role!
}
16
tests/assets/graphql/relations.graphql
Normal file
@@ -0,0 +1,16 @@
# GraphQL schema with relationships

type User {
  id: ID!
  email: String!
  name: String!
  posts: [Post!]!
}

type Post {
  id: ID!
  title: String!
  content: String
  published: Boolean!
  author: User!
}
9
tests/assets/graphql/simple.graphql
Normal file
@@ -0,0 +1,9 @@
# Simple GraphQL schema for testing basic type parsing

type User {
  id: ID!
  email: String!
  name: String
  age: Int
  active: Boolean!
}
46
tests/assets/prisma/example.prisma
Normal file
@@ -0,0 +1,46 @@
datasource db {
  provider = "postgresql"
}

generator client {
  provider = "prisma-client"
  output   = "./generated"
}

model User {
  id      Int      @id @default(autoincrement())
  email   String   @unique
  name    String?
  role    Role     @default(USER)
  posts   Post[]
  profile Profile?
}

model Profile {
  id     Int    @id @default(autoincrement())
  bio    String
  user   User   @relation(fields: [userId], references: [id])
  userId Int    @unique
}

model Post {
  id         Int        @id @default(autoincrement())
  createdAt  DateTime   @default(now())
  updatedAt  DateTime   @updatedAt
  title      String
  published  Boolean    @default(false)
  author     User       @relation(fields: [authorId], references: [id])
  authorId   Int
  categories Category[]
}

model Category {
  id    Int    @id @default(autoincrement())
  name  String
  posts Post[]
}

enum Role {
  USER
  ADMIN
}
115
tests/assets/typeorm/example.ts
Normal file
@@ -0,0 +1,115 @@
//@ts-nocheck
import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, OneToMany, ManyToMany, JoinTable, CreateDateColumn, UpdateDateColumn } from 'typeorm';

@Entity()
export class User {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Column({ unique: true })
  email: string;

  @Column()
  name: string;

  @CreateDateColumn()
  createdAt: Date;

  @UpdateDateColumn()
  updatedAt: Date;

  @OneToMany(() => Project, project => project.owner)
  ownedProjects: Project[];

  @ManyToMany(() => Project, project => project.members)
  @JoinTable()
  projects: Project[];
}

@Entity()
export class Project {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Column()
  title: string;

  @Column({ nullable: true })
  description: string;

  @Column({ default: 'active' })
  status: string;

  @ManyToOne(() => User, user => user.ownedProjects)
  owner: User;

  @ManyToMany(() => User, user => user.projects)
  members: User[];

  @OneToMany(() => Task, task => task.project)
  tasks: Task[];

  @CreateDateColumn()
  createdAt: Date;
}

@Entity()
export class Task {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Column()
  title: string;

  @Column({ type: 'text', nullable: true })
  description: string;

  @Column({ default: 'todo' })
  status: string;

  @Column({ nullable: true })
  dueDate: Date;

  @ManyToOne(() => Project, project => project.tasks)
  project: Project;

  @ManyToOne(() => User, { nullable: true })
  assignee: User;

  @OneToMany(() => Comment, comment => comment.task)
  comments: Comment[];
}

@Entity()
export class Comment {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Column('text')
  content: string;

  @ManyToOne(() => Task, task => task.comments)
  task: Task;

  @ManyToOne(() => User)
  author: User;

  @CreateDateColumn()
  createdAt: Date;
}

@Entity()
export class Tag {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Column({ unique: true })
  name: string;

  @Column()
  color: string;

  @ManyToMany(() => Task)
  @JoinTable()
  tasks: Task[];
}
15
vendor/github.com/davecgh/go-spew/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
ISC License

Copyright (c) 2012-2016 Dave Collins <dave@davec.name>

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
145
vendor/github.com/davecgh/go-spew/spew/bypass.go
generated
vendored
Normal file
@@ -0,0 +1,145 @@
// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

// NOTE: Due to the following build constraints, this file will only be compiled
// when the code is not running on Google App Engine, compiled by GopherJS, and
// "-tags safe" is not added to the go build command line. The "disableunsafe"
// tag is deprecated and thus should not be used.
// Go versions prior to 1.4 are disabled because they use a different layout
// for interfaces which make the implementation of unsafeReflectValue more complex.
// +build !js,!appengine,!safe,!disableunsafe,go1.4

package spew

import (
	"reflect"
	"unsafe"
)

const (
	// UnsafeDisabled is a build-time constant which specifies whether or
	// not access to the unsafe package is available.
	UnsafeDisabled = false

	// ptrSize is the size of a pointer on the current arch.
	ptrSize = unsafe.Sizeof((*byte)(nil))
)

type flag uintptr

var (
	// flagRO indicates whether the value field of a reflect.Value
	// is read-only.
	flagRO flag

	// flagAddr indicates whether the address of the reflect.Value's
	// value may be taken.
	flagAddr flag
)

// flagKindMask holds the bits that make up the kind
// part of the flags field. In all the supported versions,
// it is in the lower 5 bits.
const flagKindMask = flag(0x1f)

// Different versions of Go have used different
// bit layouts for the flags type. This table
// records the known combinations.
var okFlags = []struct {
	ro, addr flag
}{{
	// From Go 1.4 to 1.5
	ro:   1 << 5,
	addr: 1 << 7,
}, {
	// Up to Go tip.
	ro:   1<<5 | 1<<6,
	addr: 1 << 8,
}}

var flagValOffset = func() uintptr {
	field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag")
	if !ok {
		panic("reflect.Value has no flag field")
	}
	return field.Offset
}()

// flagField returns a pointer to the flag field of a reflect.Value.
func flagField(v *reflect.Value) *flag {
	return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset))
}

// unsafeReflectValue converts the passed reflect.Value into a one that bypasses
// the typical safety restrictions preventing access to unaddressable and
// unexported data. It works by digging the raw pointer to the underlying
// value out of the protected value and generating a new unprotected (unsafe)
// reflect.Value to it.
//
// This allows us to check for implementations of the Stringer and error
// interfaces to be used for pretty printing ordinarily unaddressable and
// inaccessible values such as unexported struct fields.
func unsafeReflectValue(v reflect.Value) reflect.Value {
	if !v.IsValid() || (v.CanInterface() && v.CanAddr()) {
		return v
	}
	flagFieldPtr := flagField(&v)
	*flagFieldPtr &^= flagRO
	*flagFieldPtr |= flagAddr
	return v
}

// Sanity checks against future reflect package changes
// to the type or semantics of the Value.flag field.
func init() {
	field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag")
	if !ok {
		panic("reflect.Value has no flag field")
	}
	if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() {
		panic("reflect.Value flag field has changed kind")
	}
	type t0 int
	var t struct {
		A t0
		// t0 will have flagEmbedRO set.
		t0
		// a will have flagStickyRO set
		a t0
	}
	vA := reflect.ValueOf(t).FieldByName("A")
	va := reflect.ValueOf(t).FieldByName("a")
	vt0 := reflect.ValueOf(t).FieldByName("t0")

	// Infer flagRO from the difference between the flags
	// for the (otherwise identical) fields in t.
	flagPublic := *flagField(&vA)
	flagWithRO := *flagField(&va) | *flagField(&vt0)
	flagRO = flagPublic ^ flagWithRO

	// Infer flagAddr from the difference between a value
	// taken from a pointer and not.
	vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A")
	flagNoPtr := *flagField(&vA)
	flagPtr := *flagField(&vPtrA)
	flagAddr = flagNoPtr ^ flagPtr

	// Check that the inferred flags tally with one of the known versions.
	for _, f := range okFlags {
		if flagRO == f.ro && flagAddr == f.addr {
			return
		}
	}
	panic("reflect.Value read-only flag has changed semantics")
}
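The flag manipulation in unsafeReflectValue is what lets spew invoke Stringer and error methods on values that ordinary reflection refuses to interface, such as unexported struct fields. A small sketch of the observable effect through the public API — the ledger and account types are invented for illustration:

package main

import (
	"fmt"

	"github.com/davecgh/go-spew/spew"
)

// account implements fmt.Stringer.
type account struct {
	name    string
	balance int
}

func (a account) String() string { return fmt.Sprintf("%s/%d", a.name, a.balance) }

type ledger struct {
	primary account // unexported field holding a Stringer
}

func main() {
	l := ledger{primary: account{name: "ops", balance: 42}}
	// With the unsafe build of bypass.go compiled in, Sdump can still call
	// String on the unexported primary field; under "-tags safe" it falls
	// back to plain field-by-field output.
	fmt.Print(spew.Sdump(l))
}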
38
vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
generated
vendored
Normal file
@@ -0,0 +1,38 @@
// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

// NOTE: Due to the following build constraints, this file will only be compiled
// when the code is running on Google App Engine, compiled by GopherJS, or
// "-tags safe" is added to the go build command line. The "disableunsafe"
// tag is deprecated and thus should not be used.
// +build js appengine safe disableunsafe !go1.4

package spew

import "reflect"

const (
	// UnsafeDisabled is a build-time constant which specifies whether or
	// not access to the unsafe package is available.
	UnsafeDisabled = true
)

// unsafeReflectValue typically converts the passed reflect.Value into a one
// that bypasses the typical safety restrictions preventing access to
// unaddressable and unexported data. However, doing this relies on access to
// the unsafe package. This is a stub version which simply returns the passed
// reflect.Value when the unsafe package is not available.
func unsafeReflectValue(v reflect.Value) reflect.Value {
	return v
}
341
vendor/github.com/davecgh/go-spew/spew/common.go
generated
vendored
Normal file
@@ -0,0 +1,341 @@
/*
 * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package spew

import (
	"bytes"
	"fmt"
	"io"
	"reflect"
	"sort"
	"strconv"
)

// Some constants in the form of bytes to avoid string overhead. This mirrors
// the technique used in the fmt package.
var (
	panicBytes            = []byte("(PANIC=")
	plusBytes             = []byte("+")
	iBytes                = []byte("i")
	trueBytes             = []byte("true")
	falseBytes            = []byte("false")
	interfaceBytes        = []byte("(interface {})")
	commaNewlineBytes     = []byte(",\n")
	newlineBytes          = []byte("\n")
	openBraceBytes        = []byte("{")
	openBraceNewlineBytes = []byte("{\n")
	closeBraceBytes       = []byte("}")
	asteriskBytes         = []byte("*")
	colonBytes            = []byte(":")
	colonSpaceBytes       = []byte(": ")
	openParenBytes        = []byte("(")
	closeParenBytes       = []byte(")")
	spaceBytes            = []byte(" ")
	pointerChainBytes     = []byte("->")
	nilAngleBytes         = []byte("<nil>")
	maxNewlineBytes       = []byte("<max depth reached>\n")
	maxShortBytes         = []byte("<max>")
	circularBytes         = []byte("<already shown>")
	circularShortBytes    = []byte("<shown>")
	invalidAngleBytes     = []byte("<invalid>")
	openBracketBytes      = []byte("[")
	closeBracketBytes     = []byte("]")
	percentBytes          = []byte("%")
	precisionBytes        = []byte(".")
	openAngleBytes        = []byte("<")
	closeAngleBytes       = []byte(">")
	openMapBytes          = []byte("map[")
	closeMapBytes         = []byte("]")
	lenEqualsBytes        = []byte("len=")
	capEqualsBytes        = []byte("cap=")
)

// hexDigits is used to map a decimal value to a hex digit.
var hexDigits = "0123456789abcdef"

// catchPanic handles any panics that might occur during the handleMethods
// calls.
func catchPanic(w io.Writer, v reflect.Value) {
	if err := recover(); err != nil {
		w.Write(panicBytes)
		fmt.Fprintf(w, "%v", err)
		w.Write(closeParenBytes)
	}
}

// handleMethods attempts to call the Error and String methods on the underlying
// type the passed reflect.Value represents and outputes the result to Writer w.
//
// It handles panics in any called methods by catching and displaying the error
// as the formatted value.
func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) {
	// We need an interface to check if the type implements the error or
	// Stringer interface. However, the reflect package won't give us an
	// interface on certain things like unexported struct fields in order
	// to enforce visibility rules. We use unsafe, when it's available,
	// to bypass these restrictions since this package does not mutate the
	// values.
	if !v.CanInterface() {
		if UnsafeDisabled {
			return false
		}

		v = unsafeReflectValue(v)
	}

	// Choose whether or not to do error and Stringer interface lookups against
	// the base type or a pointer to the base type depending on settings.
	// Technically calling one of these methods with a pointer receiver can
	// mutate the value, however, types which choose to satisify an error or
	// Stringer interface with a pointer receiver should not be mutating their
	// state inside these interface methods.
	if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() {
		v = unsafeReflectValue(v)
	}
	if v.CanAddr() {
		v = v.Addr()
	}

	// Is it an error or Stringer?
	switch iface := v.Interface().(type) {
	case error:
		defer catchPanic(w, v)
		if cs.ContinueOnMethod {
			w.Write(openParenBytes)
			w.Write([]byte(iface.Error()))
			w.Write(closeParenBytes)
			w.Write(spaceBytes)
			return false
		}

		w.Write([]byte(iface.Error()))
		return true

	case fmt.Stringer:
		defer catchPanic(w, v)
		if cs.ContinueOnMethod {
			w.Write(openParenBytes)
			w.Write([]byte(iface.String()))
			w.Write(closeParenBytes)
			w.Write(spaceBytes)
			return false
		}
		w.Write([]byte(iface.String()))
		return true
	}
	return false
}

// printBool outputs a boolean value as true or false to Writer w.
func printBool(w io.Writer, val bool) {
	if val {
		w.Write(trueBytes)
	} else {
		w.Write(falseBytes)
	}
}

// printInt outputs a signed integer value to Writer w.
func printInt(w io.Writer, val int64, base int) {
	w.Write([]byte(strconv.FormatInt(val, base)))
}

// printUint outputs an unsigned integer value to Writer w.
func printUint(w io.Writer, val uint64, base int) {
	w.Write([]byte(strconv.FormatUint(val, base)))
}

// printFloat outputs a floating point value using the specified precision,
// which is expected to be 32 or 64bit, to Writer w.
func printFloat(w io.Writer, val float64, precision int) {
	w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision)))
}

// printComplex outputs a complex value using the specified float precision
// for the real and imaginary parts to Writer w.
func printComplex(w io.Writer, c complex128, floatPrecision int) {
	r := real(c)
	w.Write(openParenBytes)
	w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision)))
	i := imag(c)
	if i >= 0 {
		w.Write(plusBytes)
	}
	w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision)))
	w.Write(iBytes)
	w.Write(closeParenBytes)
}

// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x'
// prefix to Writer w.
func printHexPtr(w io.Writer, p uintptr) {
	// Null pointer.
	num := uint64(p)
	if num == 0 {
		w.Write(nilAngleBytes)
		return
	}

	// Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix
	buf := make([]byte, 18)

	// It's simpler to construct the hex string right to left.
	base := uint64(16)
	i := len(buf) - 1
	for num >= base {
		buf[i] = hexDigits[num%base]
		num /= base
		i--
	}
	buf[i] = hexDigits[num]

	// Add '0x' prefix.
	i--
	buf[i] = 'x'
	i--
	buf[i] = '0'

	// Strip unused leading bytes.
	buf = buf[i:]
	w.Write(buf)
}

// valuesSorter implements sort.Interface to allow a slice of reflect.Value
// elements to be sorted.
type valuesSorter struct {
	values  []reflect.Value
	strings []string // either nil or same len and values
	cs      *ConfigState
}

// newValuesSorter initializes a valuesSorter instance, which holds a set of
// surrogate keys on which the data should be sorted. It uses flags in
// ConfigState to decide if and how to populate those surrogate keys.
func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface {
	vs := &valuesSorter{values: values, cs: cs}
	if canSortSimply(vs.values[0].Kind()) {
		return vs
	}
	if !cs.DisableMethods {
		vs.strings = make([]string, len(values))
		for i := range vs.values {
			b := bytes.Buffer{}
			if !handleMethods(cs, &b, vs.values[i]) {
				vs.strings = nil
				break
			}
			vs.strings[i] = b.String()
		}
	}
	if vs.strings == nil && cs.SpewKeys {
		vs.strings = make([]string, len(values))
		for i := range vs.values {
			vs.strings[i] = Sprintf("%#v", vs.values[i].Interface())
		}
	}
	return vs
}

// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted
// directly, or whether it should be considered for sorting by surrogate keys
// (if the ConfigState allows it).
func canSortSimply(kind reflect.Kind) bool {
	// This switch parallels valueSortLess, except for the default case.
	switch kind {
	case reflect.Bool:
		return true
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		return true
	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
		return true
	case reflect.Float32, reflect.Float64:
		return true
	case reflect.String:
		return true
	case reflect.Uintptr:
		return true
	case reflect.Array:
		return true
	}
	return false
}

// Len returns the number of values in the slice. It is part of the
// sort.Interface implementation.
func (s *valuesSorter) Len() int {
	return len(s.values)
}

// Swap swaps the values at the passed indices. It is part of the
// sort.Interface implementation.
func (s *valuesSorter) Swap(i, j int) {
	s.values[i], s.values[j] = s.values[j], s.values[i]
	if s.strings != nil {
		s.strings[i], s.strings[j] = s.strings[j], s.strings[i]
	}
}

// valueSortLess returns whether the first value should sort before the second
// value. It is used by valueSorter.Less as part of the sort.Interface
// implementation.
func valueSortLess(a, b reflect.Value) bool {
	switch a.Kind() {
	case reflect.Bool:
		return !a.Bool() && b.Bool()
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		return a.Int() < b.Int()
	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
		return a.Uint() < b.Uint()
	case reflect.Float32, reflect.Float64:
		return a.Float() < b.Float()
	case reflect.String:
		return a.String() < b.String()
	case reflect.Uintptr:
		return a.Uint() < b.Uint()
	case reflect.Array:
		// Compare the contents of both arrays.
		l := a.Len()
		for i := 0; i < l; i++ {
			av := a.Index(i)
			bv := b.Index(i)
			if av.Interface() == bv.Interface() {
				continue
			}
			return valueSortLess(av, bv)
		}
	}
	return a.String() < b.String()
}

// Less returns whether the value at index i should sort before the
// value at index j. It is part of the sort.Interface implementation.
func (s *valuesSorter) Less(i, j int) bool {
	if s.strings == nil {
		return valueSortLess(s.values[i], s.values[j])
	}
	return s.strings[i] < s.strings[j]
}

// sortValues is a sort function that handles both native types and any type that
// can be converted to error or Stringer. Other inputs are sorted according to
// their Value.String() value to ensure display stability.
func sortValues(values []reflect.Value, cs *ConfigState) {
	if len(values) == 0 {
		return
	}
	sort.Sort(newValuesSorter(values, cs))
}
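sortValues and the valuesSorter above are what make map output deterministic when sorting is requested. A brief sketch of the effect through spew's public API (the map contents are made up):

package main

import (
	"fmt"

	"github.com/davecgh/go-spew/spew"
)

func main() {
	m := map[string]int{"beta": 2, "alpha": 1, "gamma": 3}

	// SortKeys routes the map keys through sortValues/valueSortLess, so
	// repeated dumps of the same map are byte-for-byte identical.
	cs := spew.ConfigState{Indent: " ", SortKeys: true}
	fmt.Print(cs.Sdump(m))
}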
306
vendor/github.com/davecgh/go-spew/spew/config.go
generated
vendored
Normal file
306
vendor/github.com/davecgh/go-spew/spew/config.go
generated
vendored
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
|
||||||
|
*
|
||||||
|
* Permission to use, copy, modify, and distribute this software for any
|
||||||
|
* purpose with or without fee is hereby granted, provided that the above
|
||||||
|
* copyright notice and this permission notice appear in all copies.
|
||||||
|
*
|
||||||
|
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package spew
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigState houses the configuration options used by spew to format and
|
||||||
|
// display values. There is a global instance, Config, that is used to control
|
||||||
|
// all top-level Formatter and Dump functionality. Each ConfigState instance
|
||||||
|
// provides methods equivalent to the top-level functions.
|
||||||
|
//
|
||||||
|
// The zero value for ConfigState provides no indentation. You would typically
|
||||||
|
// want to set it to a space or a tab.
|
||||||
|
//
|
||||||
|
// Alternatively, you can use NewDefaultConfig to get a ConfigState instance
|
||||||
|
// with default settings. See the documentation of NewDefaultConfig for default
|
||||||
|
// values.
|
||||||
|
type ConfigState struct {
|
||||||
|
// Indent specifies the string to use for each indentation level. The
|
||||||
|
// global config instance that all top-level functions use set this to a
|
||||||
|
// single space by default. If you would like more indentation, you might
|
||||||
|
// set this to a tab with "\t" or perhaps two spaces with " ".
|
||||||
|
Indent string
|
||||||
|
|
||||||
|
// MaxDepth controls the maximum number of levels to descend into nested
|
||||||
|
// data structures. The default, 0, means there is no limit.
|
||||||
|
//
|
||||||
|
// NOTE: Circular data structures are properly detected, so it is not
|
||||||
|
// necessary to set this value unless you specifically want to limit deeply
|
||||||
|
// nested data structures.
|
||||||
|
MaxDepth int
|
||||||
|
|
||||||
|
// DisableMethods specifies whether or not error and Stringer interfaces are
|
||||||
|
// invoked for types that implement them.
|
||||||
|
DisableMethods bool
|
||||||
|
|
||||||
|
// DisablePointerMethods specifies whether or not to check for and invoke
|
||||||
|
// error and Stringer interfaces on types which only accept a pointer
|
||||||
|
// receiver when the current type is not a pointer.
|
||||||
|
//
|
||||||
|
// NOTE: This might be an unsafe action since calling one of these methods
|
||||||
|
// with a pointer receiver could technically mutate the value, however,
|
||||||
|
// in practice, types which choose to satisify an error or Stringer
|
||||||
|
// interface with a pointer receiver should not be mutating their state
|
||||||
|
// inside these interface methods. As a result, this option relies on
|
||||||
|
// access to the unsafe package, so it will not have any effect when
|
||||||
|
// running in environments without access to the unsafe package such as
|
||||||
|
// Google App Engine or with the "safe" build tag specified.
|
||||||
|
DisablePointerMethods bool
|
||||||
|
|
||||||
|
// DisablePointerAddresses specifies whether to disable the printing of
|
||||||
|
// pointer addresses. This is useful when diffing data structures in tests.
|
||||||
|
DisablePointerAddresses bool
|
||||||
|
|
||||||
|
// DisableCapacities specifies whether to disable the printing of capacities
|
||||||
|
// for arrays, slices, maps and channels. This is useful when diffing
|
||||||
|
// data structures in tests.
|
||||||
|
DisableCapacities bool
|
||||||
|
|
||||||
|
// ContinueOnMethod specifies whether or not recursion should continue once
|
||||||
|
// a custom error or Stringer interface is invoked. The default, false,
|
||||||
|
// means it will print the results of invoking the custom error or Stringer
|
||||||
|
// interface and return immediately instead of continuing to recurse into
|
||||||
|
// the internals of the data type.
|
||||||
|
//
|
||||||
|
// NOTE: This flag does not have any effect if method invocation is disabled
|
||||||
|
// via the DisableMethods or DisablePointerMethods options.
|
||||||
|
ContinueOnMethod bool
|
||||||
|
|
||||||
|
// SortKeys specifies map keys should be sorted before being printed. Use
|
||||||
|
// this to have a more deterministic, diffable output. Note that only
|
||||||
|
// native types (bool, int, uint, floats, uintptr and string) and types
|
||||||
|
// that support the error or Stringer interfaces (if methods are
|
||||||
|
// enabled) are supported, with other types sorted according to the
|
||||||
|
// reflect.Value.String() output which guarantees display stability.
|
||||||
|
SortKeys bool
|
||||||
|
|
||||||
|
// SpewKeys specifies that, as a last resort attempt, map keys should
|
||||||
|
// be spewed to strings and sorted by those strings. This is only
|
||||||
|
// considered if SortKeys is true.
|
||||||
|
SpewKeys bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Config is the active configuration of the top-level functions.
|
||||||
|
// The configuration can be changed by modifying the contents of spew.Config.
|
||||||
|
var Config = ConfigState{Indent: " "}
|
||||||
|
|
||||||
|
// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the formatted string as a value that satisfies error. See NewFormatter
|
||||||
|
// for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) {
|
||||||
|
return fmt.Errorf(format, c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the number of bytes written and any write error encountered. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Fprint(w, c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the number of bytes written and any write error encountered. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Fprintf(w, format, c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Fprintln(w, c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print is a wrapper for fmt.Print that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the number of bytes written and any write error encountered. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Print(c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Print(a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Print(c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Printf is a wrapper for fmt.Printf that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the number of bytes written and any write error encountered. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Printf(format, c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Println is a wrapper for fmt.Println that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the number of bytes written and any write error encountered. See
|
||||||
|
// NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Println(c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Println(a ...interface{}) (n int, err error) {
|
||||||
|
return fmt.Println(c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
|
||||||
|
// the resulting string. See NewFormatter for formatting details.
|
||||||
|
//
|
||||||
|
// This function is shorthand for the following syntax:
|
||||||
|
//
|
||||||
|
// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b))
|
||||||
|
func (c *ConfigState) Sprint(a ...interface{}) string {
|
||||||
|
return fmt.Sprint(c.convertArgs(a)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were
|
||||||
|
// passed with a Formatter interface returned by c.NewFormatter. It returns
// the resulting string. See NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b))
func (c *ConfigState) Sprintf(format string, a ...interface{}) string {
	return fmt.Sprintf(format, c.convertArgs(a)...)
}

// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it
// were passed with a Formatter interface returned by c.NewFormatter. It
// returns the resulting string. See NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b))
func (c *ConfigState) Sprintln(a ...interface{}) string {
	return fmt.Sprintln(c.convertArgs(a)...)
}

/*
NewFormatter returns a custom formatter that satisfies the fmt.Formatter
interface. As a result, it integrates cleanly with standard fmt package
printing functions. The formatter is useful for inline printing of smaller data
types similar to the standard %v format specifier.

The custom formatter only responds to the %v (most compact), %+v (adds pointer
addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb
combinations. Any other verbs such as %x and %q will be sent to the
standard fmt package for formatting. In addition, the custom formatter ignores
the width and precision arguments (however they will still work on the format
specifiers not handled by the custom formatter).

Typically this function shouldn't be called directly. It is much easier to make
use of the custom formatter by calling one of the convenience functions such as
c.Printf, c.Println, or c.Fprintf.
*/
func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter {
	return newFormatter(c, v)
}

// Fdump formats and displays the passed arguments to io.Writer w. It formats
// exactly the same as Dump.
func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) {
	fdump(c, w, a...)
}

/*
Dump displays the passed parameters to standard out with newlines, customizable
indentation, and additional debug information such as complete types and all
pointer addresses used to indirect to the final value. It provides the
following features over the built-in printing facilities provided by the fmt
package:

	* Pointers are dereferenced and followed
	* Circular data structures are detected and handled properly
	* Custom Stringer/error interfaces are optionally invoked, including
	  on unexported types
	* Custom types which only implement the Stringer/error interfaces via
	  a pointer receiver are optionally invoked when passing non-pointer
	  variables
	* Byte arrays and slices are dumped like the hexdump -C command which
	  includes offsets, byte values in hex, and ASCII output

The configuration options are controlled by modifying the public members
of c. See ConfigState for options documentation.

See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
get the formatted result as a string.
*/
func (c *ConfigState) Dump(a ...interface{}) {
	fdump(c, os.Stdout, a...)
}

// Sdump returns a string with the passed arguments formatted exactly the same
// as Dump.
func (c *ConfigState) Sdump(a ...interface{}) string {
	var buf bytes.Buffer
	fdump(c, &buf, a...)
	return buf.String()
}

// convertArgs accepts a slice of arguments and returns a slice of the same
// length with each argument converted to a spew Formatter interface using
// the ConfigState associated with c.
func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) {
	formatters = make([]interface{}, len(args))
	for index, arg := range args {
		formatters[index] = newFormatter(c, arg)
	}
	return formatters
}

// NewDefaultConfig returns a ConfigState with the following default settings.
//
//	Indent: " "
//	MaxDepth: 0
//	DisableMethods: false
//	DisablePointerMethods: false
//	ContinueOnMethod: false
//	SortKeys: false
func NewDefaultConfig() *ConfigState {
	return &ConfigState{Indent: " "}
}
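The methods above mirror spew's package-level API one-for-one, so a caller can
hold a private configuration instead of mutating the shared spew.Config global.
A minimal sketch of that pattern, assuming only the ConfigState fields shown in
this file (the sample type and variable names are illustrative):

package main

import "github.com/davecgh/go-spew/spew"

type server struct {
	Name  string
	Ports []int
}

func main() {
	// A private configuration: tab indentation, deterministic key order.
	cs := spew.ConfigState{Indent: "\t", SortKeys: true}

	s := &server{Name: "api", Ports: []int{8080, 8443}}

	cs.Dump(s)                       // multi-line dump to stdout
	text := cs.Sdump(s)              // the same output as a string
	line := cs.Sprintf("s = %+v", s) // inline form with pointer addresses
	_, _ = text, line
}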
211
vendor/github.com/davecgh/go-spew/spew/doc.go
generated
vendored
Normal file
@@ -0,0 +1,211 @@
/*
 * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

/*
Package spew implements a deep pretty printer for Go data structures to aid in
debugging.

A quick overview of the additional features spew provides over the built-in
printing facilities for Go data types is as follows:

	* Pointers are dereferenced and followed
	* Circular data structures are detected and handled properly
	* Custom Stringer/error interfaces are optionally invoked, including
	  on unexported types
	* Custom types which only implement the Stringer/error interfaces via
	  a pointer receiver are optionally invoked when passing non-pointer
	  variables
	* Byte arrays and slices are dumped like the hexdump -C command which
	  includes offsets, byte values in hex, and ASCII output (only when using
	  Dump style)

There are two different approaches spew allows for dumping Go data structures:

	* Dump style which prints with newlines, customizable indentation,
	  and additional debug information such as types and all pointer addresses
	  used to indirect to the final value
	* A custom Formatter interface that integrates cleanly with the standard fmt
	  package and replaces %v, %+v, %#v, and %#+v to provide inline printing
	  similar to the default %v while providing the additional functionality
	  outlined above and passing unsupported format verbs such as %x and %q
	  along to fmt

Quick Start

This section demonstrates how to quickly get started with spew. See the
sections below for further details on formatting and configuration options.

To dump a variable with full newlines, indentation, type, and pointer
information use Dump, Fdump, or Sdump:

	spew.Dump(myVar1, myVar2, ...)
	spew.Fdump(someWriter, myVar1, myVar2, ...)
	str := spew.Sdump(myVar1, myVar2, ...)

Alternatively, if you would prefer to use format strings with a compacted inline
printing style, use the convenience wrappers Printf, Fprintf, etc with
%v (most compact), %+v (adds pointer addresses), %#v (adds types), or
%#+v (adds types and pointer addresses):

	spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
	spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
	spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
	spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)

Configuration Options

Configuration of spew is handled by fields in the ConfigState type. For
convenience, all of the top-level functions use a global state available
via the spew.Config global.

It is also possible to create a ConfigState instance that provides methods
equivalent to the top-level functions. This allows concurrent configuration
options. See the ConfigState documentation for more details.

The following configuration options are available:

	* Indent
		String to use for each indentation level for Dump functions.
		It is a single space by default. A popular alternative is "\t".

	* MaxDepth
		Maximum number of levels to descend into nested data structures.
		There is no limit by default.

	* DisableMethods
		Disables invocation of error and Stringer interface methods.
		Method invocation is enabled by default.

	* DisablePointerMethods
		Disables invocation of error and Stringer interface methods on types
		which only accept pointer receivers from non-pointer variables.
		Pointer method invocation is enabled by default.

	* DisablePointerAddresses
		DisablePointerAddresses specifies whether to disable the printing of
		pointer addresses. This is useful when diffing data structures in tests.

	* DisableCapacities
		DisableCapacities specifies whether to disable the printing of
		capacities for arrays, slices, maps and channels. This is useful when
		diffing data structures in tests.

	* ContinueOnMethod
		Enables recursion into types after invoking error and Stringer interface
		methods. Recursion after method invocation is disabled by default.

	* SortKeys
		Specifies map keys should be sorted before being printed. Use
		this to have a more deterministic, diffable output. Note that
		only native types (bool, int, uint, floats, uintptr and string)
		and types which implement error or Stringer interfaces are
		supported with other types sorted according to the
		reflect.Value.String() output which guarantees display
		stability. Natural map order is used by default.

	* SpewKeys
		Specifies that, as a last resort attempt, map keys should be
		spewed to strings and sorted by those strings. This is only
		considered if SortKeys is true.

Dump Usage

Simply call spew.Dump with a list of variables you want to dump:

	spew.Dump(myVar1, myVar2, ...)

You may also call spew.Fdump if you would prefer to output to an arbitrary
io.Writer. For example, to dump to standard error:

	spew.Fdump(os.Stderr, myVar1, myVar2, ...)

A third option is to call spew.Sdump to get the formatted output as a string:

	str := spew.Sdump(myVar1, myVar2, ...)

Sample Dump Output

See the Dump example for details on the setup of the types and variables being
shown here.

	(main.Foo) {
	 unexportedField: (*main.Bar)(0xf84002e210)({
	  flag: (main.Flag) flagTwo,
	  data: (uintptr) <nil>
	 }),
	 ExportedField: (map[interface {}]interface {}) (len=1) {
	  (string) (len=3) "one": (bool) true
	 }
	}

Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C
command as shown.

	([]uint8) (len=32 cap=32) {
	 00000000  11 12 13 14 15 16 17 18  19 1a 1b 1c 1d 1e 1f 20  |............... |
	 00000010  21 22 23 24 25 26 27 28  29 2a 2b 2c 2d 2e 2f 30  |!"#$%&'()*+,-./0|
	 00000020  31 32                                             |12|
	}

Custom Formatter

Spew provides a custom formatter that implements the fmt.Formatter interface
so that it integrates cleanly with standard fmt package printing functions. The
formatter is useful for inline printing of smaller data types similar to the
standard %v format specifier.

The custom formatter only responds to the %v (most compact), %+v (adds pointer
addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
combinations. Any other verbs such as %x and %q will be sent to the
standard fmt package for formatting. In addition, the custom formatter ignores
the width and precision arguments (however they will still work on the format
specifiers not handled by the custom formatter).

Custom Formatter Usage

The simplest way to make use of the spew custom formatter is to call one of the
convenience functions such as spew.Printf, spew.Println, or spew.Fprintf. The
functions have syntax you are most likely already familiar with:

	spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
	spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
	spew.Println(myVar, myVar2)
	spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
	spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)

See the Index for the full list of convenience functions.

Sample Formatter Output

Double pointer to a uint8:
	  %v: <**>5
	 %+v: <**>(0xf8400420d0->0xf8400420c8)5
	 %#v: (**uint8)5
	%#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5

Pointer to circular struct with a uint8 field and a pointer to itself:
	  %v: <*>{1 <*><shown>}
	 %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)<shown>}
	 %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)<shown>}
	%#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)<shown>}

See the Printf example for details on the setup of variables being shown
here.

Errors

Since it is possible for custom Stringer/error interfaces to panic, spew
detects them and handles them internally by printing the panic information
inline with the output. Since spew is intended to provide deep pretty printing
capabilities on structures, it intentionally does not return any errors.
*/
package spew
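As a concrete illustration of the SortKeys option described above, a minimal
sketch (the map contents are illustrative; the output shape follows the Sample
Dump Output section):

package main

import "github.com/davecgh/go-spew/spew"

func main() {
	counts := map[string]int{"b": 2, "a": 1, "c": 3}

	// Natural (unspecified) map iteration order by default.
	spew.Dump(counts)

	// Sorted keys give deterministic, diffable output.
	spew.Config.SortKeys = true
	spew.Dump(counts)
}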
509
vendor/github.com/davecgh/go-spew/spew/dump.go
generated
vendored
Normal file
@@ -0,0 +1,509 @@
/*
 * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package spew

import (
	"bytes"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"reflect"
	"regexp"
	"strconv"
	"strings"
)

var (
	// uint8Type is a reflect.Type representing a uint8. It is used to
	// convert cgo types to uint8 slices for hexdumping.
	uint8Type = reflect.TypeOf(uint8(0))

	// cCharRE is a regular expression that matches a cgo char.
	// It is used to detect character arrays to hexdump them.
	cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`)

	// cUnsignedCharRE is a regular expression that matches a cgo unsigned
	// char. It is used to detect unsigned character arrays to hexdump
	// them.
	cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`)

	// cUint8tCharRE is a regular expression that matches a cgo uint8_t.
	// It is used to detect uint8_t arrays to hexdump them.
	cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`)
)

// dumpState contains information about the state of a dump operation.
type dumpState struct {
	w                io.Writer
	depth            int
	pointers         map[uintptr]int
	ignoreNextType   bool
	ignoreNextIndent bool
	cs               *ConfigState
}

// indent performs indentation according to the depth level and cs.Indent
// option.
func (d *dumpState) indent() {
	if d.ignoreNextIndent {
		d.ignoreNextIndent = false
		return
	}
	d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth))
}

// unpackValue returns values inside of non-nil interfaces when possible.
// This is useful for data types like structs, arrays, slices, and maps which
// can contain varying types packed inside an interface.
func (d *dumpState) unpackValue(v reflect.Value) reflect.Value {
	if v.Kind() == reflect.Interface && !v.IsNil() {
		v = v.Elem()
	}
	return v
}

// dumpPtr handles formatting of pointers by indirecting them as necessary.
func (d *dumpState) dumpPtr(v reflect.Value) {
	// Remove pointers at or below the current depth from the map used to
	// detect circular refs.
	for k, depth := range d.pointers {
		if depth >= d.depth {
			delete(d.pointers, k)
		}
	}

	// Keep list of all dereferenced pointers to show later.
	pointerChain := make([]uintptr, 0)

	// Figure out how many levels of indirection there are by dereferencing
	// pointers and unpacking interfaces down the chain while detecting circular
	// references.
	nilFound := false
	cycleFound := false
	indirects := 0
	ve := v
	for ve.Kind() == reflect.Ptr {
		if ve.IsNil() {
			nilFound = true
			break
		}
		indirects++
		addr := ve.Pointer()
		pointerChain = append(pointerChain, addr)
		if pd, ok := d.pointers[addr]; ok && pd < d.depth {
			cycleFound = true
			indirects--
			break
		}
		d.pointers[addr] = d.depth

		ve = ve.Elem()
		if ve.Kind() == reflect.Interface {
			if ve.IsNil() {
				nilFound = true
				break
			}
			ve = ve.Elem()
		}
	}

	// Display type information.
	d.w.Write(openParenBytes)
	d.w.Write(bytes.Repeat(asteriskBytes, indirects))
	d.w.Write([]byte(ve.Type().String()))
	d.w.Write(closeParenBytes)

	// Display pointer information.
	if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 {
		d.w.Write(openParenBytes)
		for i, addr := range pointerChain {
			if i > 0 {
				d.w.Write(pointerChainBytes)
			}
			printHexPtr(d.w, addr)
		}
		d.w.Write(closeParenBytes)
	}

	// Display dereferenced value.
	d.w.Write(openParenBytes)
	switch {
	case nilFound:
		d.w.Write(nilAngleBytes)

	case cycleFound:
		d.w.Write(circularBytes)

	default:
		d.ignoreNextType = true
		d.dump(ve)
	}
	d.w.Write(closeParenBytes)
}

// dumpSlice handles formatting of arrays and slices. Byte (uint8 under
// reflection) arrays and slices are dumped in hexdump -C fashion.
func (d *dumpState) dumpSlice(v reflect.Value) {
	// Determine whether this type should be hex dumped or not. Also,
	// for types which should be hexdumped, try to use the underlying data
	// first, then fall back to trying to convert them to a uint8 slice.
	var buf []uint8
	doConvert := false
	doHexDump := false
	numEntries := v.Len()
	if numEntries > 0 {
		vt := v.Index(0).Type()
		vts := vt.String()
		switch {
		// C types that need to be converted.
		case cCharRE.MatchString(vts):
			fallthrough
		case cUnsignedCharRE.MatchString(vts):
			fallthrough
		case cUint8tCharRE.MatchString(vts):
			doConvert = true

		// Try to use existing uint8 slices and fall back to converting
		// and copying if that fails.
		case vt.Kind() == reflect.Uint8:
			// We need an addressable interface to convert the type
			// to a byte slice. However, the reflect package won't
			// give us an interface on certain things like
			// unexported struct fields in order to enforce
			// visibility rules. We use unsafe, when available, to
			// bypass these restrictions since this package does not
			// mutate the values.
			vs := v
			if !vs.CanInterface() || !vs.CanAddr() {
				vs = unsafeReflectValue(vs)
			}
			if !UnsafeDisabled {
				vs = vs.Slice(0, numEntries)

				// Use the existing uint8 slice if it can be
				// type asserted.
				iface := vs.Interface()
				if slice, ok := iface.([]uint8); ok {
					buf = slice
					doHexDump = true
					break
				}
			}

			// The underlying data needs to be converted if it can't
			// be type asserted to a uint8 slice.
			doConvert = true
		}

		// Copy and convert the underlying type if needed.
		if doConvert && vt.ConvertibleTo(uint8Type) {
			// Convert and copy each element into a uint8 byte
			// slice.
			buf = make([]uint8, numEntries)
			for i := 0; i < numEntries; i++ {
				vv := v.Index(i)
				buf[i] = uint8(vv.Convert(uint8Type).Uint())
			}
			doHexDump = true
		}
	}

	// Hexdump the entire slice as needed.
	if doHexDump {
		indent := strings.Repeat(d.cs.Indent, d.depth)
		str := indent + hex.Dump(buf)
		str = strings.Replace(str, "\n", "\n"+indent, -1)
		str = strings.TrimRight(str, d.cs.Indent)
		d.w.Write([]byte(str))
		return
	}

	// Recursively call dump for each item.
	for i := 0; i < numEntries; i++ {
		d.dump(d.unpackValue(v.Index(i)))
		if i < (numEntries - 1) {
			d.w.Write(commaNewlineBytes)
		} else {
			d.w.Write(newlineBytes)
		}
	}
}

// dump is the main workhorse for dumping a value. It uses the passed reflect
// value to figure out what kind of object we are dealing with and formats it
// appropriately. It is a recursive function, however circular data structures
// are detected and handled properly.
func (d *dumpState) dump(v reflect.Value) {
	// Handle invalid reflect values immediately.
	kind := v.Kind()
	if kind == reflect.Invalid {
		d.w.Write(invalidAngleBytes)
		return
	}

	// Handle pointers specially.
	if kind == reflect.Ptr {
		d.indent()
		d.dumpPtr(v)
		return
	}

	// Print type information unless already handled elsewhere.
	if !d.ignoreNextType {
		d.indent()
		d.w.Write(openParenBytes)
		d.w.Write([]byte(v.Type().String()))
		d.w.Write(closeParenBytes)
		d.w.Write(spaceBytes)
	}
	d.ignoreNextType = false

	// Display length and capacity if the built-in len and cap functions
	// work with the value's kind and the len/cap itself is non-zero.
	valueLen, valueCap := 0, 0
	switch v.Kind() {
	case reflect.Array, reflect.Slice, reflect.Chan:
		valueLen, valueCap = v.Len(), v.Cap()
	case reflect.Map, reflect.String:
		valueLen = v.Len()
	}
	if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 {
		d.w.Write(openParenBytes)
		if valueLen != 0 {
			d.w.Write(lenEqualsBytes)
			printInt(d.w, int64(valueLen), 10)
		}
		if !d.cs.DisableCapacities && valueCap != 0 {
			if valueLen != 0 {
				d.w.Write(spaceBytes)
			}
			d.w.Write(capEqualsBytes)
			printInt(d.w, int64(valueCap), 10)
		}
		d.w.Write(closeParenBytes)
		d.w.Write(spaceBytes)
	}

	// Call Stringer/error interfaces if they exist and the handle methods
	// flag is enabled.
	if !d.cs.DisableMethods {
		if (kind != reflect.Invalid) && (kind != reflect.Interface) {
			if handled := handleMethods(d.cs, d.w, v); handled {
				return
			}
		}
	}

	switch kind {
	case reflect.Invalid:
		// Do nothing. We should never get here since invalid has already
		// been handled above.

	case reflect.Bool:
		printBool(d.w, v.Bool())

	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		printInt(d.w, v.Int(), 10)

	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
		printUint(d.w, v.Uint(), 10)

	case reflect.Float32:
		printFloat(d.w, v.Float(), 32)

	case reflect.Float64:
		printFloat(d.w, v.Float(), 64)

	case reflect.Complex64:
		printComplex(d.w, v.Complex(), 32)

	case reflect.Complex128:
		printComplex(d.w, v.Complex(), 64)

	case reflect.Slice:
		if v.IsNil() {
			d.w.Write(nilAngleBytes)
			break
		}
		fallthrough

	case reflect.Array:
		d.w.Write(openBraceNewlineBytes)
		d.depth++
		if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
			d.indent()
			d.w.Write(maxNewlineBytes)
		} else {
			d.dumpSlice(v)
		}
		d.depth--
		d.indent()
		d.w.Write(closeBraceBytes)

	case reflect.String:
		d.w.Write([]byte(strconv.Quote(v.String())))

	case reflect.Interface:
		// The only time we should get here is for nil interfaces due to
		// unpackValue calls.
		if v.IsNil() {
			d.w.Write(nilAngleBytes)
		}

	case reflect.Ptr:
		// Do nothing. We should never get here since pointers have already
		// been handled above.

	case reflect.Map:
		// nil maps should be indicated as different from empty maps
		if v.IsNil() {
			d.w.Write(nilAngleBytes)
			break
		}

		d.w.Write(openBraceNewlineBytes)
		d.depth++
		if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
			d.indent()
			d.w.Write(maxNewlineBytes)
		} else {
			numEntries := v.Len()
			keys := v.MapKeys()
			if d.cs.SortKeys {
				sortValues(keys, d.cs)
			}
			for i, key := range keys {
				d.dump(d.unpackValue(key))
				d.w.Write(colonSpaceBytes)
				d.ignoreNextIndent = true
				d.dump(d.unpackValue(v.MapIndex(key)))
				if i < (numEntries - 1) {
					d.w.Write(commaNewlineBytes)
				} else {
					d.w.Write(newlineBytes)
				}
			}
		}
		d.depth--
		d.indent()
		d.w.Write(closeBraceBytes)

	case reflect.Struct:
		d.w.Write(openBraceNewlineBytes)
		d.depth++
		if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
			d.indent()
			d.w.Write(maxNewlineBytes)
		} else {
			vt := v.Type()
			numFields := v.NumField()
			for i := 0; i < numFields; i++ {
				d.indent()
				vtf := vt.Field(i)
				d.w.Write([]byte(vtf.Name))
				d.w.Write(colonSpaceBytes)
				d.ignoreNextIndent = true
				d.dump(d.unpackValue(v.Field(i)))
				if i < (numFields - 1) {
					d.w.Write(commaNewlineBytes)
				} else {
					d.w.Write(newlineBytes)
				}
			}
		}
		d.depth--
		d.indent()
		d.w.Write(closeBraceBytes)

	case reflect.Uintptr:
		printHexPtr(d.w, uintptr(v.Uint()))

	case reflect.UnsafePointer, reflect.Chan, reflect.Func:
		printHexPtr(d.w, v.Pointer())

	// There were not any other types at the time this code was written, but
	// fall back to letting the default fmt package handle it in case any new
	// types are added.
	default:
		if v.CanInterface() {
			fmt.Fprintf(d.w, "%v", v.Interface())
		} else {
			fmt.Fprintf(d.w, "%v", v.String())
		}
	}
}

// fdump is a helper function to consolidate the logic from the various public
// methods which take varying writers and config states.
func fdump(cs *ConfigState, w io.Writer, a ...interface{}) {
	for _, arg := range a {
		if arg == nil {
			w.Write(interfaceBytes)
			w.Write(spaceBytes)
			w.Write(nilAngleBytes)
			w.Write(newlineBytes)
			continue
		}

		d := dumpState{w: w, cs: cs}
		d.pointers = make(map[uintptr]int)
		d.dump(reflect.ValueOf(arg))
		d.w.Write(newlineBytes)
	}
}

// Fdump formats and displays the passed arguments to io.Writer w. It formats
// exactly the same as Dump.
func Fdump(w io.Writer, a ...interface{}) {
	fdump(&Config, w, a...)
}

// Sdump returns a string with the passed arguments formatted exactly the same
// as Dump.
func Sdump(a ...interface{}) string {
	var buf bytes.Buffer
	fdump(&Config, &buf, a...)
	return buf.String()
}

/*
Dump displays the passed parameters to standard out with newlines, customizable
indentation, and additional debug information such as complete types and all
pointer addresses used to indirect to the final value. It provides the
following features over the built-in printing facilities provided by the fmt
package:

	* Pointers are dereferenced and followed
	* Circular data structures are detected and handled properly
	* Custom Stringer/error interfaces are optionally invoked, including
	  on unexported types
	* Custom types which only implement the Stringer/error interfaces via
	  a pointer receiver are optionally invoked when passing non-pointer
	  variables
	* Byte arrays and slices are dumped like the hexdump -C command which
	  includes offsets, byte values in hex, and ASCII output

The configuration options are controlled by an exported package global,
spew.Config. See ConfigState for options documentation.

See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
get the formatted result as a string.
*/
func Dump(a ...interface{}) {
	fdump(&Config, os.Stdout, a...)
}
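The cycle detection in dumpPtr above is what keeps Dump from recursing forever
on self-referential data. A minimal sketch of it in action (the node type is
illustrative):

package main

import "github.com/davecgh/go-spew/spew"

type node struct {
	ID   int
	Next *node
}

func main() {
	n := &node{ID: 1}
	n.Next = n // self-referential pointer

	// Dump follows the pointer once, then emits the circular marker
	// instead of recursing (see the cycleFound handling in dumpPtr).
	spew.Dump(n)
}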
419
vendor/github.com/davecgh/go-spew/spew/format.go
generated
vendored
Normal file
@@ -0,0 +1,419 @@
/*
 * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package spew

import (
	"bytes"
	"fmt"
	"reflect"
	"strconv"
	"strings"
)

// supportedFlags is a list of all the character flags supported by fmt package.
const supportedFlags = "0-+# "

// formatState implements the fmt.Formatter interface and contains information
// about the state of a formatting operation. The NewFormatter function can
// be used to get a new Formatter which can be used directly as arguments
// in standard fmt package printing calls.
type formatState struct {
	value          interface{}
	fs             fmt.State
	depth          int
	pointers       map[uintptr]int
	ignoreNextType bool
	cs             *ConfigState
}

// buildDefaultFormat recreates the original format string without precision
// and width information to pass in to fmt.Sprintf in the case of an
// unrecognized type. Unless new types are added to the language, this
// function won't ever be called.
func (f *formatState) buildDefaultFormat() (format string) {
	buf := bytes.NewBuffer(percentBytes)

	for _, flag := range supportedFlags {
		if f.fs.Flag(int(flag)) {
			buf.WriteRune(flag)
		}
	}

	buf.WriteRune('v')

	format = buf.String()
	return format
}

// constructOrigFormat recreates the original format string including precision
// and width information to pass along to the standard fmt package. This allows
// automatic deferral of all format strings this package doesn't support.
func (f *formatState) constructOrigFormat(verb rune) (format string) {
	buf := bytes.NewBuffer(percentBytes)

	for _, flag := range supportedFlags {
		if f.fs.Flag(int(flag)) {
			buf.WriteRune(flag)
		}
	}

	if width, ok := f.fs.Width(); ok {
		buf.WriteString(strconv.Itoa(width))
	}

	if precision, ok := f.fs.Precision(); ok {
		buf.Write(precisionBytes)
		buf.WriteString(strconv.Itoa(precision))
	}

	buf.WriteRune(verb)

	format = buf.String()
	return format
}

// unpackValue returns values inside of non-nil interfaces when possible and
// ensures that types for values which have been unpacked from an interface
// are displayed when the show types flag is also set.
// This is useful for data types like structs, arrays, slices, and maps which
// can contain varying types packed inside an interface.
func (f *formatState) unpackValue(v reflect.Value) reflect.Value {
	if v.Kind() == reflect.Interface {
		f.ignoreNextType = false
		if !v.IsNil() {
			v = v.Elem()
		}
	}
	return v
}

// formatPtr handles formatting of pointers by indirecting them as necessary.
func (f *formatState) formatPtr(v reflect.Value) {
	// Display nil if top level pointer is nil.
	showTypes := f.fs.Flag('#')
	if v.IsNil() && (!showTypes || f.ignoreNextType) {
		f.fs.Write(nilAngleBytes)
		return
	}

	// Remove pointers at or below the current depth from the map used to
	// detect circular refs.
	for k, depth := range f.pointers {
		if depth >= f.depth {
			delete(f.pointers, k)
		}
	}

	// Keep list of all dereferenced pointers to possibly show later.
	pointerChain := make([]uintptr, 0)

	// Figure out how many levels of indirection there are by dereferencing
	// pointers and unpacking interfaces down the chain while detecting circular
	// references.
	nilFound := false
	cycleFound := false
	indirects := 0
	ve := v
	for ve.Kind() == reflect.Ptr {
		if ve.IsNil() {
			nilFound = true
			break
		}
		indirects++
		addr := ve.Pointer()
		pointerChain = append(pointerChain, addr)
		if pd, ok := f.pointers[addr]; ok && pd < f.depth {
			cycleFound = true
			indirects--
			break
		}
		f.pointers[addr] = f.depth

		ve = ve.Elem()
		if ve.Kind() == reflect.Interface {
			if ve.IsNil() {
				nilFound = true
				break
			}
			ve = ve.Elem()
		}
	}

	// Display type or indirection level depending on flags.
	if showTypes && !f.ignoreNextType {
		f.fs.Write(openParenBytes)
		f.fs.Write(bytes.Repeat(asteriskBytes, indirects))
		f.fs.Write([]byte(ve.Type().String()))
		f.fs.Write(closeParenBytes)
	} else {
		if nilFound || cycleFound {
			indirects += strings.Count(ve.Type().String(), "*")
		}
		f.fs.Write(openAngleBytes)
		f.fs.Write([]byte(strings.Repeat("*", indirects)))
		f.fs.Write(closeAngleBytes)
	}

	// Display pointer information depending on flags.
	if f.fs.Flag('+') && (len(pointerChain) > 0) {
		f.fs.Write(openParenBytes)
		for i, addr := range pointerChain {
			if i > 0 {
				f.fs.Write(pointerChainBytes)
			}
			printHexPtr(f.fs, addr)
		}
		f.fs.Write(closeParenBytes)
	}

	// Display dereferenced value.
	switch {
	case nilFound:
		f.fs.Write(nilAngleBytes)

	case cycleFound:
		f.fs.Write(circularShortBytes)

	default:
		f.ignoreNextType = true
		f.format(ve)
	}
}

// format is the main workhorse for providing the Formatter interface. It
// uses the passed reflect value to figure out what kind of object we are
// dealing with and formats it appropriately. It is a recursive function,
// however circular data structures are detected and handled properly.
func (f *formatState) format(v reflect.Value) {
	// Handle invalid reflect values immediately.
	kind := v.Kind()
	if kind == reflect.Invalid {
		f.fs.Write(invalidAngleBytes)
		return
	}

	// Handle pointers specially.
	if kind == reflect.Ptr {
		f.formatPtr(v)
		return
	}

	// Print type information unless already handled elsewhere.
	if !f.ignoreNextType && f.fs.Flag('#') {
		f.fs.Write(openParenBytes)
		f.fs.Write([]byte(v.Type().String()))
		f.fs.Write(closeParenBytes)
	}
	f.ignoreNextType = false

	// Call Stringer/error interfaces if they exist and the handle methods
	// flag is enabled.
	if !f.cs.DisableMethods {
		if (kind != reflect.Invalid) && (kind != reflect.Interface) {
			if handled := handleMethods(f.cs, f.fs, v); handled {
				return
			}
		}
	}

	switch kind {
	case reflect.Invalid:
		// Do nothing. We should never get here since invalid has already
		// been handled above.

	case reflect.Bool:
		printBool(f.fs, v.Bool())

	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		printInt(f.fs, v.Int(), 10)

	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
		printUint(f.fs, v.Uint(), 10)

	case reflect.Float32:
		printFloat(f.fs, v.Float(), 32)

	case reflect.Float64:
		printFloat(f.fs, v.Float(), 64)

	case reflect.Complex64:
		printComplex(f.fs, v.Complex(), 32)

	case reflect.Complex128:
		printComplex(f.fs, v.Complex(), 64)

	case reflect.Slice:
		if v.IsNil() {
			f.fs.Write(nilAngleBytes)
			break
		}
		fallthrough

	case reflect.Array:
		f.fs.Write(openBracketBytes)
		f.depth++
		if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
			f.fs.Write(maxShortBytes)
		} else {
			numEntries := v.Len()
			for i := 0; i < numEntries; i++ {
				if i > 0 {
					f.fs.Write(spaceBytes)
				}
				f.ignoreNextType = true
				f.format(f.unpackValue(v.Index(i)))
			}
		}
		f.depth--
		f.fs.Write(closeBracketBytes)

	case reflect.String:
		f.fs.Write([]byte(v.String()))

	case reflect.Interface:
		// The only time we should get here is for nil interfaces due to
		// unpackValue calls.
		if v.IsNil() {
			f.fs.Write(nilAngleBytes)
		}

	case reflect.Ptr:
		// Do nothing. We should never get here since pointers have already
		// been handled above.

	case reflect.Map:
		// nil maps should be indicated as different from empty maps
		if v.IsNil() {
			f.fs.Write(nilAngleBytes)
			break
		}

		f.fs.Write(openMapBytes)
		f.depth++
		if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
			f.fs.Write(maxShortBytes)
		} else {
			keys := v.MapKeys()
			if f.cs.SortKeys {
				sortValues(keys, f.cs)
			}
			for i, key := range keys {
				if i > 0 {
					f.fs.Write(spaceBytes)
				}
				f.ignoreNextType = true
				f.format(f.unpackValue(key))
				f.fs.Write(colonBytes)
				f.ignoreNextType = true
				f.format(f.unpackValue(v.MapIndex(key)))
			}
		}
		f.depth--
		f.fs.Write(closeMapBytes)

	case reflect.Struct:
		numFields := v.NumField()
		f.fs.Write(openBraceBytes)
		f.depth++
		if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
			f.fs.Write(maxShortBytes)
		} else {
			vt := v.Type()
			for i := 0; i < numFields; i++ {
				if i > 0 {
					f.fs.Write(spaceBytes)
				}
				vtf := vt.Field(i)
				if f.fs.Flag('+') || f.fs.Flag('#') {
					f.fs.Write([]byte(vtf.Name))
					f.fs.Write(colonBytes)
				}
				f.format(f.unpackValue(v.Field(i)))
			}
		}
		f.depth--
		f.fs.Write(closeBraceBytes)

	case reflect.Uintptr:
		printHexPtr(f.fs, uintptr(v.Uint()))

	case reflect.UnsafePointer, reflect.Chan, reflect.Func:
		printHexPtr(f.fs, v.Pointer())

	// There were not any other types at the time this code was written, but
	// fall back to letting the default fmt package handle it if any get added.
	default:
		format := f.buildDefaultFormat()
		if v.CanInterface() {
			fmt.Fprintf(f.fs, format, v.Interface())
		} else {
			fmt.Fprintf(f.fs, format, v.String())
		}
	}
}

// Format satisfies the fmt.Formatter interface. See NewFormatter for usage
// details.
func (f *formatState) Format(fs fmt.State, verb rune) {
	f.fs = fs

	// Use standard formatting for verbs that are not v.
	if verb != 'v' {
		format := f.constructOrigFormat(verb)
		fmt.Fprintf(fs, format, f.value)
		return
	}

	if f.value == nil {
		if fs.Flag('#') {
			fs.Write(interfaceBytes)
		}
		fs.Write(nilAngleBytes)
		return
	}

	f.format(reflect.ValueOf(f.value))
}

// newFormatter is a helper function to consolidate the logic from the various
// public methods which take varying config states.
func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter {
	fs := &formatState{value: v, cs: cs}
	fs.pointers = make(map[uintptr]int)
	return fs
}

/*
NewFormatter returns a custom formatter that satisfies the fmt.Formatter
interface. As a result, it integrates cleanly with standard fmt package
printing functions. The formatter is useful for inline printing of smaller data
types similar to the standard %v format specifier.

The custom formatter only responds to the %v (most compact), %+v (adds pointer
addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
combinations. Any other verbs such as %x and %q will be sent to the
standard fmt package for formatting. In addition, the custom formatter ignores
the width and precision arguments (however they will still work on the format
specifiers not handled by the custom formatter).

Typically this function shouldn't be called directly. It is much easier to make
use of the custom formatter by calling one of the convenience functions such as
Printf, Println, or Fprintf.
*/
func NewFormatter(v interface{}) fmt.Formatter {
	return newFormatter(&Config, v)
}
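Because the value returned by NewFormatter satisfies fmt.Formatter, it can be
handed directly to any fmt-style printing call. A minimal sketch (the point
type is illustrative):

package main

import (
	"fmt"

	"github.com/davecgh/go-spew/spew"
)

type point struct{ X, Y int }

func main() {
	p := &point{X: 1, Y: 2}

	// The wrapped value responds to %v, %+v, %#v, and %#+v; any other
	// verb is deferred to the standard fmt package unchanged.
	fmt.Printf("%v\n", spew.NewFormatter(p))
	fmt.Printf("%#+v\n", spew.NewFormatter(p))
}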
148
vendor/github.com/davecgh/go-spew/spew/spew.go
generated
vendored
Normal file
@@ -0,0 +1,148 @@
/*
 * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package spew

import (
	"fmt"
	"io"
)

// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the formatted string as a value that satisfies error. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b))
func Errorf(format string, a ...interface{}) (err error) {
	return fmt.Errorf(format, convertArgs(a)...)
}

// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the number of bytes written and any write error encountered. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b))
func Fprint(w io.Writer, a ...interface{}) (n int, err error) {
	return fmt.Fprint(w, convertArgs(a)...)
}

// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the number of bytes written and any write error encountered. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b))
func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
	return fmt.Fprintf(w, format, convertArgs(a)...)
}

// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it
// were passed with a default Formatter interface returned by NewFormatter. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b))
func Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
	return fmt.Fprintln(w, convertArgs(a)...)
}

// Print is a wrapper for fmt.Print that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the number of bytes written and any write error encountered. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b))
func Print(a ...interface{}) (n int, err error) {
	return fmt.Print(convertArgs(a)...)
}

// Printf is a wrapper for fmt.Printf that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the number of bytes written and any write error encountered. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b))
func Printf(format string, a ...interface{}) (n int, err error) {
	return fmt.Printf(format, convertArgs(a)...)
}

// Println is a wrapper for fmt.Println that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the number of bytes written and any write error encountered. See
// NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b))
func Println(a ...interface{}) (n int, err error) {
	return fmt.Println(convertArgs(a)...)
}

// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the resulting string. See NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b))
func Sprint(a ...interface{}) string {
	return fmt.Sprint(convertArgs(a)...)
}

// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were
// passed with a default Formatter interface returned by NewFormatter. It
// returns the resulting string. See NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b))
func Sprintf(format string, a ...interface{}) string {
	return fmt.Sprintf(format, convertArgs(a)...)
}

// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it
// were passed with a default Formatter interface returned by NewFormatter. It
// returns the resulting string. See NewFormatter for formatting details.
//
// This function is shorthand for the following syntax:
//
//	fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b))
func Sprintln(a ...interface{}) string {
	return fmt.Sprintln(convertArgs(a)...)
}

// convertArgs accepts a slice of arguments and returns a slice of the same
// length with each argument converted to a default spew Formatter interface.
func convertArgs(args []interface{}) (formatters []interface{}) {
	formatters = make([]interface{}, len(args))
	for index, arg := range args {
		formatters[index] = NewFormatter(arg)
	}
	return formatters
}
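The wrappers above make spew a drop-in replacement for the matching fmt calls.
One short sketch with Errorf (the request values are illustrative):

package main

import (
	"fmt"

	"github.com/davecgh/go-spew/spew"
)

type request struct {
	Method string
	Path   string
}

func main() {
	r := request{Method: "GET", Path: "/health"}

	// Each argument is wrapped with the spew Formatter before fmt.Errorf
	// runs, so %+v in the error text uses spew's inline formatting.
	err := spew.Errorf("unexpected request: %+v", r)
	if err != nil {
		fmt.Println(err)
	}
}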
41
vendor/github.com/google/uuid/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,41 @@
# Changelog

## [1.6.0](https://github.com/google/uuid/compare/v1.5.0...v1.6.0) (2024-01-16)

### Features

* add Max UUID constant ([#149](https://github.com/google/uuid/issues/149)) ([c58770e](https://github.com/google/uuid/commit/c58770eb495f55fe2ced6284f93c5158a62e53e3))

### Bug Fixes

* fix typo in version 7 uuid documentation ([#153](https://github.com/google/uuid/issues/153)) ([016b199](https://github.com/google/uuid/commit/016b199544692f745ffc8867b914129ecb47ef06))
* Monotonicity in UUIDv7 ([#150](https://github.com/google/uuid/issues/150)) ([a2b2b32](https://github.com/google/uuid/commit/a2b2b32373ff0b1a312b7fdf6d38a977099698a6))

## [1.5.0](https://github.com/google/uuid/compare/v1.4.0...v1.5.0) (2023-12-12)

### Features

* Validate UUID without creating new UUID ([#141](https://github.com/google/uuid/issues/141)) ([9ee7366](https://github.com/google/uuid/commit/9ee7366e66c9ad96bab89139418a713dc584ae29))

## [1.4.0](https://github.com/google/uuid/compare/v1.3.1...v1.4.0) (2023-10-26)

### Features

* UUIDs slice type with Strings() convenience method ([#133](https://github.com/google/uuid/issues/133)) ([cd5fbbd](https://github.com/google/uuid/commit/cd5fbbdd02f3e3467ac18940e07e062be1f864b4))

### Fixes

* Clarify that Parse's job is to parse but not necessarily validate strings. (Documents current behavior)

## [1.3.1](https://github.com/google/uuid/compare/v1.3.0...v1.3.1) (2023-08-18)

### Bug Fixes

* Use .EqualFold() to parse urn prefixed UUIDs ([#118](https://github.com/google/uuid/issues/118)) ([574e687](https://github.com/google/uuid/commit/574e6874943741fb99d41764c705173ada5293f0))

## Changelog
26
vendor/github.com/google/uuid/CONTRIBUTING.md
generated
vendored
Normal file
@@ -0,0 +1,26 @@
# How to contribute

We definitely welcome patches and contributions to this project!

### Tips

Commits must be formatted according to the [Conventional Commits Specification](https://www.conventionalcommits.org).

Always try to include a test case! If it is not possible or not necessary,
please explain why in the pull request description.

### Releasing

Commits that would precipitate a SemVer change, as described in the Conventional
Commits Specification, will trigger [`release-please`](https://github.com/google-github-actions/release-please-action)
to create a release candidate pull request. Once submitted, `release-please`
will create a release.

For tips on how to work with `release-please`, see its documentation.

### Legal requirements

In order to protect both you and ourselves, you will need to sign the
[Contributor License Agreement](https://cla.developers.google.com/clas).

You may have already signed it for other Google projects.
9 vendor/github.com/google/uuid/CONTRIBUTORS generated vendored Normal file
@@ -0,0 +1,9 @@
Paul Borman <borman@google.com>
bmatsuo
shawnps
theory
jboverfelt
dsymonds
cd1
wallclockbuilder
dansouza
27 vendor/github.com/google/uuid/LICENSE generated vendored Normal file
@@ -0,0 +1,27 @@
Copyright (c) 2009,2014 Google Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

    * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
    * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
21 vendor/github.com/google/uuid/README.md generated vendored Normal file
@@ -0,0 +1,21 @@
# uuid
The uuid package generates and inspects UUIDs based on
[RFC 4122](https://datatracker.ietf.org/doc/html/rfc4122)
and DCE 1.1: Authentication and Security Services.

This package is based on the github.com/pborman/uuid package (previously named
code.google.com/p/go-uuid). It differs from these earlier packages in that
a UUID is a 16 byte array rather than a byte slice. One loss due to this
change is the ability to represent an invalid UUID (vs a NIL UUID).

###### Install
```sh
go get github.com/google/uuid
```

###### Documentation
[![Go Reference](https://pkg.go.dev/badge/github.com/google/uuid.svg)](https://pkg.go.dev/github.com/google/uuid)

Full `go doc` style documentation for the package can be viewed online without
installing this package by using the GoDoc site here:
http://pkg.go.dev/github.com/google/uuid
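To complement the README above, a minimal usage sketch; it relies only on `uuid.New`, `uuid.Parse`, and the fact stated above that a UUID is a 16 byte array:

```go
package main

import (
    "fmt"

    "github.com/google/uuid"
)

func main() {
    id := uuid.New() // random (Version 4) UUID
    fmt.Println(id.String())

    parsed, err := uuid.Parse(id.String())
    if err != nil {
        panic(err)
    }
    // Because UUID is a 16 byte array (not a slice), values compare with ==.
    fmt.Println(parsed == id) // true
}
```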
80 vendor/github.com/google/uuid/dce.go generated vendored Normal file
@@ -0,0 +1,80 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package uuid

import (
    "encoding/binary"
    "fmt"
    "os"
)

// A Domain represents a Version 2 domain.
type Domain byte

// Domain constants for DCE Security (Version 2) UUIDs.
const (
    Person = Domain(0)
    Group  = Domain(1)
    Org    = Domain(2)
)

// NewDCESecurity returns a DCE Security (Version 2) UUID.
//
// The domain should be one of Person, Group or Org.
// On a POSIX system the id should be the user's UID for the Person
// domain and the user's GID for the Group. The meaning of id for
// the domain Org or on non-POSIX systems is site defined.
//
// For a given domain/id pair the same token may be returned for up to
// 7 minutes and 10 seconds.
func NewDCESecurity(domain Domain, id uint32) (UUID, error) {
    uuid, err := NewUUID()
    if err == nil {
        uuid[6] = (uuid[6] & 0x0f) | 0x20 // Version 2
        uuid[9] = byte(domain)
        binary.BigEndian.PutUint32(uuid[0:], id)
    }
    return uuid, err
}

// NewDCEPerson returns a DCE Security (Version 2) UUID in the person
// domain with the id returned by os.Getuid.
//
//	NewDCESecurity(Person, uint32(os.Getuid()))
func NewDCEPerson() (UUID, error) {
    return NewDCESecurity(Person, uint32(os.Getuid()))
}

// NewDCEGroup returns a DCE Security (Version 2) UUID in the group
// domain with the id returned by os.Getgid.
//
//	NewDCESecurity(Group, uint32(os.Getgid()))
func NewDCEGroup() (UUID, error) {
    return NewDCESecurity(Group, uint32(os.Getgid()))
}

// Domain returns the domain for a Version 2 UUID. Domains are only defined
// for Version 2 UUIDs.
func (uuid UUID) Domain() Domain {
    return Domain(uuid[9])
}

// ID returns the id for a Version 2 UUID. IDs are only defined for Version 2
// UUIDs.
func (uuid UUID) ID() uint32 {
    return binary.BigEndian.Uint32(uuid[0:4])
}

func (d Domain) String() string {
    switch d {
    case Person:
        return "Person"
    case Group:
        return "Group"
    case Org:
        return "Org"
    }
    return fmt.Sprintf("Domain%d", int(d))
}
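A short sketch of the Version 2 round trip this file describes: the domain byte and the caller's id are recoverable from the generated UUID via `Domain()` and `ID()`, exactly as `NewDCESecurity` wrote them (POSIX semantics for `os.Getuid`, as the comments note):

```go
package main

import (
    "fmt"

    "github.com/google/uuid"
)

func main() {
    // Equivalent to uuid.NewDCESecurity(uuid.Person, uint32(os.Getuid())).
    id, err := uuid.NewDCEPerson()
    if err != nil {
        panic(err)
    }
    // Domain() reads back byte 9; ID() reads back the first four bytes.
    fmt.Println(id, id.Domain(), id.ID())
}
```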
12 vendor/github.com/google/uuid/doc.go generated vendored Normal file
@@ -0,0 +1,12 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package uuid generates and inspects UUIDs.
//
// UUIDs are based on RFC 4122 and DCE 1.1: Authentication and Security
// Services.
//
// A UUID is a 16 byte (128 bit) array. UUIDs may be used as keys to
// maps or compared directly.
package uuid
59 vendor/github.com/google/uuid/hash.go generated vendored Normal file
@@ -0,0 +1,59 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package uuid

import (
    "crypto/md5"
    "crypto/sha1"
    "hash"
)

// Well known namespace IDs and UUIDs
var (
    NameSpaceDNS  = Must(Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c8"))
    NameSpaceURL  = Must(Parse("6ba7b811-9dad-11d1-80b4-00c04fd430c8"))
    NameSpaceOID  = Must(Parse("6ba7b812-9dad-11d1-80b4-00c04fd430c8"))
    NameSpaceX500 = Must(Parse("6ba7b814-9dad-11d1-80b4-00c04fd430c8"))
    Nil           UUID // empty UUID, all zeros

    // The Max UUID is a special form of UUID that is specified to have all 128 bits set to 1.
    Max = UUID{
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
    }
)

// NewHash returns a new UUID derived from the hash of space concatenated with
// data generated by h. The hash should be at least 16 bytes in length. The
// first 16 bytes of the hash are used to form the UUID. The version of the
// UUID will be the lower 4 bits of version. NewHash is used to implement
// NewMD5 and NewSHA1.
func NewHash(h hash.Hash, space UUID, data []byte, version int) UUID {
    h.Reset()
    h.Write(space[:]) //nolint:errcheck
    h.Write(data)     //nolint:errcheck
    s := h.Sum(nil)
    var uuid UUID
    copy(uuid[:], s)
    uuid[6] = (uuid[6] & 0x0f) | uint8((version&0xf)<<4)
    uuid[8] = (uuid[8] & 0x3f) | 0x80 // RFC 4122 variant
    return uuid
}

// NewMD5 returns a new MD5 (Version 3) UUID based on the
// supplied name space and data. It is the same as calling:
//
//	NewHash(md5.New(), space, data, 3)
func NewMD5(space UUID, data []byte) UUID {
    return NewHash(md5.New(), space, data, 3)
}

// NewSHA1 returns a new SHA1 (Version 5) UUID based on the
// supplied name space and data. It is the same as calling:
//
//	NewHash(sha1.New(), space, data, 5)
func NewSHA1(space UUID, data []byte) UUID {
    return NewHash(sha1.New(), space, data, 5)
}
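The key property of these name-based constructors is determinism: hashing the same namespace and name always yields the same UUID. A small sketch using the `NameSpaceDNS` constant defined above:

```go
package main

import (
    "fmt"

    "github.com/google/uuid"
)

func main() {
    a := uuid.NewSHA1(uuid.NameSpaceDNS, []byte("example.com"))
    b := uuid.NewSHA1(uuid.NameSpaceDNS, []byte("example.com"))
    fmt.Println(a == b)      // true: same namespace + name => same UUID
    fmt.Println(a.Version()) // prints the version (5 for SHA1-based UUIDs)
}
```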
38 vendor/github.com/google/uuid/marshal.go generated vendored Normal file
@@ -0,0 +1,38 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package uuid

import "fmt"

// MarshalText implements encoding.TextMarshaler.
func (uuid UUID) MarshalText() ([]byte, error) {
    var js [36]byte
    encodeHex(js[:], uuid)
    return js[:], nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (uuid *UUID) UnmarshalText(data []byte) error {
    id, err := ParseBytes(data)
    if err != nil {
        return err
    }
    *uuid = id
    return nil
}

// MarshalBinary implements encoding.BinaryMarshaler.
func (uuid UUID) MarshalBinary() ([]byte, error) {
    return uuid[:], nil
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler.
func (uuid *UUID) UnmarshalBinary(data []byte) error {
    if len(data) != 16 {
        return fmt.Errorf("invalid UUID (got %d bytes)", len(data))
    }
    copy(uuid[:], data)
    return nil
}
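Because UUID implements encoding.TextMarshaler and TextUnmarshaler, encoding/json picks up the canonical string form automatically. A round-trip sketch (the `record` type is hypothetical):

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/google/uuid"
)

// record is a hypothetical type used only for illustration.
type record struct {
    ID uuid.UUID `json:"id"`
}

func main() {
    r := record{ID: uuid.New()}
    out, err := json.Marshal(r) // uses MarshalText: {"id":"xxxxxxxx-..."}
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))

    var back record
    if err := json.Unmarshal(out, &back); err != nil { // uses UnmarshalText
        panic(err)
    }
    fmt.Println(back.ID == r.ID) // true: the same 16 bytes come back
}
```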
90 vendor/github.com/google/uuid/node.go generated vendored Normal file
@@ -0,0 +1,90 @@
// Copyright 2016 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package uuid

import (
    "sync"
)

var (
    nodeMu sync.Mutex
    ifname string  // name of interface being used
    nodeID [6]byte // hardware for version 1 UUIDs
    zeroID [6]byte // nodeID with only 0's
)

// NodeInterface returns the name of the interface from which the NodeID was
// derived. The interface "user" is returned if the NodeID was set by
// SetNodeID.
func NodeInterface() string {
    defer nodeMu.Unlock()
    nodeMu.Lock()
    return ifname
}

// SetNodeInterface selects the hardware address to be used for Version 1 UUIDs.
// If name is "" then the first usable interface found will be used or a random
// Node ID will be generated. If a named interface cannot be found then false
// is returned.
//
// SetNodeInterface never fails when name is "".
func SetNodeInterface(name string) bool {
    defer nodeMu.Unlock()
    nodeMu.Lock()
    return setNodeInterface(name)
}

func setNodeInterface(name string) bool {
    iname, addr := getHardwareInterface(name) // null implementation for js
    if iname != "" && addr != nil {
        ifname = iname
        copy(nodeID[:], addr)
        return true
    }

    // We found no interfaces with a valid hardware address. If name
    // does not specify a specific interface generate a random Node ID
    // (section 4.1.6)
    if name == "" {
        ifname = "random"
        randomBits(nodeID[:])
        return true
    }
    return false
}

// NodeID returns a slice of a copy of the current Node ID, setting the Node ID
// if not already set.
func NodeID() []byte {
    defer nodeMu.Unlock()
    nodeMu.Lock()
    if nodeID == zeroID {
        setNodeInterface("")
    }
    nid := nodeID
    return nid[:]
}

// SetNodeID sets the Node ID to be used for Version 1 UUIDs. The first 6 bytes
// of id are used. If id is less than 6 bytes then false is returned and the
// Node ID is not set.
func SetNodeID(id []byte) bool {
    if len(id) < 6 {
        return false
    }
    defer nodeMu.Unlock()
    nodeMu.Lock()
    copy(nodeID[:], id)
    ifname = "user"
    return true
}

// NodeID returns the 6 byte node id encoded in uuid. It returns nil if uuid is
// not valid. The NodeID is only well defined for version 1 and 2 UUIDs.
func (uuid UUID) NodeID() []byte {
    var node [6]byte
    copy(node[:], uuid[10:])
    return node[:]
}
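One practical use of `SetNodeID`, as a sketch: pinning the node bytes so that time-based (Version 1) UUIDs do not embed the machine's real MAC address. `uuid.NewUUID` is the Version 1 constructor defined elsewhere in this package, and the node ID value below is arbitrary:

```go
package main

import (
    "fmt"

    "github.com/google/uuid"
)

func main() {
    // Arbitrary, locally administered 6-byte node ID (illustrative only).
    uuid.SetNodeID([]byte{0x02, 0x00, 0x5e, 0x10, 0x00, 0x01})

    id, err := uuid.NewUUID() // time-based Version 1 UUID
    if err != nil {
        panic(err)
    }
    // The last 6 bytes of the UUID echo the configured node ID.
    fmt.Printf("%x\n", id.NodeID())
}
```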
12 vendor/github.com/google/uuid/node_js.go generated vendored Normal file
@@ -0,0 +1,12 @@
// Copyright 2017 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build js

package uuid

// getHardwareInterface returns nil values for the JS version of the code.
// This removes the "net" dependency, because it is not used in the browser.
// Using the "net" library inflates the size of the transpiled JS code by 673k bytes.
func getHardwareInterface(name string) (string, []byte) { return "", nil }
33 vendor/github.com/google/uuid/node_net.go generated vendored Normal file
@@ -0,0 +1,33 @@
// Copyright 2017 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !js

package uuid

import "net"

var interfaces []net.Interface // cached list of interfaces

// getHardwareInterface returns the name and hardware address of interface name.
// If name is "" then the name and hardware address of one of the system's
// interfaces is returned. If no interfaces are found (name does not exist or
// there are no interfaces) then "", nil is returned.
//
// Only addresses of at least 6 bytes are returned.
func getHardwareInterface(name string) (string, []byte) {
    if interfaces == nil {
        var err error
        interfaces, err = net.Interfaces()
        if err != nil {
            return "", nil
        }
    }
    for _, ifs := range interfaces {
        if len(ifs.HardwareAddr) >= 6 && (name == "" || name == ifs.Name) {
            return ifs.Name, ifs.HardwareAddr
        }
    }
    return "", nil
}
Some files were not shown because too many files have changed in this diff.