diff --git a/.claude/commands/build.md b/.claude/commands/build.md
deleted file mode 100644
index 0b3b9a6..0000000
--- a/.claude/commands/build.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-description: Build the RelSpec binary
----
-
-Build the RelSpec project by running `make build`. Report the build status and any errors encountered.
diff --git a/.claude/commands/coverage.md b/.claude/commands/coverage.md
deleted file mode 100644
index 79e11a8..0000000
--- a/.claude/commands/coverage.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-description: Generate test coverage report
----
-
-Generate and display test coverage for RelSpec:
-1. Run `go test -cover ./...` to get coverage percentage
-2. If detailed coverage is needed, run `go test -coverprofile=coverage.out ./...` and then `go tool cover -html=coverage.out` to generate HTML report
-
-Show coverage statistics and identify areas needing more tests.
diff --git a/.claude/commands/lint.md b/.claude/commands/lint.md
deleted file mode 100644
index 0992c98..0000000
--- a/.claude/commands/lint.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-description: Run Go linters on the codebase
----
-
-Run linting tools on the RelSpec codebase:
-1. First run `gofmt -l .` to check formatting
-2. If golangci-lint is available, run `golangci-lint run ./...`
-3. Run `go vet ./...` to check for suspicious constructs
-
-Report any issues found and suggest fixes if needed.
diff --git a/.claude/commands/test.md b/.claude/commands/test.md
deleted file mode 100644
index 8375a8e..0000000
--- a/.claude/commands/test.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-description: Run all tests for the RelSpec project
----
-
-Run `go test ./...` to execute all unit tests in the project. Show a summary of the results and highlight any failures.
diff --git a/.codex b/.codex
new file mode 100644
index 0000000..e69de29
diff --git a/.gitea/workflows/release.yml b/.gitea/workflows/release.yml
new file mode 100644
index 0000000..2d56e9b
--- /dev/null
+++ b/.gitea/workflows/release.yml
@@ -0,0 +1,327 @@
+name: Release
+
+on:
+ push:
+ tags:
+ - 'v*'
+ workflow_dispatch:
+ inputs:
+ tag:
+ description: 'Tag to release (e.g. v1.2.3)'
+ required: true
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: actions/setup-go@v5
+ with:
+ go-version-file: go.mod
+
+ - name: Test
+ run: go test ./...
+
+ - name: Lint
+ run: go vet ./...
+
+ release:
+ needs: test
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-go@v5
+ with:
+ go-version-file: go.mod
+
+ - name: Build release binaries
+ run: |
+ VERSION="${{ github.event.inputs.tag || github.ref_name }}"
+ for target in "linux/amd64" "linux/arm64" "darwin/amd64" "darwin/arm64" "windows/amd64"; do
+ GOOS="${target%/*}"
+ GOARCH="${target#*/}"
+ EXT=""
+ [ "$GOOS" = "windows" ] && EXT=".exe"
+ NAME="relspec-${GOOS}-${GOARCH}${EXT}"
+ GOOS="$GOOS" GOARCH="$GOARCH" go build \
+ -trimpath \
+ -ldflags "-X git.warky.dev/wdevs/relspecgo/cmd/relspec.version=${VERSION}" \
+ -o "$NAME" ./cmd/relspec
+ echo "Built $NAME"
+ done
+
+ - name: Create release and upload assets
+ run: |
+ TAG="${{ github.event.inputs.tag || github.ref_name }}"
+ API="${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases"
+
+ # Collect commits since the previous tag (or last 20 if no prior tag)
+ PREV_TAG=$(git tag --sort=-version:refname | grep -v "^${TAG}$" | head -1)
+ # Note: HEAD~20..HEAD would fail fatally on histories shorter than 21 commits
+ if [ -n "$PREV_TAG" ]; then
+ NOTES=$(git log "${PREV_TAG}..${TAG}" --pretty=format:"- %s" --no-merges)
+ else
+ NOTES=$(git log --max-count=20 HEAD --pretty=format:"- %s" --no-merges)
+ fi
+ BODY="## What's changed"$'\n'"${NOTES}"
+
+ # Escape for JSON
+ BODY_JSON=$(printf '%s' "$BODY" | python3 -c 'import json,sys; print(json.dumps(sys.stdin.read()))')
+
+ RELEASE=$(curl -s -X POST "$API" \
+ -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Content-Type: application/json" \
+ -d "{\"tag_name\":\"${TAG}\",\"name\":\"${TAG}\",\"body\":${BODY_JSON}}")
+
+ UPLOAD_URL=$(echo "$RELEASE" | grep -o '"upload_url":"[^"]*"' | cut -d'"' -f4 | sed 's/{[^}]*}//')
+ if [ -z "$UPLOAD_URL" ]; then
+ echo "Failed to create release: $RELEASE"
+ exit 1
+ fi
+
+ for f in relspec-*; do
+ echo "Uploading $f..."
+ curl -s -X POST "${UPLOAD_URL}?name=${f}" \
+ -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Content-Type: application/octet-stream" \
+ --data-binary "@${f}" > /dev/null
+ done
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ pkg-aur:
+ needs: release
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Publish to AUR
+ env:
+ AUR_SSH_KEY: ${{ secrets.AUR_SSH_KEY }}
+ run: |
+ set -euo pipefail
+
+ VERSION="${{ github.event.inputs.tag || github.ref_name }}"
+ PKGVER="${VERSION#v}"
+ AUR_KEY_PATH="$HOME/.ssh/aur"
+ AUR_KNOWN_HOSTS="$HOME/.ssh/known_hosts"
+
+ # Setup SSH for AUR
+ mkdir -p ~/.ssh
+ chmod 700 ~/.ssh
+
+ if [ -z "${AUR_SSH_KEY:-}" ]; then
+ echo "AUR_SSH_KEY is empty"
+ exit 1
+ fi
+
+ # Support raw multiline keys, escaped \\n secrets, or base64-encoded keys.
+ CLEAN_AUR_SSH_KEY="$(printf '%s' "$AUR_SSH_KEY" | tr -d '\r')"
+ if printf '%s' "$CLEAN_AUR_SSH_KEY" | grep -q "^-----BEGIN .*PRIVATE KEY-----$"; then
+ printf '%s\n' "$CLEAN_AUR_SSH_KEY" > "$AUR_KEY_PATH"
+ elif printf '%s' "$CLEAN_AUR_SSH_KEY" | grep -q '\\n'; then
+ printf '%b\n' "$CLEAN_AUR_SSH_KEY" > "$AUR_KEY_PATH"
+ else
+ if printf '%s' "$CLEAN_AUR_SSH_KEY" | tr -d '[:space:]' | base64 --decode > "$AUR_KEY_PATH" 2>/dev/null; then
+ :
+ else
+ printf '%s\n' "$CLEAN_AUR_SSH_KEY" > "$AUR_KEY_PATH"
+ fi
+ fi
+ chmod 600 "$AUR_KEY_PATH"
+
+ if ! ssh-keygen -y -f "$AUR_KEY_PATH" >/dev/null 2>&1; then
+ echo "AUR_SSH_KEY is not a valid private key."
+ echo "Store it as a raw private key, an escaped private key with \\n, or a base64-encoded private key."
+ exit 1
+ fi
+
+ ssh-keyscan -t rsa,ed25519 aur.archlinux.org >> "$AUR_KNOWN_HOSTS"
+ chmod 644 "$AUR_KNOWN_HOSTS"
+
+ # Clone AUR repo
+ GIT_SSH_COMMAND="ssh -o IdentitiesOnly=yes -o StrictHostKeyChecking=yes -o UserKnownHostsFile=$AUR_KNOWN_HOSTS -i $AUR_KEY_PATH" \
+ git clone ssh://aur@aur.archlinux.org/relspec.git aur-repo
+
+ CURRENT_PKGVER=$(awk -F= '/^pkgver=/ {print $2; exit}' aur-repo/PKGBUILD 2>/dev/null | tr -d "[:space:]" || true)
+ CURRENT_PKGREL=$(awk -F= '/^pkgrel=/ {print $2; exit}' aur-repo/PKGBUILD 2>/dev/null | tr -d "[:space:]" || true)
+
+ if [ "$CURRENT_PKGVER" = "$PKGVER" ]; then
+ case "$CURRENT_PKGREL" in
+ ''|*[!0-9]*)
+ echo "Unsupported pkgrel in AUR repo: ${CURRENT_PKGREL}"
+ exit 1
+ ;;
+ *)
+ PKGREL=$((CURRENT_PKGREL + 1))
+ ;;
+ esac
+ else
+ PKGREL=1
+ fi
+
+ echo "Publishing AUR package version ${PKGVER}-${PKGREL}"
+
+ # Compute SHA256 of the source archive from the same URL the PKGBUILD will download.
+ SHA=$(curl -fsSL "https://git.warky.dev/wdevs/relspecgo/archive/v${PKGVER}.zip" | sha256sum | cut -d' ' -f1)
+
+ # Update PKGBUILD — keep remote source URL, bump version/checksum, and increment pkgrel for same-version rebuilds.
+ sed -e "s/^pkgver=.*/pkgver=${PKGVER}/" \
+ -e "s/^pkgrel=.*/pkgrel=${PKGREL}/" \
+ -e "s/^sha256sums=.*/sha256sums=('${SHA}')/" \
+ linux/arch/PKGBUILD > aur-repo/PKGBUILD
+
+ # Generate .SRCINFO inside an Arch container (docker cp avoids DinD volume mount issues)
+ CID=$(docker run -d archlinux:latest sleep infinity)
+ docker cp aur-repo/PKGBUILD $CID:/build/PKGBUILD || (docker exec $CID mkdir -p /build && docker cp aur-repo/PKGBUILD $CID:/build/PKGBUILD)
+ docker exec $CID bash -c "
+ pacman -Sy --noconfirm base-devel &&
+ useradd -m builder &&
+ chown -R builder:builder /build &&
+ runuser -u builder -- bash -c 'cd /build && makepkg --printsrcinfo > .SRCINFO'
+ "
+ docker cp $CID:/build/.SRCINFO aur-repo/.SRCINFO
+ docker rm -f $CID
+
+ # Commit and push to AUR master
+ cd aur-repo
+ git config user.email "hein@warky.dev"
+ git config user.name "Hein"
+ git add PKGBUILD .SRCINFO
+ git commit -m "Update to v${PKGVER}-${PKGREL}"
+ GIT_SSH_COMMAND="ssh -o IdentitiesOnly=yes -o StrictHostKeyChecking=yes -o UserKnownHostsFile=$AUR_KNOWN_HOSTS -i $AUR_KEY_PATH" \
+ git push origin HEAD:master
+
+ pkg-deb:
+ needs: release
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-go@v5
+ with:
+ go-version-file: go.mod
+
+ - name: Build Debian packages
+ run: |
+ VERSION="${{ github.event.inputs.tag || github.ref_name }}"
+ PKGVER="${VERSION#v}"
+
+ for GOARCH in amd64 arm64; do
+ GOOS=linux GOARCH=$GOARCH go build \
+ -trimpath \
+ -ldflags "-X git.warky.dev/wdevs/relspecgo/cmd/relspec.version=${PKGVER}" \
+ -o relspec ./cmd/relspec
+
+ PKGDIR="relspec_${PKGVER}_${GOARCH}"
+ mkdir -p "${PKGDIR}/DEBIAN"
+ mkdir -p "${PKGDIR}/usr/bin"
+
+ install -m755 relspec "${PKGDIR}/usr/bin/relspec"
+
+ sed -e "s/VERSION/${PKGVER}/" \
+ -e "s/ARCH/${GOARCH}/" \
+ linux/debian/control > "${PKGDIR}/DEBIAN/control"
+
+ dpkg-deb --build --root-owner-group "${PKGDIR}"
+ echo "Built ${PKGDIR}.deb"
+ done
+
+ - name: Upload to release
+ run: |
+ TAG="${{ github.event.inputs.tag || github.ref_name }}"
+ RELEASE=$(curl -s "${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases/tags/${TAG}" \
+ -H "Authorization: token ${GITHUB_TOKEN}")
+ UPLOAD_URL=$(echo "$RELEASE" | grep -o '"upload_url":"[^"]*"' | cut -d'"' -f4 | sed 's/{[^}]*}//')
+ for f in *.deb; do
+ FNAME=$(basename "$f")
+ echo "Uploading $FNAME..."
+ curl -s -X POST "${UPLOAD_URL}?name=${FNAME}" \
+ -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Content-Type: application/octet-stream" \
+ --data-binary "@${f}" > /dev/null
+ done
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ pkg-rpm:
+ needs: release
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Build RPM
+ run: |
+ set -euo pipefail
+
+ VERSION="${{ github.event.inputs.tag || github.ref_name }}"
+ PKGVER="${VERSION#v}"
+ GO_VER="$(awk '/^go / { print $2; exit }' go.mod)"
+
+ if [ -z "${GO_VER}" ]; then
+ echo "Failed to determine Go version from go.mod"
+ exit 1
+ fi
+
+ # Source tarball — prefix=relspec-VERSION/ matches RPM %autosetup convention
+ git archive --format=tar.gz --prefix=relspec-${PKGVER}/ HEAD \
+ > relspec-${PKGVER}.tar.gz
+
+ # Patch spec version
+ sed -i "s/^Version:.*/Version: ${PKGVER}/" linux/centos/relspec.spec
+
+ mkdir -p linux/centos/out
+ CID=$(docker create \
+ -e GO_VER="${GO_VER}" \
+ -e PKGVER="${PKGVER}" \
+ -w /build \
+ rockylinux:9 \
+ bash -lc "
+ set -euo pipefail
+ dnf install -y rpm-build git &&
+ curl -fsSL https://go.dev/dl/go\${GO_VER}.linux-amd64.tar.gz | tar -C /usr/local -xz &&
+ export PATH=\$PATH:/usr/local/go/bin &&
+ mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS} &&
+ cp relspec-${PKGVER}.tar.gz ~/rpmbuild/SOURCES/ &&
+ cp linux/centos/relspec.spec ~/rpmbuild/SPECS/ &&
+ rpmbuild --nodeps -ba ~/rpmbuild/SPECS/relspec.spec
+ ")
+
+ cleanup() {
+ docker rm -f "$CID" >/dev/null 2>&1 || true
+ }
+ trap cleanup EXIT
+
+ docker cp relspec-${PKGVER}.tar.gz "$CID:/build/relspec-${PKGVER}.tar.gz"
+ docker cp linux "$CID:/build/linux"
+
+ docker start -a "$CID"
+ docker cp "$CID:/root/rpmbuild/RPMS/." linux/centos/out/
+
+ trap - EXIT
+ cleanup
+
+ - name: Upload to release
+ run: |
+ TAG="${{ github.event.inputs.tag || github.ref_name }}"
+ RELEASE=$(curl -s "${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases/tags/${TAG}" \
+ -H "Authorization: token ${GITHUB_TOKEN}")
+ UPLOAD_URL=$(echo "$RELEASE" | grep -o '"upload_url":"[^"]*"' | cut -d'"' -f4 | sed 's/{[^}]*}//')
+ while IFS= read -r f; do
+ FNAME=$(basename "$f")
+ echo "Uploading $FNAME..."
+ curl -s -X POST "${UPLOAD_URL}?name=${FNAME}" \
+ -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Content-Type: application/octet-stream" \
+ --data-binary "@${f}" > /dev/null
+ done < <(find linux/centos/out -name "*.rpm")
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 8cccf07..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,117 +0,0 @@
-name: Release
-run-name: "Making Release"
-on:
- push:
- tags:
- - 'v*.*.*'
-
-jobs:
- build-and-release:
- name: Build and Release
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Set up Go
- uses: actions/setup-go@v5
- with:
- go-version: '1.25'
-
- - name: Get version from tag
- id: get_version
- run: |
- echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- echo "BUILD_DATE=$(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
- echo "Version: ${GITHUB_REF#refs/tags/}"
-
- - name: Build binaries for multiple platforms
- run: |
- mkdir -p dist
-
- # Linux AMD64
- GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
-
- # Linux ARM64
- GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
-
- # macOS AMD64
- GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
-
- # macOS ARM64 (Apple Silicon)
- GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
-
- # Windows AMD64
- GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
-
- # Create checksums
- cd dist
- sha256sum * > checksums.txt
- cd ..
-
- - name: Generate release notes
- id: release_notes
- run: |
- # Get the previous tag
- previous_tag=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
-
- if [ -z "$previous_tag" ]; then
- # No previous tag, get all commits
- commits=$(git log --pretty=format:"- %s (%h)" --no-merges)
- else
- # Get commits since the previous tag
- commits=$(git log "${previous_tag}..HEAD" --pretty=format:"- %s (%h)" --no-merges)
- fi
-
- # Create release notes
- cat > release_notes.md << EOF
- # Release ${{ steps.get_version.outputs.VERSION }}
-
- ## Changes
-
- ${commits}
-
- ## Installation
-
- Download the appropriate binary for your platform:
-
- - **Linux (AMD64)**: \`relspec-linux-amd64\`
- - **Linux (ARM64)**: \`relspec-linux-arm64\`
- - **macOS (Intel)**: \`relspec-darwin-amd64\`
- - **macOS (Apple Silicon)**: \`relspec-darwin-arm64\`
- - **Windows (AMD64)**: \`relspec-windows-amd64.exe\`
-
- Make the binary executable (Linux/macOS):
- \`\`\`bash
- chmod +x relspec-*
- \`\`\`
-
- Verify the download with the provided checksums.
- EOF
-
- - name: Create Release
- uses: softprops/action-gh-release@v1
- with:
- body_path: release_notes.md
- files: |
- dist/relspec-linux-amd64
- dist/relspec-linux-arm64
- dist/relspec-darwin-amd64
- dist/relspec-darwin-arm64
- dist/relspec-windows-amd64.exe
- dist/checksums.txt
- draft: false
- prerelease: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Summary
- run: |
- echo "Release ${{ steps.get_version.outputs.VERSION }} created successfully!"
- echo "Binaries built for:"
- echo " - Linux (amd64, arm64)"
- echo " - macOS (amd64, arm64)"
- echo " - Windows (amd64)"
diff --git a/Makefile b/Makefile
index 29dd9e7..34f3f77 100644
--- a/Makefile
+++ b/Makefile
@@ -204,30 +204,21 @@ release: ## Create and push a new release tag (auto-increments patch version)
git push origin "$$version"; \
echo "Tag $$version created and pushed to remote repository."
-release-version: ## Create and push a release with specific version (use: make release-version VERSION=v1.2.3)
- @if [ -z "$(VERSION)" ]; then \
- echo "Error: VERSION is required. Usage: make release-version VERSION=v1.2.3"; \
- exit 1; \
- fi
- @version="$(VERSION)"; \
- if ! echo "$$version" | grep -q "^v"; then \
- version="v$$version"; \
- fi; \
- echo "Creating release: $$version"; \
- latest_tag=$$(git describe --tags --abbrev=0 2>/dev/null || echo ""); \
- if [ -z "$$latest_tag" ]; then \
- commit_logs=$$(git log --pretty=format:"- %s" --no-merges); \
- else \
- commit_logs=$$(git log "$${latest_tag}..HEAD" --pretty=format:"- %s" --no-merges); \
- fi; \
- if [ -z "$$commit_logs" ]; then \
- tag_message="Release $$version"; \
- else \
- tag_message="Release $$version\n\n$$commit_logs"; \
- fi; \
- git tag -a "$$version" -m "$$tag_message"; \
- git push origin "$$version"; \
- echo "Tag $$version created and pushed to remote repository."
+release-version: ## Auto-increment patch version, update package files, commit, tag, and push
+ @CURRENT=$$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.0.0"); \
+ MAJOR=$$(echo $$CURRENT | sed 's/v\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\1/'); \
+ MINOR=$$(echo $$CURRENT | sed 's/v\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\2/'); \
+ PATCH=$$(echo $$CURRENT | sed 's/v\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\3/'); \
+ NEXT="v$$MAJOR.$$MINOR.$$((PATCH + 1))"; \
+ PKGVER="$$MAJOR.$$MINOR.$$((PATCH + 1))"; \
+ echo "Current: $$CURRENT → Next: $$NEXT"; \
+ sed -i "s/^pkgver=.*/pkgver=$$PKGVER/" linux/arch/PKGBUILD; \
+ sed -i "s/^Version:.*/Version: $$PKGVER/" linux/centos/relspec.spec; \
+ git add linux/arch/PKGBUILD linux/centos/relspec.spec; \
+ git commit -m "chore(release): update package version to $$PKGVER"; \
+ git tag -a "$$NEXT" -m "Release $$NEXT"; \
+ git push origin HEAD "$$NEXT"; \
+ echo "Pushed $$NEXT — release workflow triggered"
help: ## Display this help screen
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
diff --git a/README.md b/README.md
index e1e3af2..0ea4f71 100644
--- a/README.md
+++ b/README.md
@@ -6,264 +6,165 @@
[](https://go.dev/dl/)
[](LICENSE)
-> Database Relations Specification Tool for Go
+> Bidirectional database schema conversion, validation, and templating tool.
-RelSpec is a comprehensive database relations management tool that reads, transforms, and writes database table specifications across multiple formats and ORMs.
+
-## Overview
-
-RelSpec provides bidirectional conversion, comparison, and validation of database specification formats, allowing you to:
-- Inspect live databases and extract their structure
-- Validate schemas against configurable rules and naming conventions
-- Convert between different ORM models (GORM, Bun, etc.)
-- Transform legacy schema definitions (Clarion DCTX, XML, JSON, etc.)
-- Generate standardized specification files (JSON, YAML, etc.)
-- Compare database schemas and track changes
-
-
-
-## Features
-
-### Readers (Input Formats)
-
-RelSpec can read database schemas from multiple sources:
-
-#### ORM Models
-- [GORM](pkg/readers/gorm/README.md) - Go GORM model definitions
-- [Bun](pkg/readers/bun/README.md) - Go Bun model definitions
-- [Drizzle](pkg/readers/drizzle/README.md) - TypeScript Drizzle ORM schemas
-- [Prisma](pkg/readers/prisma/README.md) - Prisma schema language
-- [TypeORM](pkg/readers/typeorm/README.md) - TypeScript TypeORM entities
-
-#### Database Inspection
-- [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
-- [SQLite](pkg/readers/sqlite/README.md) - Direct SQLite database introspection
-
-#### Schema Formats
-- [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
-- [DCTX](pkg/readers/dctx/README.md) - Clarion database dictionary format
-- [DrawDB](pkg/readers/drawdb/README.md) - DrawDB JSON format
-- [GraphQL](pkg/readers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
-- [JSON](pkg/readers/json/README.md) - RelSpec canonical JSON format
-- [YAML](pkg/readers/yaml/README.md) - RelSpec canonical YAML format
-
-### Writers (Output Formats)
-
-RelSpec can write database schemas to multiple formats:
-
-#### ORM Models
-- [GORM](pkg/writers/gorm/README.md) - Generate GORM-compatible Go structs
-- [Bun](pkg/writers/bun/README.md) - Generate Bun-compatible Go structs
-- [Drizzle](pkg/writers/drizzle/README.md) - Generate Drizzle ORM TypeScript schemas
-- [Prisma](pkg/writers/prisma/README.md) - Generate Prisma schema files
-- [TypeORM](pkg/writers/typeorm/README.md) - Generate TypeORM TypeScript entities
-
-#### Database DDL
-- [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
-- [SQLite](pkg/writers/sqlite/README.md) - SQLite DDL with automatic schema flattening
-
-#### Schema Formats
-- [DBML](pkg/writers/dbml/README.md) - Database Markup Language
-- [DCTX](pkg/writers/dctx/README.md) - Clarion database dictionary format
-- [DrawDB](pkg/writers/drawdb/README.md) - DrawDB JSON format
-- [GraphQL](pkg/writers/graphql/README.md) - GraphQL Schema Definition Language (SDL)
-- [JSON](pkg/writers/json/README.md) - RelSpec canonical JSON format
-- [YAML](pkg/writers/yaml/README.md) - RelSpec canonical YAML format
-
-### Inspector (Schema Validation)
-
-RelSpec includes a powerful schema validation and linting tool:
-
-- [Inspector](pkg/inspector/README.md) - Validate database schemas against configurable rules
- - Enforce naming conventions (snake_case, camelCase, custom patterns)
- - Check primary key and foreign key standards
- - Detect missing indexes on foreign keys
- - Prevent use of SQL reserved keywords
- - Ensure schema integrity (missing PKs, orphaned FKs, circular dependencies)
- - Support for custom validation rules
- - Multiple output formats (Markdown with colors, JSON)
- - CI/CD integration ready
-
-## Use of AI
-[Rules and use of AI](./AI_USE.md)
-
-## User Interface
-
-RelSpec provides an interactive terminal-based user interface for managing and editing database schemas. The UI allows you to:
-
-- **Browse Databases** - Navigate through your database structure with an intuitive menu system
-- **Edit Schemas** - Create, modify, and organize database schemas
-- **Manage Tables** - Add, update, or delete tables with full control over structure
-- **Configure Columns** - Define column properties, data types, constraints, and relationships
-- **Interactive Editing** - Real-time validation and feedback as you make changes
-
-The interface supports multiple input formats, making it easy to load, edit, and save your database definitions in various formats.
-
-
-
-
-
-
-
-
-
-
-
-
-## Installation
+## Install
```bash
-go get github.com/wdevs/relspecgo
-
go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
```
-## Usage
+## Supported Formats
-### Interactive Schema Editor
+| Direction | Formats |
+|-----------|---------|
+| **Readers** | `bun` `dbml` `dctx` `drawdb` `drizzle` `gorm` `graphql` `json` `mssql` `pgsql` `prisma` `sqldir` `sqlite` `typeorm` `yaml` |
+| **Writers** | `bun` `dbml` `dctx` `drawdb` `drizzle` `gorm` `graphql` `json` `mssql` `pgsql` `prisma` `sqlexec` `sqlite` `template` `typeorm` `yaml` |
+
+## Commands
+
+### `convert` — Schema conversion
```bash
-# Launch interactive editor with a DBML schema
-relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
+# PostgreSQL → GORM models
+relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+ --to gorm --to-path models/ --package models
-# Edit PostgreSQL database in place
-relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
- --to pgsql --to-conn "postgres://user:pass@localhost/mydb"
+# DBML → PostgreSQL DDL
+relspec convert --from dbml --from-path schema.dbml --to pgsql --to-path schema.sql
-# Edit JSON schema and save as GORM models
-relspec edit --from json --from-path db.json --to gorm --to-path models/
+# PostgreSQL → SQLite (auto flattens schemas)
+relspec convert --from pgsql --from-conn "postgres://..." --to sqlite --to-path schema.sql
+
+# Multiple input files merged
+relspec convert --from json --from-list "a.json,b.json" --to yaml --to-path merged.yaml
```
-The `edit` command launches an interactive terminal user interface where you can:
-- Browse and navigate your database structure
-- Create, modify, and delete schemas, tables, and columns
-- Configure column properties, constraints, and relationships
-- Save changes to various formats
-- Import and merge schemas from other databases
+PostgreSQL connections opened by relspec set `application_name` by default to
+`relspecgo/` followed by an internal component suffix (e.g. the reader or writer in use).
+If you need a custom value, provide `application_name` explicitly in the connection
+string query parameters.
-### Schema Merging
+### `merge` — Additive schema merge (never modifies existing items)
```bash
-# Merge two JSON schemas (additive merge - adds missing items only)
+# Merge two JSON schemas
relspec merge --target json --target-path base.json \
--source json --source-path additions.json \
--output json --output-path merged.json
-# Merge PostgreSQL database into JSON, skipping specific tables
+# Merge PostgreSQL into JSON, skipping tables
relspec merge --target json --target-path current.json \
- --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
+ --source pgsql --source-conn "postgres://user:pass@localhost/db" \
--output json --output-path updated.json \
--skip-tables "audit_log,temp_tables"
-
-# Cross-format merge (DBML + YAML → JSON)
-relspec merge --target dbml --target-path base.dbml \
- --source yaml --source-path additions.yaml \
- --output json --output-path result.json \
- --skip-relations --skip-views
```
-The `merge` command combines two database schemas additively:
-- Adds missing schemas, tables, columns, and other objects
-- Never modifies or deletes existing items (safe operation)
-- Supports selective merging with skip options (domains, relations, enums, views, sequences, specific tables)
-- Works across any combination of supported formats
-- Perfect for integrating multiple schema definitions or applying patches
+Skip flags: `--skip-relations` `--skip-views` `--skip-domains` `--skip-enums` `--skip-sequences`
-### Schema Conversion
+### `inspect` — Schema validation / linting
```bash
-# Convert PostgreSQL database to GORM models
-relspec convert --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
- --to gorm --to-path models/ --package models
-
-# Convert GORM models to Bun
-relspec convert --from gorm --from-path models.go \
- --to bun --to-path bun_models.go --package models
-
-# Export database schema to JSON
-relspec convert --from pgsql --from-conn "postgres://..." \
- --to json --to-path schema.json
-
-# Convert DBML to PostgreSQL SQL
-relspec convert --from dbml --from-path schema.dbml \
- --to pgsql --to-path schema.sql
-
-# Convert PostgreSQL database to SQLite (with automatic schema flattening)
-relspec convert --from pgsql --from-conn "postgres://..." \
- --to sqlite --to-path sqlite_schema.sql
-```
-
-### Schema Validation
-
-```bash
-# Validate a PostgreSQL database with default rules
+# Validate PostgreSQL database
relspec inspect --from pgsql --from-conn "postgres://user:pass@localhost/mydb"
-# Validate DBML file with custom rules
+# Validate DBML with custom rules
relspec inspect --from dbml --from-path schema.dbml --rules .relspec-rules.yaml
-# Generate JSON validation report
-relspec inspect --from json --from-path db.json \
- --output-format json --output report.json
+# JSON report output
+relspec inspect --from json --from-path db.json --output-format json --output report.json
-# Validate specific schema only
+# Filter to specific schema
relspec inspect --from pgsql --from-conn "..." --schema public
```
-### Schema Comparison
+Rules: naming conventions, PK/FK standards, missing indexes, reserved keywords, circular dependencies.
+
+### `diff` — Schema comparison
```bash
-# Compare two database schemas
relspec diff --from pgsql --from-conn "postgres://localhost/db1" \
--to pgsql --to-conn "postgres://localhost/db2"
```
+### `templ` — Custom template rendering
+
+```bash
+# Render database schema to Markdown docs
+relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
+ --template docs.tmpl --output schema-docs.md
+
+# One TypeScript file per table
+relspec templ --from dbml --from-path schema.dbml \
+ --template ts-model.tmpl --mode table \
+ --output ./models/ --filename-pattern "{{.Name | toCamelCase}}.ts"
+```
+
+Modes: `database` (default) · `schema` · `table` · `script`
+
+Template functions: string utils (`toCamelCase`, `toSnakeCase`, `pluralize`, …), type converters (`sqlToGo`, `sqlToTypeScript`, …), filters, loop helpers, safe access.
+
+### `edit` — Interactive TUI editor
+
+```bash
+# Edit DBML schema interactively
+relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
+
+# Edit live PostgreSQL database
+relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+ --to pgsql --to-conn "postgres://user:pass@localhost/mydb"
+```
+
+
+
+
+
+
+
+
+
+
+
+## Development
+
+**Prerequisites:** Go 1.24.0+
+
+```bash
+make build # → build/relspec
+make test # race detection + coverage
+make lint # requires golangci-lint
+make coverage # → coverage.html
+make install # → $GOPATH/bin
+```
+
## Project Structure
```
-relspecgo/
-├── cmd/
-│ └── relspec/ # CLI application (convert, inspect, diff, scripts)
-├── pkg/
-│ ├── readers/ # Input format readers (DBML, GORM, PostgreSQL, etc.)
-│ ├── writers/ # Output format writers (GORM, Bun, SQL, etc.)
-│ ├── inspector/ # Schema validation and linting
-│ ├── diff/ # Schema comparison
-│ ├── models/ # Internal data models
-│ ├── transform/ # Transformation logic
-│ └── pgsql/ # PostgreSQL utilities (keywords, data types)
-├── examples/ # Usage examples
-└── tests/ # Test files
+cmd/relspec/ CLI commands
+pkg/readers/ Input format readers
+pkg/writers/ Output format writers
+pkg/inspector/ Schema validation
+pkg/diff/ Schema comparison
+pkg/merge/ Schema merging
+pkg/models/ Internal data models
+pkg/transform/ Transformation logic
+pkg/pgsql/ PostgreSQL utilities
```
-## Todo
-
-[Todo List of Features](./TODO.md)
-
-## Development
-
-### Prerequisites
-- Go 1.21 or higher
-- Access to test databases (optional)
-
-### Building
-
-```bash
-go build -o relspec ./cmd/relspec
-```
-
-### Testing
-
-```bash
-go test ./...
-```
-
-## License
-
-Apache License 2.0 - See [LICENSE](LICENSE) for details.
-
-Copyright 2025 Warky Devs
-
## Contributing
-Contributions welcome. Please open an issue or submit a pull request.
\ No newline at end of file
+1. Register or sign in with GitHub at [git.warky.dev](https://git.warky.dev)
+2. Clone the repository: `git clone https://git.warky.dev/wdevs/relspecgo.git`
+3. Create a feature branch: `git checkout -b feature/your-feature-name`
+4. Commit your changes and push the branch
+5. Open a pull request with a description of the new feature or fix
+
+For questions or discussion, join the Discord: [discord.gg/74rcTujp25](https://discord.gg/74rcTujp25) — `warkyhein`
+
+## Links
+
+- [Todo](./TODO.md)
+- [AI Use Policy](./AI_USE.md)
+- [License](LICENSE) — Apache 2.0 · Copyright 2025 Warky Devs
diff --git a/Story.md b/Story.md
new file mode 100644
index 0000000..eb6fb79
--- /dev/null
+++ b/Story.md
@@ -0,0 +1,219 @@
+
+# From Scripts to RelSpec: What Years of Database Pain Taught Me
+
+It started as a need.
+A problem I’ve carried with me since my early PHP days.
+
+Every project meant doing the same work again. Same patterns, same fixes—just in a different codebase.
+It became frustrating fast.
+
+I wanted something solid. Not another workaround.
+
+## The Early Tools Phase
+
+Like most things in development, it began small.
+
+A simple PHP script.
+Then a few Python scripts.
+
+Just tools—nothing fancy. The goal was straightforward: generate code faster and remove repetitive work. I even experimented with Clarion templates at one point, trying to bend existing systems into something useful.
+
+Then came SQL scripts.
+Then PostgreSQL migration stored procedures.
+Then small Go programs using templates.
+
+Each step solved a problem I had at the time. Nothing unified. Nothing polished. Just survival tools.
+
+---
+
+## Argitek: The First Real Attempt
+
+Eventually, those scattered ideas turned into something more structured: Argitek.
+
+Argitek powered a few real systems, including Powerbid. On paper, it sounded solid:
+
+> “Argitek Next is a powerful code generation tool designed to streamline your development workflow.”
+
+And technically, it worked.
+
+It could generate code from predefined templates, adapt to different scenarios, and reduce repetitive work. But something was off.
+
+It never felt *complete*.
+Not something I could confidently release.
+
+So I did what many developers do with almost-good-enough tools—I parked it.
+
+---
+
+## The Breaking Point: Database Migrations
+
+Over the years, one problem kept coming back:
+
+Database migrations.
+
+Not the clean, theoretical kind. The real ones.
+
+* PostgreSQL to ORM mismatches
+* DBML to SQL hacks
+* GORM inconsistencies
+* Manual fixes after “automated” migrations failed
+
+It was always messy. Always unpredictable. Always more work than expected.
+
+By 2025, after a particularly tough year, I had accumulated enough of these problems to stop ignoring them.
+
+---
+
+## December 2025: RelSpecGo Begins
+
+In December 2025, I bootstrapped something new:
+
+**RelSpecGo**
+
+It started simple:
+
+* Initial LICENSE
+* Basic configuration
+* A direction
+
+By late December:
+
+* SQL writer implemented
+* Diff command added
+
+January 2026:
+
+* Documentation
+
+February 2026:
+
+* Schema editor UI (focused on relationships)
+* MSSQL DDL writer
+* Template support with `--from-list`
+
+---
+
+## April 2026: A Real Tool Emerges
+
+By April 2026, it became something I could finally stand behind.
+
+RelSpecGo reached version **1.0.44**, with:
+
+* Packaging for AUR, Debian, and RPM
+* Updated documentation and README
+* A full toolchain for:
+
+ * Convert
+ * Merge
+ * Inspect
+ * Diff
+ * Template
+ * Edit
+
+Supported formats include:
+
+* bun
+* dbml
+* drizzle
+* gorm
+* prisma
+* mssql
+* pgsql
+* sqlite
+
+Plus:
+
+* TUI editor
+* Template engine
+* Bidirectional schema handling
+
+👉 RelSpecGo: [https://git.warky.dev/wdevs/relspecgo](https://git.warky.dev/wdevs/relspecgo)
+
+This wasn’t just another generator anymore.
+It became a system for managing *database truth*.
+
+---
+
+## Lessons Learned (The Hard Way)
+
+This journey wasn’t about tools. It was about understanding databases properly.
+
+Here are the principles that stuck:
+
+### 1. Data Loss Is Not Acceptable
+
+Changing table structures should **never** result in lost data. If it does, the process is broken.
+
+### 2. Minimal Beats Clever
+
+The simpler the system, the easier it is to trust—and to fix.
+
+### 3. Respect the Database
+
+If you fight database rules, you will lose. Stay aligned with them.
+
+### 4. Indexes and Keys Matter More Than You Think
+
+Performance and correctness both depend on them. Ignore them at your own risk.
+
+### 5. Version-Control Your Backend Logic
+
+SQL scripts, functions, migrations—these must live in version control. No exceptions.
+
+### 6. It’s Not Migration—It’s Adaptation
+
+You’re not just moving data. You’re fixing inconsistencies and aligning systems.
+
+### 7. Migrations Never Go as Planned
+
+Always assume something will break. Plan for it.
+
+### 8. One Source of Truth Is Non-Negotiable
+
+Your database schema must have a single, authoritative definition.
+
+### 9. ORM Mapping Is a First-Class Concern
+
+Your application models must reflect the database correctly. Drift causes bugs.
+
+### 10. Audit Trails Are Critical
+
+If you can’t track changes, you can’t trust your system.
+
+### 11. Manage Database Functions Properly
+
+They are part of your system—not an afterthought.
+
+### 12. If It’s Hard to Understand, It’s Too Complex
+
+Clarity is a feature. Complexity is technical debt.
+
+### 13. GUIDs Have Their Place
+
+Especially when moving data across systems. They solve real problems.
+
+### 14. But Simplicity Still Wins
+
+Numbered primary keys are predictable, efficient, and easy to reason about.
+
+### 15. JSON Is Power—Use It Carefully
+
+It adds flexibility, but too much turns structure into chaos.
+
+---
+
+## Closing Thoughts
+
+Looking back, this wasn’t about building a tool.
+
+It was about:
+
+* Reducing friction
+* Making systems predictable
+* Respecting the database as the core of the system
+
+RelSpecGo is just the current result of that journey.
+
+Not the end.
+
+Just the first version that feels *right*.
diff --git a/assets/image/relspec1.jpg b/assets/image/relspec1.jpg
index 4995267..5b14be2 100644
Binary files a/assets/image/relspec1.jpg and b/assets/image/relspec1.jpg differ
diff --git a/assets/image/relspec1_512.jpg b/assets/image/relspec1_512.jpg
index 31362d6..5b14be2 100644
Binary files a/assets/image/relspec1_512.jpg and b/assets/image/relspec1_512.jpg differ
diff --git a/assets/image/relspec2.jpg b/assets/image/relspec2.jpg
deleted file mode 100644
index 334d4d9..0000000
Binary files a/assets/image/relspec2.jpg and /dev/null differ
diff --git a/assets/image/relspec3.jpg b/assets/image/relspec3.jpg
deleted file mode 100644
index 0ac0235..0000000
Binary files a/assets/image/relspec3.jpg and /dev/null differ
diff --git a/cmd/relspec/convert.go b/cmd/relspec/convert.go
index 2a1a093..807b4b4 100644
--- a/cmd/relspec/convert.go
+++ b/cmd/relspec/convert.go
@@ -8,6 +8,7 @@ import (
"github.com/spf13/cobra"
+ "git.warky.dev/wdevs/relspecgo/pkg/merge"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
@@ -45,6 +46,7 @@ var (
convertSourceType string
convertSourcePath string
convertSourceConn string
+ convertFromList []string
convertTargetType string
convertTargetPath string
convertPackageName string
@@ -166,6 +168,7 @@ func init() {
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for pgsql) or file path (for sqlite)")
+ convertCmd.Flags().StringSliceVar(&convertFromList, "from-list", nil, "Comma-separated list of source file paths to read and merge (mutually exclusive with --from-path)")
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
@@ -191,17 +194,29 @@ func runConvert(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Converter ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
+ // Validate mutually exclusive flags
+ if convertSourcePath != "" && len(convertFromList) > 0 {
+ return fmt.Errorf("--from-path and --from-list are mutually exclusive")
+ }
+
// Read source database
fmt.Fprintf(os.Stderr, "[1/2] Reading source schema...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", convertSourceType)
- if convertSourcePath != "" {
- fmt.Fprintf(os.Stderr, " Path: %s\n", convertSourcePath)
- }
- if convertSourceConn != "" {
- fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(convertSourceConn))
- }
- db, err := readDatabaseForConvert(convertSourceType, convertSourcePath, convertSourceConn)
+ var db *models.Database
+ var err error
+
+ if len(convertFromList) > 0 {
+ db, err = readDatabaseListForConvert(convertSourceType, convertFromList)
+ } else {
+ if convertSourcePath != "" {
+ fmt.Fprintf(os.Stderr, " Path: %s\n", convertSourcePath)
+ }
+ if convertSourceConn != "" {
+ fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(convertSourceConn))
+ }
+ db, err = readDatabaseForConvert(convertSourceType, convertSourcePath, convertSourceConn)
+ }
if err != nil {
return fmt.Errorf("failed to read source: %w", err)
}
@@ -237,6 +252,30 @@ func runConvert(cmd *cobra.Command, args []string) error {
return nil
}
+func readDatabaseListForConvert(dbType string, files []string) (*models.Database, error) {
+ if len(files) == 0 {
+ return nil, fmt.Errorf("file list is empty")
+ }
+
+ fmt.Fprintf(os.Stderr, " Files: %d file(s)\n", len(files))
+
+ var base *models.Database
+ for i, filePath := range files {
+ fmt.Fprintf(os.Stderr, " [%d/%d] %s\n", i+1, len(files), filePath)
+ db, err := readDatabaseForConvert(dbType, filePath, "")
+ if err != nil {
+ return nil, fmt.Errorf("failed to read %s: %w", filePath, err)
+ }
+ if base == nil {
+ base = db
+ } else {
+ merge.MergeDatabases(base, db, &merge.MergeOptions{})
+ }
+ }
+
+ return base, nil
+}
+
func readDatabaseForConvert(dbType, filePath, connString string) (*models.Database, error) {
var reader readers.Reader
diff --git a/cmd/relspec/convert_from_list_test.go b/cmd/relspec/convert_from_list_test.go
new file mode 100644
index 0000000..8922948
--- /dev/null
+++ b/cmd/relspec/convert_from_list_test.go
@@ -0,0 +1,183 @@
+package main
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+func TestReadDatabaseListForConvert_SingleFile(t *testing.T) {
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ writeTestJSON(t, file, []string{"users"})
+
+ db, err := readDatabaseListForConvert("json", []string{file})
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ if len(db.Schemas) == 0 {
+ t.Fatal("expected at least one schema")
+ }
+ if len(db.Schemas[0].Tables) != 1 {
+ t.Errorf("expected 1 table, got %d", len(db.Schemas[0].Tables))
+ }
+}
+
+func TestReadDatabaseListForConvert_MultipleFiles(t *testing.T) {
+ dir := t.TempDir()
+ file1 := filepath.Join(dir, "schema1.json")
+ file2 := filepath.Join(dir, "schema2.json")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"comments"})
+
+ db, err := readDatabaseListForConvert("json", []string{file1, file2})
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ total := 0
+ for _, s := range db.Schemas {
+ total += len(s.Tables)
+ }
+ if total != 2 {
+ t.Errorf("expected 2 tables (users + comments), got %d", total)
+ }
+}
+
+func TestReadDatabaseListForConvert_PathWithSpaces(t *testing.T) {
+ spacedDir := filepath.Join(t.TempDir(), "my schema files")
+ if err := os.MkdirAll(spacedDir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ file := filepath.Join(spacedDir, "my users schema.json")
+ writeTestJSON(t, file, []string{"users"})
+
+ db, err := readDatabaseListForConvert("json", []string{file})
+ if err != nil {
+ t.Fatalf("unexpected error with spaced path: %v", err)
+ }
+ if db == nil {
+ t.Fatal("expected non-nil database")
+ }
+}
+
+func TestReadDatabaseListForConvert_MultipleFilesPathWithSpaces(t *testing.T) {
+ spacedDir := filepath.Join(t.TempDir(), "my schema files")
+ if err := os.MkdirAll(spacedDir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ file1 := filepath.Join(spacedDir, "users schema.json")
+ file2 := filepath.Join(spacedDir, "posts schema.json")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"posts"})
+
+ db, err := readDatabaseListForConvert("json", []string{file1, file2})
+ if err != nil {
+ t.Fatalf("unexpected error with spaced paths: %v", err)
+ }
+
+ total := 0
+ for _, s := range db.Schemas {
+ total += len(s.Tables)
+ }
+ if total != 2 {
+ t.Errorf("expected 2 tables, got %d", total)
+ }
+}
+
+func TestReadDatabaseListForConvert_EmptyList(t *testing.T) {
+ _, err := readDatabaseListForConvert("json", []string{})
+ if err == nil {
+ t.Error("expected error for empty file list")
+ }
+}
+
+func TestReadDatabaseListForConvert_InvalidFile(t *testing.T) {
+ _, err := readDatabaseListForConvert("json", []string{"/nonexistent/path/file.json"})
+ if err == nil {
+ t.Error("expected error for nonexistent file")
+ }
+}
+
+func TestRunConvert_FromListMutuallyExclusiveWithFromPath(t *testing.T) {
+ saved := saveConvertState()
+ defer restoreConvertState(saved)
+
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ writeTestJSON(t, file, []string{"users"})
+
+ convertSourceType = "json"
+ convertSourcePath = file
+ convertFromList = []string{file}
+ convertTargetType = "json"
+ convertTargetPath = filepath.Join(dir, "out.json")
+
+ err := runConvert(nil, nil)
+ if err == nil {
+ t.Error("expected error when --from-path and --from-list are both set")
+ }
+}
+
+func TestRunConvert_FromListEndToEnd(t *testing.T) {
+ saved := saveConvertState()
+ defer restoreConvertState(saved)
+
+ dir := t.TempDir()
+ file1 := filepath.Join(dir, "users.json")
+ file2 := filepath.Join(dir, "posts.json")
+ outFile := filepath.Join(dir, "merged.json")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"posts"})
+
+ convertSourceType = "json"
+ convertSourcePath = ""
+ convertSourceConn = ""
+ convertFromList = []string{file1, file2}
+ convertTargetType = "json"
+ convertTargetPath = outFile
+ convertPackageName = ""
+ convertSchemaFilter = ""
+ convertFlattenSchema = false
+
+ if err := runConvert(nil, nil); err != nil {
+ t.Fatalf("runConvert() error = %v", err)
+ }
+
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunConvert_FromListEndToEndPathWithSpaces(t *testing.T) {
+ saved := saveConvertState()
+ defer restoreConvertState(saved)
+
+ spacedDir := filepath.Join(t.TempDir(), "my schema dir")
+ if err := os.MkdirAll(spacedDir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ file1 := filepath.Join(spacedDir, "users schema.json")
+ file2 := filepath.Join(spacedDir, "posts schema.json")
+ outFile := filepath.Join(spacedDir, "merged output.json")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"posts"})
+
+ convertSourceType = "json"
+ convertSourcePath = ""
+ convertSourceConn = ""
+ convertFromList = []string{file1, file2}
+ convertTargetType = "json"
+ convertTargetPath = outFile
+ convertPackageName = ""
+ convertSchemaFilter = ""
+ convertFlattenSchema = false
+
+ if err := runConvert(nil, nil); err != nil {
+ t.Fatalf("runConvert() with spaced paths error = %v", err)
+ }
+
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
diff --git a/cmd/relspec/merge.go b/cmd/relspec/merge.go
index 9c87a78..071b0a3 100644
--- a/cmd/relspec/merge.go
+++ b/cmd/relspec/merge.go
@@ -47,6 +47,7 @@ var (
mergeSourceType string
mergeSourcePath string
mergeSourceConn string
+ mergeFromList []string
mergeOutputType string
mergeOutputPath string
mergeOutputConn string
@@ -109,8 +110,9 @@ func init() {
// Source database flags
mergeCmd.Flags().StringVar(&mergeSourceType, "source", "", "Source format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
- mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats)")
+ mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats, mutually exclusive with --from-list)")
mergeCmd.Flags().StringVar(&mergeSourceConn, "source-conn", "", "Source connection string (required for pgsql)")
+ mergeCmd.Flags().StringSliceVar(&mergeFromList, "from-list", nil, "Comma-separated list of source file paths to merge (mutually exclusive with --source-path)")
// Output flags
mergeCmd.Flags().StringVar(&mergeOutputType, "output", "", "Output format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
@@ -144,6 +146,11 @@ func runMerge(cmd *cobra.Command, args []string) error {
return fmt.Errorf("--output format is required")
}
+ // Validate mutually exclusive source flags
+ if mergeSourcePath != "" && len(mergeFromList) > 0 {
+ return fmt.Errorf("--source-path and --from-list are mutually exclusive")
+ }
+
// Validate and expand file paths
if mergeTargetType != "pgsql" {
if mergeTargetPath == "" {
@@ -157,8 +164,8 @@ func runMerge(cmd *cobra.Command, args []string) error {
}
if mergeSourceType != "pgsql" {
- if mergeSourcePath == "" {
- return fmt.Errorf("--source-path is required for %s format", mergeSourceType)
+ if mergeSourcePath == "" && len(mergeFromList) == 0 {
+ return fmt.Errorf("--source-path or --from-list is required for %s format", mergeSourceType)
}
mergeSourcePath = expandPath(mergeSourcePath)
} else if mergeSourceConn == "" {
@@ -189,19 +196,36 @@ func runMerge(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, " ✓ Successfully read target database '%s'\n", targetDB.Name)
printDatabaseStats(targetDB)
- // Step 2: Read source database
+ // Step 2: Read source database(s)
fmt.Fprintf(os.Stderr, "\n[2/3] Reading source database...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeSourceType)
- if mergeSourcePath != "" {
- fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
- }
- if mergeSourceConn != "" {
- fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
- }
- sourceDB, err := readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
- if err != nil {
- return fmt.Errorf("failed to read source database: %w", err)
+ var sourceDB *models.Database
+ if len(mergeFromList) > 0 {
+ fmt.Fprintf(os.Stderr, " Files: %d file(s)\n", len(mergeFromList))
+ for i, filePath := range mergeFromList {
+ fmt.Fprintf(os.Stderr, " [%d/%d] %s\n", i+1, len(mergeFromList), filePath)
+ db, readErr := readDatabaseForMerge(mergeSourceType, expandPath(filePath), "", "Source")
+ if readErr != nil {
+ return fmt.Errorf("failed to read source file %s: %w", filePath, readErr)
+ }
+ if sourceDB == nil {
+ sourceDB = db
+ } else {
+ merge.MergeDatabases(sourceDB, db, &merge.MergeOptions{})
+ }
+ }
+ } else {
+ if mergeSourcePath != "" {
+ fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
+ }
+ if mergeSourceConn != "" {
+ fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
+ }
+ sourceDB, err = readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
+ if err != nil {
+ return fmt.Errorf("failed to read source database: %w", err)
+ }
}
fmt.Fprintf(os.Stderr, " ✓ Successfully read source database '%s'\n", sourceDB.Name)
printDatabaseStats(sourceDB)
diff --git a/cmd/relspec/merge_from_list_test.go b/cmd/relspec/merge_from_list_test.go
new file mode 100644
index 0000000..c590ba8
--- /dev/null
+++ b/cmd/relspec/merge_from_list_test.go
@@ -0,0 +1,162 @@
+package main
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+func TestRunMerge_FromListMutuallyExclusiveWithSourcePath(t *testing.T) {
+ saved := saveMergeState()
+ defer restoreMergeState(saved)
+
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ writeTestJSON(t, file, []string{"users"})
+
+ mergeTargetType = "json"
+ mergeTargetPath = file
+ mergeTargetConn = ""
+ mergeSourceType = "json"
+ mergeSourcePath = file
+ mergeSourceConn = ""
+ mergeFromList = []string{file}
+ mergeOutputType = "json"
+ mergeOutputPath = filepath.Join(dir, "out.json")
+ mergeOutputConn = ""
+ mergeSkipTables = ""
+ mergeReportPath = ""
+
+ err := runMerge(nil, nil)
+ if err == nil {
+ t.Error("expected error when --source-path and --from-list are both set")
+ }
+}
+
+func TestRunMerge_FromListSingleFile(t *testing.T) {
+ saved := saveMergeState()
+ defer restoreMergeState(saved)
+
+ dir := t.TempDir()
+ targetFile := filepath.Join(dir, "target.json")
+ sourceFile := filepath.Join(dir, "source.json")
+ outFile := filepath.Join(dir, "output.json")
+ writeTestJSON(t, targetFile, []string{"users"})
+ writeTestJSON(t, sourceFile, []string{"posts"})
+
+ mergeTargetType = "json"
+ mergeTargetPath = targetFile
+ mergeTargetConn = ""
+ mergeSourceType = "json"
+ mergeSourcePath = ""
+ mergeSourceConn = ""
+ mergeFromList = []string{sourceFile}
+ mergeOutputType = "json"
+ mergeOutputPath = outFile
+ mergeOutputConn = ""
+ mergeSkipTables = ""
+ mergeReportPath = ""
+
+ if err := runMerge(nil, nil); err != nil {
+ t.Fatalf("runMerge() error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunMerge_FromListMultipleFiles(t *testing.T) {
+ saved := saveMergeState()
+ defer restoreMergeState(saved)
+
+ dir := t.TempDir()
+ targetFile := filepath.Join(dir, "target.json")
+ source1 := filepath.Join(dir, "source1.json")
+ source2 := filepath.Join(dir, "source2.json")
+ outFile := filepath.Join(dir, "output.json")
+ writeTestJSON(t, targetFile, []string{"users"})
+ writeTestJSON(t, source1, []string{"posts"})
+ writeTestJSON(t, source2, []string{"comments"})
+
+ mergeTargetType = "json"
+ mergeTargetPath = targetFile
+ mergeTargetConn = ""
+ mergeSourceType = "json"
+ mergeSourcePath = ""
+ mergeSourceConn = ""
+ mergeFromList = []string{source1, source2}
+ mergeOutputType = "json"
+ mergeOutputPath = outFile
+ mergeOutputConn = ""
+ mergeSkipTables = ""
+ mergeReportPath = ""
+
+ if err := runMerge(nil, nil); err != nil {
+ t.Fatalf("runMerge() error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunMerge_FromListPathWithSpaces(t *testing.T) {
+ saved := saveMergeState()
+ defer restoreMergeState(saved)
+
+ spacedDir := filepath.Join(t.TempDir(), "my schema files")
+ if err := os.MkdirAll(spacedDir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ targetFile := filepath.Join(spacedDir, "target schema.json")
+ sourceFile := filepath.Join(spacedDir, "source schema.json")
+ outFile := filepath.Join(spacedDir, "merged output.json")
+ writeTestJSON(t, targetFile, []string{"users"})
+ writeTestJSON(t, sourceFile, []string{"comments"})
+
+ mergeTargetType = "json"
+ mergeTargetPath = targetFile
+ mergeTargetConn = ""
+ mergeSourceType = "json"
+ mergeSourcePath = ""
+ mergeSourceConn = ""
+ mergeFromList = []string{sourceFile}
+ mergeOutputType = "json"
+ mergeOutputPath = outFile
+ mergeOutputConn = ""
+ mergeSkipTables = ""
+ mergeReportPath = ""
+
+ if err := runMerge(nil, nil); err != nil {
+ t.Fatalf("runMerge() with spaced paths error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunMerge_FromListMissingSourceType(t *testing.T) {
+ saved := saveMergeState()
+ defer restoreMergeState(saved)
+
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ writeTestJSON(t, file, []string{"users"})
+
+ mergeTargetType = "json"
+ mergeTargetPath = file
+ mergeTargetConn = ""
+ mergeSourceType = "json"
+ mergeSourcePath = ""
+ mergeSourceConn = ""
+ mergeFromList = []string{} // empty list, no source-path either
+ mergeOutputType = "json"
+ mergeOutputPath = filepath.Join(dir, "out.json")
+ mergeOutputConn = ""
+ mergeSkipTables = ""
+ mergeReportPath = ""
+
+ err := runMerge(nil, nil)
+ if err == nil {
+ t.Error("expected error when neither --source-path nor --from-list is provided")
+ }
+}
diff --git a/cmd/relspec/templ.go b/cmd/relspec/templ.go
index 3697b37..b2ad631 100644
--- a/cmd/relspec/templ.go
+++ b/cmd/relspec/templ.go
@@ -15,6 +15,7 @@ var (
templSourceType string
templSourcePath string
templSourceConn string
+ templFromList []string
templTemplatePath string
templOutputPath string
templSchemaFilter string
@@ -78,8 +79,9 @@ Examples:
func init() {
templCmd.Flags().StringVar(&templSourceType, "from", "", "Source format (dbml, pgsql, json, etc.)")
- templCmd.Flags().StringVar(&templSourcePath, "from-path", "", "Source file path (for file-based sources)")
+ templCmd.Flags().StringVar(&templSourcePath, "from-path", "", "Source file path (for file-based sources, mutually exclusive with --from-list)")
templCmd.Flags().StringVar(&templSourceConn, "from-conn", "", "Source connection string (for database sources)")
+ templCmd.Flags().StringSliceVar(&templFromList, "from-list", nil, "Comma-separated list of source file paths to read and merge (mutually exclusive with --from-path)")
templCmd.Flags().StringVar(&templTemplatePath, "template", "", "Template file path (required)")
templCmd.Flags().StringVar(&templOutputPath, "output", "", "Output path (file or directory, empty for stdout)")
templCmd.Flags().StringVar(&templSchemaFilter, "schema", "", "Filter to specific schema")
@@ -95,9 +97,20 @@ func runTempl(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, "=== RelSpec Template Execution ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
+ // Validate mutually exclusive flags
+ if templSourcePath != "" && len(templFromList) > 0 {
+ return fmt.Errorf("--from-path and --from-list are mutually exclusive")
+ }
+
// Read database using the same function as convert
fmt.Fprintf(os.Stderr, "Reading from %s...\n", templSourceType)
- db, err := readDatabaseForConvert(templSourceType, templSourcePath, templSourceConn)
+ var db *models.Database
+ var err error
+ if len(templFromList) > 0 {
+ db, err = readDatabaseListForConvert(templSourceType, templFromList)
+ } else {
+ db, err = readDatabaseForConvert(templSourceType, templSourcePath, templSourceConn)
+ }
if err != nil {
return fmt.Errorf("failed to read source: %w", err)
}
diff --git a/cmd/relspec/templ_from_list_test.go b/cmd/relspec/templ_from_list_test.go
new file mode 100644
index 0000000..d01754e
--- /dev/null
+++ b/cmd/relspec/templ_from_list_test.go
@@ -0,0 +1,134 @@
+package main
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// writeTestTemplate writes a minimal Go text template file.
+func writeTestTemplate(t *testing.T, path string) {
+ t.Helper()
+ content := []byte(`{{.Name}}`)
+ if err := os.WriteFile(path, content, 0644); err != nil {
+ t.Fatalf("failed to write template file %s: %v", path, err)
+ }
+}
+
+func TestRunTempl_FromListMutuallyExclusiveWithFromPath(t *testing.T) {
+ saved := saveTemplState()
+ defer restoreTemplState(saved)
+
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ tmpl := filepath.Join(dir, "tmpl.tmpl")
+ writeTestJSON(t, file, []string{"users"})
+ writeTestTemplate(t, tmpl)
+
+ templSourceType = "json"
+ templSourcePath = file
+ templFromList = []string{file}
+ templTemplatePath = tmpl
+ templOutputPath = ""
+ templMode = "database"
+ templFilenamePattern = "{{.Name}}.txt"
+
+ err := runTempl(nil, nil)
+ if err == nil {
+ t.Error("expected error when --from-path and --from-list are both set")
+ }
+}
+
+func TestRunTempl_FromListSingleFile(t *testing.T) {
+ saved := saveTemplState()
+ defer restoreTemplState(saved)
+
+ dir := t.TempDir()
+ file := filepath.Join(dir, "schema.json")
+ tmpl := filepath.Join(dir, "tmpl.tmpl")
+ outFile := filepath.Join(dir, "output.txt")
+ writeTestJSON(t, file, []string{"users"})
+ writeTestTemplate(t, tmpl)
+
+ templSourceType = "json"
+ templSourcePath = ""
+ templSourceConn = ""
+ templFromList = []string{file}
+ templTemplatePath = tmpl
+ templOutputPath = outFile
+ templSchemaFilter = ""
+ templMode = "database"
+ templFilenamePattern = "{{.Name}}.txt"
+
+ if err := runTempl(nil, nil); err != nil {
+ t.Fatalf("runTempl() error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunTempl_FromListMultipleFiles(t *testing.T) {
+ saved := saveTemplState()
+ defer restoreTemplState(saved)
+
+ dir := t.TempDir()
+ file1 := filepath.Join(dir, "users.json")
+ file2 := filepath.Join(dir, "posts.json")
+ tmpl := filepath.Join(dir, "tmpl.tmpl")
+ outFile := filepath.Join(dir, "output.txt")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"posts"})
+ writeTestTemplate(t, tmpl)
+
+ templSourceType = "json"
+ templSourcePath = ""
+ templSourceConn = ""
+ templFromList = []string{file1, file2}
+ templTemplatePath = tmpl
+ templOutputPath = outFile
+ templSchemaFilter = ""
+ templMode = "database"
+ templFilenamePattern = "{{.Name}}.txt"
+
+ if err := runTempl(nil, nil); err != nil {
+ t.Fatalf("runTempl() error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
+
+func TestRunTempl_FromListPathWithSpaces(t *testing.T) {
+ saved := saveTemplState()
+ defer restoreTemplState(saved)
+
+ spacedDir := filepath.Join(t.TempDir(), "my schema files")
+ if err := os.MkdirAll(spacedDir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ file1 := filepath.Join(spacedDir, "users schema.json")
+ file2 := filepath.Join(spacedDir, "posts schema.json")
+ tmpl := filepath.Join(spacedDir, "my template.tmpl")
+ outFile := filepath.Join(spacedDir, "output file.txt")
+ writeTestJSON(t, file1, []string{"users"})
+ writeTestJSON(t, file2, []string{"posts"})
+ writeTestTemplate(t, tmpl)
+
+ templSourceType = "json"
+ templSourcePath = ""
+ templSourceConn = ""
+ templFromList = []string{file1, file2}
+ templTemplatePath = tmpl
+ templOutputPath = outFile
+ templSchemaFilter = ""
+ templMode = "database"
+ templFilenamePattern = "{{.Name}}.txt"
+
+ if err := runTempl(nil, nil); err != nil {
+ t.Fatalf("runTempl() with spaced paths error = %v", err)
+ }
+ if _, err := os.Stat(outFile); os.IsNotExist(err) {
+ t.Error("expected output file to be created")
+ }
+}
diff --git a/cmd/relspec/testhelpers_test.go b/cmd/relspec/testhelpers_test.go
new file mode 100644
index 0000000..fa79bee
--- /dev/null
+++ b/cmd/relspec/testhelpers_test.go
@@ -0,0 +1,219 @@
+package main
+
+import (
+ "encoding/json"
+ "os"
+ "testing"
+)
+
+// minimalColumn is used to build test JSON fixtures.
+type minimalColumn struct {
+ Name string `json:"name"`
+ Table string `json:"table"`
+ Schema string `json:"schema"`
+ Type string `json:"type"`
+ NotNull bool `json:"not_null"`
+ IsPrimaryKey bool `json:"is_primary_key"`
+ AutoIncrement bool `json:"auto_increment"`
+}
+
+type minimalTable struct {
+ Name string `json:"name"`
+ Schema string `json:"schema"`
+ Columns map[string]minimalColumn `json:"columns"`
+}
+
+type minimalSchema struct {
+ Name string `json:"name"`
+ Tables []minimalTable `json:"tables"`
+}
+
+type minimalDatabase struct {
+ Name string `json:"name"`
+ Schemas []minimalSchema `json:"schemas"`
+}
+
+// writeTestJSON writes a minimal JSON database file with one schema ("public")
+// containing tables with the given names. Each table has a single "id" PK column.
+func writeTestJSON(t *testing.T, path string, tableNames []string) {
+ t.Helper()
+
+ tables := make([]minimalTable, len(tableNames))
+ for i, name := range tableNames {
+ tables[i] = minimalTable{
+ Name: name,
+ Schema: "public",
+ Columns: map[string]minimalColumn{
+ "id": {
+ Name: "id",
+ Table: name,
+ Schema: "public",
+ Type: "bigint",
+ NotNull: true,
+ IsPrimaryKey: true,
+ AutoIncrement: true,
+ },
+ },
+ }
+ }
+
+ db := minimalDatabase{
+ Name: "test_db",
+ Schemas: []minimalSchema{{Name: "public", Tables: tables}},
+ }
+
+ data, err := json.Marshal(db)
+ if err != nil {
+ t.Fatalf("failed to marshal test JSON: %v", err)
+ }
+ if err := os.WriteFile(path, data, 0644); err != nil {
+ t.Fatalf("failed to write test file %s: %v", path, err)
+ }
+}
+
+// convertState captures and restores all convert global vars.
+type convertState struct {
+ sourceType string
+ sourcePath string
+ sourceConn string
+ fromList []string
+ targetType string
+ targetPath string
+ packageName string
+ schemaFilter string
+ flattenSchema bool
+}
+
+func saveConvertState() convertState {
+ return convertState{
+ sourceType: convertSourceType,
+ sourcePath: convertSourcePath,
+ sourceConn: convertSourceConn,
+ fromList: convertFromList,
+ targetType: convertTargetType,
+ targetPath: convertTargetPath,
+ packageName: convertPackageName,
+ schemaFilter: convertSchemaFilter,
+ flattenSchema: convertFlattenSchema,
+ }
+}
+
+func restoreConvertState(s convertState) {
+ convertSourceType = s.sourceType
+ convertSourcePath = s.sourcePath
+ convertSourceConn = s.sourceConn
+ convertFromList = s.fromList
+ convertTargetType = s.targetType
+ convertTargetPath = s.targetPath
+ convertPackageName = s.packageName
+ convertSchemaFilter = s.schemaFilter
+ convertFlattenSchema = s.flattenSchema
+}
+
+// templState captures and restores all templ global vars.
+type templState struct {
+ sourceType string
+ sourcePath string
+ sourceConn string
+ fromList []string
+ templatePath string
+ outputPath string
+ schemaFilter string
+ mode string
+ filenamePattern string
+}
+
+func saveTemplState() templState {
+ return templState{
+ sourceType: templSourceType,
+ sourcePath: templSourcePath,
+ sourceConn: templSourceConn,
+ fromList: templFromList,
+ templatePath: templTemplatePath,
+ outputPath: templOutputPath,
+ schemaFilter: templSchemaFilter,
+ mode: templMode,
+ filenamePattern: templFilenamePattern,
+ }
+}
+
+func restoreTemplState(s templState) {
+ templSourceType = s.sourceType
+ templSourcePath = s.sourcePath
+ templSourceConn = s.sourceConn
+ templFromList = s.fromList
+ templTemplatePath = s.templatePath
+ templOutputPath = s.outputPath
+ templSchemaFilter = s.schemaFilter
+ templMode = s.mode
+ templFilenamePattern = s.filenamePattern
+}
+
+// mergeState captures and restores all merge global vars.
+type mergeState struct {
+ targetType string
+ targetPath string
+ targetConn string
+ sourceType string
+ sourcePath string
+ sourceConn string
+ fromList []string
+ outputType string
+ outputPath string
+ outputConn string
+ skipDomains bool
+ skipRelations bool
+ skipEnums bool
+ skipViews bool
+ skipSequences bool
+ skipTables string
+ verbose bool
+ reportPath string
+ flattenSchema bool
+}
+
+func saveMergeState() mergeState {
+ return mergeState{
+ targetType: mergeTargetType,
+ targetPath: mergeTargetPath,
+ targetConn: mergeTargetConn,
+ sourceType: mergeSourceType,
+ sourcePath: mergeSourcePath,
+ sourceConn: mergeSourceConn,
+ fromList: mergeFromList,
+ outputType: mergeOutputType,
+ outputPath: mergeOutputPath,
+ outputConn: mergeOutputConn,
+ skipDomains: mergeSkipDomains,
+ skipRelations: mergeSkipRelations,
+ skipEnums: mergeSkipEnums,
+ skipViews: mergeSkipViews,
+ skipSequences: mergeSkipSequences,
+ skipTables: mergeSkipTables,
+ verbose: mergeVerbose,
+ reportPath: mergeReportPath,
+ flattenSchema: mergeFlattenSchema,
+ }
+}
+
+func restoreMergeState(s mergeState) {
+ mergeTargetType = s.targetType
+ mergeTargetPath = s.targetPath
+ mergeTargetConn = s.targetConn
+ mergeSourceType = s.sourceType
+ mergeSourcePath = s.sourcePath
+ mergeSourceConn = s.sourceConn
+ mergeFromList = s.fromList
+ mergeOutputType = s.outputType
+ mergeOutputPath = s.outputPath
+ mergeOutputConn = s.outputConn
+ mergeSkipDomains = s.skipDomains
+ mergeSkipRelations = s.skipRelations
+ mergeSkipEnums = s.skipEnums
+ mergeSkipViews = s.skipViews
+ mergeSkipSequences = s.skipSequences
+ mergeSkipTables = s.skipTables
+ mergeVerbose = s.verbose
+ mergeReportPath = s.reportPath
+ mergeFlattenSchema = s.flattenSchema
+}
diff --git a/linux/arch/PKGBUILD b/linux/arch/PKGBUILD
new file mode 100644
index 0000000..b72793e
--- /dev/null
+++ b/linux/arch/PKGBUILD
@@ -0,0 +1,35 @@
+# Maintainer: Hein (Warky Devs)
+pkgname=relspec
+pkgver=1.0.44
+pkgrel=1
+pkgdesc="Database schema conversion and analysis tool supporting multiple formats and ORMs"
+arch=('x86_64' 'aarch64')
+url="https://git.warky.dev/wdevs/relspecgo"
+license=('MIT')
+makedepends=('go')
+source=("$pkgname-$pkgver.zip::$url/archive/v$pkgver.zip")
+sha256sums=('SKIP')
+
+build() {
+ cd "relspecgo"
+ export CGO_ENABLED=0
+ go build \
+ -trimpath \
+ -ldflags "-X git.warky.dev/wdevs/relspecgo/cmd/relspec.version=$pkgver" \
+ -o "$pkgname" ./cmd/relspec
+}
+
+check() {
+ cd "relspecgo"
+ go test ./...
+}
+
+package() {
+ cd "relspecgo"
+
+ # Binary
+ install -Dm755 "$pkgname" "$pkgdir/usr/bin/$pkgname"
+
+ # Default config dir
+ install -dm755 "$pkgdir/etc/relspec"
+}
diff --git a/linux/centos/relspec.spec b/linux/centos/relspec.spec
new file mode 100644
index 0000000..8811aed
--- /dev/null
+++ b/linux/centos/relspec.spec
@@ -0,0 +1,43 @@
+Name: relspec
+Version: 1.0.44
+Release: 1%{?dist}
+Summary: Database schema conversion and analysis tool
+
+License: MIT
+URL: https://git.warky.dev/wdevs/relspecgo
+Source0: %{name}-%{version}.tar.gz
+
+BuildRequires: golang >= 1.24
+
+%global debug_package %{nil}
+%global _debugsource_packages 0
+%global _debuginfo_subpackages 0
+
+%description
+RelSpec provides bidirectional conversion between various database schema
+formats including PostgreSQL, MySQL, SQLite, Prisma, TypeORM, GORM, Drizzle,
+DBML, GraphQL, and more.
+
+%prep
+%autosetup
+
+%build
+export CGO_ENABLED=0
+go build \
+ -trimpath \
+ -ldflags "-X git.warky.dev/wdevs/relspecgo/cmd/relspec.version=%{version}" \
+ -o %{name} ./cmd/relspec
+
+%install
+install -Dm755 %{name} %{buildroot}%{_bindir}/%{name}
+install -Dm644 LICENSE %{buildroot}%{_licensedir}/%{name}/LICENSE
+install -dm755 %{buildroot}%{_sysconfdir}/relspec
+
+%files
+%license LICENSE
+%{_bindir}/%{name}
+%dir %{_sysconfdir}/relspec
+
+%changelog
+* Wed Apr 08 2026 Hein (Warky Devs) - 1.0.44-1
+- Initial package
diff --git a/linux/debian/control b/linux/debian/control
new file mode 100644
index 0000000..5d572d4
--- /dev/null
+++ b/linux/debian/control
@@ -0,0 +1,11 @@
+Package: relspec
+Version: VERSION
+Architecture: ARCH
+Maintainer: Hein (Warky Devs)
+Section: database
+Priority: optional
+Homepage: https://git.warky.dev/wdevs/relspecgo
+Description: Database schema conversion and analysis tool
+ RelSpec provides bidirectional conversion between various database schema
+ formats including PostgreSQL, MySQL, SQLite, Prisma, TypeORM, GORM, Drizzle,
+ DBML, GraphQL, and more.
diff --git a/pkg/inspector/report.go b/pkg/inspector/report.go
index ae57f81..33d4599 100644
--- a/pkg/inspector/report.go
+++ b/pkg/inspector/report.go
@@ -60,19 +60,19 @@ func (f *MarkdownFormatter) Format(report *InspectorReport) (string, error) {
// Summary
sb.WriteString(f.formatHeader("Summary"))
sb.WriteString("\n")
- sb.WriteString(fmt.Sprintf("- Rules Checked: %d\n", report.Summary.RulesChecked))
+ fmt.Fprintf(&sb, "- Rules Checked: %d\n", report.Summary.RulesChecked)
// Color-code error and warning counts
if report.Summary.ErrorCount > 0 {
sb.WriteString(f.colorize(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount), colorRed))
} else {
- sb.WriteString(fmt.Sprintf("- Errors: %d\n", report.Summary.ErrorCount))
+ fmt.Fprintf(&sb, "- Errors: %d\n", report.Summary.ErrorCount)
}
if report.Summary.WarningCount > 0 {
sb.WriteString(f.colorize(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount), colorYellow))
} else {
- sb.WriteString(fmt.Sprintf("- Warnings: %d\n", report.Summary.WarningCount))
+ fmt.Fprintf(&sb, "- Warnings: %d\n", report.Summary.WarningCount)
}
if report.Summary.PassedCount > 0 {
diff --git a/pkg/pgsql/connection.go b/pkg/pgsql/connection.go
new file mode 100644
index 0000000..17d9b63
--- /dev/null
+++ b/pkg/pgsql/connection.go
@@ -0,0 +1,85 @@
+package pgsql
+
+import (
+ "context"
+ "fmt"
+ "runtime/debug"
+ "strings"
+
+ "github.com/jackc/pgx/v5"
+)
+
+const (
+ defaultApplicationPrefix = "relspecgo"
+ postgresIdentifierMaxLen = 63
+)
+
+// BuildApplicationName returns a PostgreSQL application_name in the form:
+// relspecgo/[:]
+func BuildApplicationName(component string) string {
+ appName := fmt.Sprintf("%s/%s", defaultApplicationPrefix, relspecVersion())
+ component = strings.TrimSpace(component)
+ if component != "" {
+ appName = appName + ":" + component
+ }
+ if len(appName) > postgresIdentifierMaxLen {
+ appName = appName[:postgresIdentifierMaxLen]
+ }
+ return appName
+}
+
+// ParseConfigWithApplicationName parses a connection string and applies a default
+// application_name when one is not explicitly provided by the caller.
+func ParseConfigWithApplicationName(connString, component string) (*pgx.ConnConfig, error) {
+ cfg, err := pgx.ParseConfig(connString)
+ if err != nil {
+ return nil, err
+ }
+
+ if cfg.RuntimeParams == nil {
+ cfg.RuntimeParams = map[string]string{}
+ }
+
+ if strings.TrimSpace(cfg.RuntimeParams["application_name"]) == "" {
+ cfg.RuntimeParams["application_name"] = BuildApplicationName(component)
+ }
+
+ return cfg, nil
+}
+
+// Connect establishes a PostgreSQL connection with a default relspec
+// application_name when the caller does not provide one in the DSN.
+func Connect(ctx context.Context, connString, component string) (*pgx.Conn, error) {
+ cfg, err := ParseConfigWithApplicationName(connString, component)
+ if err != nil {
+ return nil, err
+ }
+
+ return pgx.ConnectConfig(ctx, cfg)
+}
+
+func relspecVersion() string {
+ info, ok := debug.ReadBuildInfo()
+ if !ok {
+ return "dev"
+ }
+
+ version := strings.TrimSpace(info.Main.Version)
+ if version != "" && version != "(devel)" {
+ return version
+ }
+
+ for _, setting := range info.Settings {
+ if setting.Key == "vcs.revision" {
+ revision := strings.TrimSpace(setting.Value)
+ if len(revision) >= 7 {
+ return revision[:7]
+ }
+ if revision != "" {
+ return revision
+ }
+ }
+ }
+
+ return "dev"
+}
diff --git a/pkg/pgsql/connection_test.go b/pkg/pgsql/connection_test.go
new file mode 100644
index 0000000..c517fc7
--- /dev/null
+++ b/pkg/pgsql/connection_test.go
@@ -0,0 +1,53 @@
+package pgsql
+
+import (
+ "strings"
+ "testing"
+)
+
+func TestBuildApplicationName_IncludesVersion(t *testing.T) {
+ got := BuildApplicationName("")
+ if !strings.HasPrefix(got, "relspecgo/") {
+ t.Fatalf("BuildApplicationName() = %q, expected prefix relspecgo/", got)
+ }
+}
+
+func TestBuildApplicationName_IncludesComponent(t *testing.T) {
+ got := BuildApplicationName("reader-pgsql")
+ if !strings.Contains(got, ":reader-pgsql") {
+ t.Fatalf("BuildApplicationName(component) = %q, expected component suffix", got)
+ }
+}
+
+func TestBuildApplicationName_RespectsPostgresLengthLimit(t *testing.T) {
+ got := BuildApplicationName(strings.Repeat("x", 200))
+ if len(got) > 63 {
+ t.Fatalf("BuildApplicationName() length = %d, expected <= 63", len(got))
+ }
+}
+
+func TestParseConfigWithApplicationName_AddsWhenMissing(t *testing.T) {
+ cfg, err := ParseConfigWithApplicationName("postgres://user:pass@localhost:5432/db", "reader-pgsql")
+ if err != nil {
+ t.Fatalf("ParseConfigWithApplicationName() error = %v", err)
+ }
+
+ appName := cfg.RuntimeParams["application_name"]
+ if appName == "" {
+ t.Fatal("expected application_name to be set")
+ }
+ if !strings.HasPrefix(appName, "relspecgo/") {
+ t.Fatalf("application_name = %q, expected relspecgo/ prefix", appName)
+ }
+}
+
+func TestParseConfigWithApplicationName_PreservesExplicitValue(t *testing.T) {
+ cfg, err := ParseConfigWithApplicationName("postgres://user:pass@localhost:5432/db?application_name=custom-app", "reader-pgsql")
+ if err != nil {
+ t.Fatalf("ParseConfigWithApplicationName() error = %v", err)
+ }
+
+ if got := cfg.RuntimeParams["application_name"]; got != "custom-app" {
+ t.Fatalf("application_name = %q, expected %q", got, "custom-app")
+ }
+}
diff --git a/pkg/pgsql/types_registry.go b/pkg/pgsql/types_registry.go
new file mode 100644
index 0000000..d0d507d
--- /dev/null
+++ b/pkg/pgsql/types_registry.go
@@ -0,0 +1,250 @@
+package pgsql
+
+import (
+ "sort"
+ "strings"
+)
+
+// TypeSpec describes PostgreSQL type capabilities used by parsers/writers.
+type TypeSpec struct {
+ SupportsLength bool
+ SupportsPrecision bool
+}
+
+var postgresBaseTypes = map[string]TypeSpec{
+ // Numeric types
+ "smallint": {},
+ "integer": {},
+ "bigint": {},
+ "decimal": {SupportsPrecision: true},
+ "numeric": {SupportsPrecision: true},
+ "real": {},
+ "double precision": {},
+ "smallserial": {},
+ "serial": {},
+ "bigserial": {},
+ "money": {},
+
+ // Character types
+ "char": {SupportsLength: true},
+ "character": {SupportsLength: true},
+ "varchar": {SupportsLength: true},
+ "character varying": {SupportsLength: true},
+ "text": {},
+ "name": {},
+
+ // Binary
+ "bytea": {},
+
+ // Date/time
+ "timestamp": {SupportsPrecision: true},
+ "timestamp without time zone": {SupportsPrecision: true},
+ "timestamp with time zone": {SupportsPrecision: true},
+ "time": {SupportsPrecision: true},
+ "time without time zone": {SupportsPrecision: true},
+ "time with time zone": {SupportsPrecision: true},
+ "date": {},
+ "interval": {SupportsPrecision: true},
+
+ // Boolean
+ "boolean": {},
+
+ // Geometric
+ "point": {},
+ "line": {},
+ "lseg": {},
+ "box": {},
+ "path": {},
+ "polygon": {},
+ "circle": {},
+
+ // Network
+ "cidr": {},
+ "inet": {},
+ "macaddr": {},
+ "macaddr8": {},
+
+ // Bit string
+ "bit": {SupportsLength: true},
+ "bit varying": {SupportsLength: true},
+ "varbit": {SupportsLength: true},
+
+ // Text search
+ "tsvector": {},
+ "tsquery": {},
+
+ // UUID/XML/JSON
+ "uuid": {},
+ "xml": {},
+ "json": {},
+ "jsonb": {},
+
+ // Range
+ "int4range": {},
+ "int8range": {},
+ "numrange": {},
+ "tsrange": {},
+ "tstzrange": {},
+ "daterange": {},
+ "int4multirange": {},
+ "int8multirange": {},
+ "nummultirange": {},
+ "tsmultirange": {},
+ "tstzmultirange": {},
+ "datemultirange": {},
+
+ // Object identifier
+ "oid": {},
+ "regclass": {},
+ "regproc": {},
+ "regtype": {},
+
+ // Pseudo-ish/common built-ins seen in schemas
+ "record": {},
+ "void": {},
+
+ // Common extensions
+ "citext": {},
+ "hstore": {},
+ "ltree": {},
+ "lquery": {},
+ "ltxtquery": {},
+ "vector": {}, // pgvector: keep explicit modifier form (vector(dim))
+ "halfvec": {}, // pgvector: keep explicit modifier form (halfvec(dim))
+ "sparsevec": {}, // pgvector: keep explicit modifier form (sparsevec(dim))
+}
+
+var postgresTypeAliases = map[string]string{
+ // Integer aliases
+ "int2": "smallint",
+ "int4": "integer",
+ "int8": "bigint",
+ "int": "integer",
+
+ // Serial aliases
+ "serial2": "smallserial",
+ "serial4": "serial",
+ "serial8": "bigserial",
+
+ // Character aliases
+ "bpchar": "char",
+
+ // Float aliases
+ "float4": "real",
+ "float8": "double precision",
+ "float": "double precision",
+
+ // Time aliases
+ "timestamptz": "timestamp with time zone",
+ "timetz": "time with time zone",
+
+ // Bit alias
+ "varbit": "bit varying",
+
+ // Boolean alias
+ "bool": "boolean",
+}
+
+// GetPostgresBaseTypes returns a sorted-ish stable list of registered base type names.
+func GetPostgresBaseTypes() []string {
+ result := make([]string, 0, len(postgresBaseTypes))
+ for t := range postgresBaseTypes {
+ result = append(result, t)
+ }
+ sort.Strings(result)
+ return result
+}
+
+// GetPostgresTypes returns the registered PostgreSQL types.
+// When includeArrays is true, each base type also includes an array variant ("type[]").
+func GetPostgresTypes(includeArrays bool) []string {
+ base := GetPostgresBaseTypes()
+ if !includeArrays {
+ return base
+ }
+
+ result := make([]string, 0, len(base)*2)
+ result = append(result, base...)
+ for _, t := range base {
+ result = append(result, t+"[]")
+ }
+ return result
+}
+
+// ExtractBaseType returns the type without outer array suffixes and modifiers.
+// Examples:
+// - varchar(255) -> varchar
+// - text[] -> text
+// - numeric(10,2)[] -> numeric
+func ExtractBaseType(sqlType string) string {
+ t := normalizeTypeToken(sqlType)
+ t = strings.TrimSpace(stripArraySuffixes(t))
+ if idx := strings.Index(t, "("); idx > 0 {
+ t = strings.TrimSpace(t[:idx])
+ }
+ return t
+}
+
+// ExtractBaseTypeLower is ExtractBaseType with lowercase normalization.
+func ExtractBaseTypeLower(sqlType string) string {
+ return strings.ToLower(ExtractBaseType(sqlType))
+}
+
+// IsArrayType reports whether the SQL type has one or more [] suffixes.
+func IsArrayType(sqlType string) bool {
+ t := normalizeTypeToken(sqlType)
+ return strings.HasSuffix(t, "[]")
+}
+
+// ElementType returns the underlying element type for array types.
+// For non-array types, it returns the input unchanged.
+func ElementType(sqlType string) string {
+ t := normalizeTypeToken(sqlType)
+ return stripArraySuffixes(t)
+}
+
+// CanonicalizeBaseType resolves aliases to canonical PostgreSQL type names.
+func CanonicalizeBaseType(baseType string) string {
+ base := strings.ToLower(normalizeTypeToken(baseType))
+ if canonical, ok := postgresTypeAliases[base]; ok {
+ return canonical
+ }
+ return base
+}
+
+// IsKnownPostgresType reports whether a type (including array forms) exists in the registry.
+func IsKnownPostgresType(sqlType string) bool {
+ base := CanonicalizeBaseType(ExtractBaseTypeLower(sqlType))
+ _, ok := postgresBaseTypes[base]
+ return ok
+}
+
+// SupportsLength reports if this SQL type accepts a single length/dimension modifier.
+func SupportsLength(sqlType string) bool {
+ base := CanonicalizeBaseType(ExtractBaseTypeLower(sqlType))
+ spec, ok := postgresBaseTypes[base]
+ return ok && spec.SupportsLength
+}
+
+// SupportsPrecision reports if this SQL type accepts precision (and possibly scale).
+func SupportsPrecision(sqlType string) bool {
+ base := CanonicalizeBaseType(ExtractBaseTypeLower(sqlType))
+ spec, ok := postgresBaseTypes[base]
+ return ok && spec.SupportsPrecision
+}
+
+// HasExplicitTypeModifier reports if the type already includes "(...)".
+func HasExplicitTypeModifier(sqlType string) bool {
+ return strings.Contains(sqlType, "(")
+}
+
+func stripArraySuffixes(t string) string {
+ for strings.HasSuffix(t, "[]") {
+ t = strings.TrimSpace(strings.TrimSuffix(t, "[]"))
+ }
+ return t
+}
+
+func normalizeTypeToken(t string) string {
+ return strings.Join(strings.Fields(strings.TrimSpace(t)), " ")
+}
diff --git a/pkg/pgsql/types_registry_test.go b/pkg/pgsql/types_registry_test.go
new file mode 100644
index 0000000..3a6c104
--- /dev/null
+++ b/pkg/pgsql/types_registry_test.go
@@ -0,0 +1,99 @@
+package pgsql
+
+import "testing"
+
+func TestPostgresTypeRegistry_MasterListIncludesRequestedTypes(t *testing.T) {
+ required := []string{
+ "vector",
+ "integer",
+ "citext",
+ }
+
+ types := make(map[string]bool)
+ for _, typ := range GetPostgresTypes(true) {
+ types[typ] = true
+ }
+
+ for _, typ := range required {
+ if !types[typ] {
+ t.Fatalf("master type list missing %q", typ)
+ }
+ if !types[typ+"[]"] {
+ t.Fatalf("master type list missing array variant %q", typ+"[]")
+ }
+ }
+}
+
+func TestPostgresTypeRegistry_TypeParsingAndCapabilities(t *testing.T) {
+ tests := []struct {
+ input string
+ wantBase string
+ wantCanonicalBase string
+ wantArray bool
+ wantKnown bool
+ wantLength bool
+ wantPrecision bool
+ }{
+ {
+ input: "integer[]",
+ wantBase: "integer",
+ wantCanonicalBase: "integer",
+ wantArray: true,
+ wantKnown: true,
+ },
+ {
+ input: "citext[]",
+ wantBase: "citext",
+ wantCanonicalBase: "citext",
+ wantArray: true,
+ wantKnown: true,
+ },
+ {
+ input: "vector(1536)",
+ wantBase: "vector",
+ wantCanonicalBase: "vector",
+ wantKnown: true,
+ wantLength: false,
+ },
+ {
+ input: "numeric(10,2)",
+ wantBase: "numeric",
+ wantCanonicalBase: "numeric",
+ wantKnown: true,
+ wantPrecision: true,
+ },
+ {
+ input: "int4",
+ wantBase: "int4",
+ wantCanonicalBase: "integer",
+ wantKnown: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.input, func(t *testing.T) {
+ base := ExtractBaseTypeLower(tt.input)
+ if base != tt.wantBase {
+ t.Fatalf("ExtractBaseTypeLower(%q) = %q, want %q", tt.input, base, tt.wantBase)
+ }
+
+ canonical := CanonicalizeBaseType(base)
+ if canonical != tt.wantCanonicalBase {
+ t.Fatalf("CanonicalizeBaseType(%q) = %q, want %q", base, canonical, tt.wantCanonicalBase)
+ }
+
+ if IsArrayType(tt.input) != tt.wantArray {
+ t.Fatalf("IsArrayType(%q) = %v, want %v", tt.input, IsArrayType(tt.input), tt.wantArray)
+ }
+ if IsKnownPostgresType(tt.input) != tt.wantKnown {
+ t.Fatalf("IsKnownPostgresType(%q) = %v, want %v", tt.input, IsKnownPostgresType(tt.input), tt.wantKnown)
+ }
+ if SupportsLength(tt.input) != tt.wantLength {
+ t.Fatalf("SupportsLength(%q) = %v, want %v", tt.input, SupportsLength(tt.input), tt.wantLength)
+ }
+ if SupportsPrecision(tt.input) != tt.wantPrecision {
+ t.Fatalf("SupportsPrecision(%q) = %v, want %v", tt.input, SupportsPrecision(tt.input), tt.wantPrecision)
+ }
+ })
+ }
+}
diff --git a/pkg/readers/bun/reader.go b/pkg/readers/bun/reader.go
index b3281de..efcb263 100644
--- a/pkg/readers/bun/reader.go
+++ b/pkg/readers/bun/reader.go
@@ -12,6 +12,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
@@ -700,16 +701,22 @@ func (r *Reader) extractBunTag(tag string) string {
// parseTypeWithLength parses a type string and extracts length if present
// e.g., "varchar(255)" returns ("varchar", 255)
func (r *Reader) parseTypeWithLength(typeStr string) (baseType string, length int) {
+ typeStr = strings.TrimSpace(typeStr)
+ baseType = typeStr
+
// Check for type with length: varchar(255), char(10), etc.
re := regexp.MustCompile(`^([a-zA-Z\s]+)\((\d+)\)$`)
matches := re.FindStringSubmatch(typeStr)
if len(matches) == 3 {
- if _, err := fmt.Sscanf(matches[2], "%d", &length); err == nil {
- baseType = strings.TrimSpace(matches[1])
- return
+ rawBaseType := strings.TrimSpace(matches[1])
+ if pgsql.SupportsLength(rawBaseType) {
+ if _, err := fmt.Sscanf(matches[2], "%d", &length); err == nil {
+ baseType = pgsql.CanonicalizeBaseType(rawBaseType)
+ return
+ }
}
}
- baseType = typeStr
+
return
}
diff --git a/pkg/readers/bun/reader_test.go b/pkg/readers/bun/reader_test.go
index 10fb64c..62cf501 100644
--- a/pkg/readers/bun/reader_test.go
+++ b/pkg/readers/bun/reader_test.go
@@ -71,8 +71,11 @@ func TestReader_ReadDatabase_Simple(t *testing.T) {
if !emailCol.NotNull {
t.Error("Column 'email' should be NOT NULL (explicit 'notnull' tag)")
}
- if emailCol.Type != "varchar" || emailCol.Length != 255 {
- t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
+ if emailCol.Type != "varchar" && emailCol.Type != "varchar(255)" {
+ t.Errorf("Expected email type 'varchar' or 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
+ }
+ if emailCol.Length != 255 {
+ t.Errorf("Expected email length 255, got %d", emailCol.Length)
}
// Verify name column - primitive string type should be NOT NULL by default in Bun
@@ -356,6 +359,33 @@ func TestReader_ReadDatabase_Complex(t *testing.T) {
}
}
+func TestParseTypeWithLength_PreservesExplicitTypeModifiers(t *testing.T) {
+ reader := &Reader{}
+
+ tests := []struct {
+ input string
+ wantType string
+ wantLength int
+ }{
+ {"varchar(255)", "varchar", 255},
+ {"character varying(120)", "character varying", 120},
+ {"vector(1536)", "vector(1536)", 0},
+ {"numeric(10,2)", "numeric(10,2)", 0},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.input, func(t *testing.T) {
+ gotType, gotLength := reader.parseTypeWithLength(tt.input)
+ if gotType != tt.wantType {
+ t.Fatalf("parseTypeWithLength(%q) type = %q, want %q", tt.input, gotType, tt.wantType)
+ }
+ if gotLength != tt.wantLength {
+ t.Fatalf("parseTypeWithLength(%q) length = %d, want %d", tt.input, gotLength, tt.wantLength)
+ }
+ })
+ }
+}
+
func TestReader_ReadSchema(t *testing.T) {
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "bun", "simple.go"),
@@ -485,9 +515,9 @@ func TestReader_NullableTypes(t *testing.T) {
// Test all nullability scenarios
tests := []struct {
- column string
- notNull bool
- reason string
+ column string
+ notNull bool
+ reason string
}{
{"id", true, "primary key"},
{"user_id", true, "explicit notnull tag"},
diff --git a/pkg/readers/dbml/reader.go b/pkg/readers/dbml/reader.go
index fe15908..c89a752 100644
--- a/pkg/readers/dbml/reader.go
+++ b/pkg/readers/dbml/reader.go
@@ -567,110 +567,182 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
// parseColumn parses a DBML column definition
func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column, *models.Constraint) {
// Format: column_name type [attributes] // comment
- parts := strings.Fields(line)
- if len(parts) < 2 {
+ lineNoComment, inlineComment := splitInlineComment(line)
+ signature, attrs := splitColumnSignatureAndAttrs(lineNoComment)
+ columnName, columnType, ok := parseColumnSignature(signature)
+ if !ok {
return nil, nil
}
- columnName := stripQuotes(parts[0])
- columnType := stripQuotes(parts[1])
-
column := models.InitColumn(columnName, tableName, schemaName)
column.Type = columnType
var constraint *models.Constraint
// Parse attributes in brackets
- if strings.Contains(line, "[") && strings.Contains(line, "]") {
- attrStart := strings.Index(line, "[")
- attrEnd := strings.Index(line, "]")
- if attrStart < attrEnd {
- attrs := line[attrStart+1 : attrEnd]
- attrList := strings.Split(attrs, ",")
+ if attrs != "" {
+ attrList := strings.Split(attrs, ",")
- for _, attr := range attrList {
- attr = strings.TrimSpace(attr)
+ for _, attr := range attrList {
+ attr = strings.TrimSpace(attr)
- if strings.Contains(attr, "primary key") || attr == "pk" {
- column.IsPrimaryKey = true
- column.NotNull = true
- } else if strings.Contains(attr, "not null") {
- column.NotNull = true
- } else if attr == "increment" {
- column.AutoIncrement = true
- } else if strings.HasPrefix(attr, "default:") {
- defaultVal := strings.TrimSpace(strings.TrimPrefix(attr, "default:"))
- column.Default = strings.Trim(defaultVal, "'\"")
- } else if attr == "unique" {
- // Create a unique constraint
- // Clean table name by removing leading underscores to avoid double underscores
- cleanTableName := strings.TrimLeft(tableName, "_")
- uniqueConstraint := models.InitConstraint(
- fmt.Sprintf("ukey_%s_%s", cleanTableName, columnName),
- models.UniqueConstraint,
- )
- uniqueConstraint.Schema = schemaName
- uniqueConstraint.Table = tableName
- uniqueConstraint.Columns = []string{columnName}
- // Store it to be added later
- if constraint == nil {
- constraint = uniqueConstraint
- }
- } else if strings.HasPrefix(attr, "note:") {
- // Parse column note/comment
- note := strings.TrimSpace(strings.TrimPrefix(attr, "note:"))
- column.Comment = strings.Trim(note, "'\"")
- } else if strings.HasPrefix(attr, "ref:") {
- // Parse inline reference
- // DBML semantics depend on context:
- // - On FK column: ref: < target means "this FK references target"
- // - On PK column: ref: < source means "source references this PK" (reverse notation)
- refStr := strings.TrimSpace(strings.TrimPrefix(attr, "ref:"))
-
- // Check relationship direction operator
- refOp := strings.TrimSpace(refStr)
- var isReverse bool
- if strings.HasPrefix(refOp, "<") {
- // < means "is referenced by" - only makes sense on PK columns
- isReverse = column.IsPrimaryKey
- }
- // > means "references" - always a forward FK, never reverse
-
- constraint = r.parseRef(refStr)
- if constraint != nil {
- if isReverse {
- // Reverse: parsed ref is SOURCE, current column is TARGET
- // Constraint should be ON the source table
- constraint.Schema = constraint.ReferencedSchema
- constraint.Table = constraint.ReferencedTable
- constraint.Columns = constraint.ReferencedColumns
- constraint.ReferencedSchema = schemaName
- constraint.ReferencedTable = tableName
- constraint.ReferencedColumns = []string{columnName}
- } else {
- // Forward: current column is SOURCE, parsed ref is TARGET
- // Standard FK: constraint is ON current table
- constraint.Schema = schemaName
- constraint.Table = tableName
- constraint.Columns = []string{columnName}
- }
- // Generate constraint name based on table and columns
- constraint.Name = fmt.Sprintf("fk_%s_%s", constraint.Table, strings.Join(constraint.Columns, "_"))
+ if strings.Contains(attr, "primary key") || attr == "pk" {
+ column.IsPrimaryKey = true
+ column.NotNull = true
+ } else if strings.Contains(attr, "not null") {
+ column.NotNull = true
+ } else if attr == "increment" {
+ column.AutoIncrement = true
+ } else if strings.HasPrefix(attr, "default:") {
+ defaultVal := strings.TrimSpace(strings.TrimPrefix(attr, "default:"))
+ column.Default = strings.Trim(defaultVal, "'\"")
+ } else if attr == "unique" {
+ // Create a unique constraint
+ // Clean table name by removing leading underscores to avoid double underscores
+ cleanTableName := strings.TrimLeft(tableName, "_")
+ uniqueConstraint := models.InitConstraint(
+ fmt.Sprintf("ukey_%s_%s", cleanTableName, columnName),
+ models.UniqueConstraint,
+ )
+ uniqueConstraint.Schema = schemaName
+ uniqueConstraint.Table = tableName
+ uniqueConstraint.Columns = []string{columnName}
+ // Store it to be added later
+ if constraint == nil {
+ constraint = uniqueConstraint
+ }
+ } else if strings.HasPrefix(attr, "note:") {
+ // Parse column note/comment
+ note := strings.TrimSpace(strings.TrimPrefix(attr, "note:"))
+ column.Comment = strings.Trim(note, "'\"")
+ } else if strings.HasPrefix(attr, "ref:") {
+ // Parse inline reference
+ // DBML semantics depend on context:
+ // - On FK column: ref: < target means "this FK references target"
+ // - On PK column: ref: < source means "source references this PK" (reverse notation)
+ refStr := strings.TrimSpace(strings.TrimPrefix(attr, "ref:"))
+
+ // Check relationship direction operator
+ refOp := strings.TrimSpace(refStr)
+ var isReverse bool
+ if strings.HasPrefix(refOp, "<") {
+ // < means "is referenced by" - only makes sense on PK columns
+ isReverse = column.IsPrimaryKey
+ }
+ // > means "references" - always a forward FK, never reverse
+
+ constraint = r.parseRef(refStr)
+ if constraint != nil {
+ if isReverse {
+ // Reverse: parsed ref is SOURCE, current column is TARGET
+ // Constraint should be ON the source table
+ constraint.Schema = constraint.ReferencedSchema
+ constraint.Table = constraint.ReferencedTable
+ constraint.Columns = constraint.ReferencedColumns
+ constraint.ReferencedSchema = schemaName
+ constraint.ReferencedTable = tableName
+ constraint.ReferencedColumns = []string{columnName}
+ } else {
+ // Forward: current column is SOURCE, parsed ref is TARGET
+ // Standard FK: constraint is ON current table
+ constraint.Schema = schemaName
+ constraint.Table = tableName
+ constraint.Columns = []string{columnName}
}
+ // Generate constraint name based on table and columns
+ constraint.Name = fmt.Sprintf("fk_%s_%s", constraint.Table, strings.Join(constraint.Columns, "_"))
}
}
}
}
// Parse inline comment
- if strings.Contains(line, "//") {
- commentStart := strings.Index(line, "//")
- column.Comment = strings.TrimSpace(line[commentStart+2:])
+ if inlineComment != "" {
+ column.Comment = inlineComment
}
return column, constraint
}
+func splitInlineComment(line string) (content string, inlineComment string) {
+ commentStart := strings.Index(line, "//")
+ if commentStart == -1 {
+ return line, ""
+ }
+
+ return strings.TrimSpace(line[:commentStart]), strings.TrimSpace(line[commentStart+2:])
+}
+
+func splitColumnSignatureAndAttrs(line string) (signature string, attrs string) {
+ trimmed := strings.TrimSpace(line)
+ if trimmed == "" || !strings.HasSuffix(trimmed, "]") {
+ return trimmed, ""
+ }
+
+ bracketDepth := 0
+ for i := len(trimmed) - 1; i >= 0; i-- {
+ switch trimmed[i] {
+ case ']':
+ bracketDepth++
+ case '[':
+ bracketDepth--
+ if bracketDepth == 0 {
+ // DBML attributes are a trailing [ ... ] block preceded by whitespace.
+ // This avoids confusing array types like text[] with attribute blocks.
+ if i > 0 && (trimmed[i-1] == ' ' || trimmed[i-1] == '\t') {
+ return strings.TrimSpace(trimmed[:i]), strings.TrimSpace(trimmed[i+1 : len(trimmed)-1])
+ }
+ }
+ }
+ }
+
+ return trimmed, ""
+}
+
+func parseColumnSignature(signature string) (columnName string, columnType string, ok bool) {
+ signature = strings.TrimSpace(signature)
+ if signature == "" {
+ return "", "", false
+ }
+
+ var splitAt int
+ if signature[0] == '"' || signature[0] == '\'' {
+ quote := signature[0]
+ splitAt = 1
+ for splitAt < len(signature) {
+ if signature[splitAt] == quote {
+ splitAt++
+ break
+ }
+ splitAt++
+ }
+ } else {
+ for splitAt < len(signature) && signature[splitAt] != ' ' && signature[splitAt] != '\t' {
+ splitAt++
+ }
+ }
+
+ if splitAt <= 0 || splitAt >= len(signature) {
+ return "", "", false
+ }
+
+ columnName = stripQuotes(strings.TrimSpace(signature[:splitAt]))
+ columnType = stripWrappingQuotes(strings.TrimSpace(signature[splitAt:]))
+ if columnName == "" || columnType == "" {
+ return "", "", false
+ }
+
+ return columnName, columnType, true
+}
+
+func stripWrappingQuotes(s string) string {
+ s = strings.TrimSpace(s)
+ if len(s) >= 2 && ((s[0] == '"' && s[len(s)-1] == '"') || (s[0] == '\'' && s[len(s)-1] == '\'')) {
+ return s[1 : len(s)-1]
+ }
+ return s
+}
+
// parseIndex parses a DBML index definition
func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
// Format: (columns) [attributes] OR columnname [attributes]
@@ -832,7 +904,11 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
for _, action := range actionList {
action = strings.TrimSpace(action)
- if strings.HasPrefix(action, "ondelete:") {
+ if strings.HasPrefix(action, "delete:") {
+ constraint.OnDelete = strings.TrimSpace(strings.TrimPrefix(action, "delete:"))
+ } else if strings.HasPrefix(action, "update:") {
+ constraint.OnUpdate = strings.TrimSpace(strings.TrimPrefix(action, "update:"))
+ } else if strings.HasPrefix(action, "ondelete:") {
constraint.OnDelete = strings.TrimSpace(strings.TrimPrefix(action, "ondelete:"))
} else if strings.HasPrefix(action, "onupdate:") {
constraint.OnUpdate = strings.TrimSpace(strings.TrimPrefix(action, "onupdate:"))
diff --git a/pkg/readers/dbml/reader_test.go b/pkg/readers/dbml/reader_test.go
index 1e360dc..e9e39e3 100644
--- a/pkg/readers/dbml/reader_test.go
+++ b/pkg/readers/dbml/reader_test.go
@@ -839,6 +839,67 @@ func TestConstraintNaming(t *testing.T) {
}
}
+func TestParseColumn_PostgresTypes(t *testing.T) {
+ reader := &Reader{}
+
+ tests := []struct {
+ name string
+ line string
+ wantName string
+ wantType string
+ wantNotNull bool
+ wantComment string
+ }{
+ {
+ name: "array type with attrs",
+ line: "tags text[] [not null]",
+ wantName: "tags",
+ wantType: "text[]",
+ wantNotNull: true,
+ },
+ {
+ name: "vector with dimension",
+ line: "embedding vector(1536)",
+ wantName: "embedding",
+ wantType: "vector(1536)",
+ },
+ {
+ name: "multi word timestamp type",
+ line: "published_at timestamp with time zone",
+ wantName: "published_at",
+ wantType: "timestamp with time zone",
+ },
+ {
+ name: "array type with inline comment",
+ line: "labels varchar(20)[] // column labels",
+ wantName: "labels",
+ wantType: "varchar(20)[]",
+ wantComment: "column labels",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ col, _ := reader.parseColumn(tt.line, "events", "public")
+ if col == nil {
+ t.Fatalf("parseColumn() returned nil column")
+ }
+ if col.Name != tt.wantName {
+ t.Errorf("column name = %q, want %q", col.Name, tt.wantName)
+ }
+ if col.Type != tt.wantType {
+ t.Errorf("column type = %q, want %q", col.Type, tt.wantType)
+ }
+ if col.NotNull != tt.wantNotNull {
+ t.Errorf("column not null = %v, want %v", col.NotNull, tt.wantNotNull)
+ }
+ if col.Comment != tt.wantComment {
+ t.Errorf("column comment = %q, want %q", col.Comment, tt.wantComment)
+ }
+ })
+ }
+}
+
func getKeys[V any](m map[string]V) []string {
keys := make([]string, 0, len(m))
for k := range m {
diff --git a/pkg/readers/dctx/reader.go b/pkg/readers/dctx/reader.go
index c0fce64..2509fc1 100644
--- a/pkg/readers/dctx/reader.go
+++ b/pkg/readers/dctx/reader.go
@@ -7,6 +7,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
@@ -232,7 +233,19 @@ func (r *Reader) convertField(dctxField *models.DCTXField, tableName string) ([]
// mapDataType maps Clarion data types to SQL types
func (r *Reader) mapDataType(clarionType string, size int) (sqlType string, precision int) {
- switch strings.ToUpper(clarionType) {
+ trimmedType := strings.TrimSpace(clarionType)
+
+ // Preserve known PostgreSQL types (including arrays and extension types)
+ // from DCTX input instead of coercing them to generic text.
+ if pgsql.IsKnownPostgresType(trimmedType) {
+ pgType := canonicalizePostgresType(trimmedType)
+ if !pgsql.HasExplicitTypeModifier(pgType) && size > 0 && pgsql.SupportsLength(pgType) {
+ return pgType, size
+ }
+ return pgType, 0
+ }
+
+ switch strings.ToUpper(trimmedType) {
case "LONG":
if size == 8 {
return "bigint", 0
@@ -306,6 +319,32 @@ func (r *Reader) mapDataType(clarionType string, size int) (sqlType string, prec
}
}
+func canonicalizePostgresType(typeStr string) string {
+ t := strings.ToLower(strings.Join(strings.Fields(strings.TrimSpace(typeStr)), " "))
+ if t == "" {
+ return ""
+ }
+
+ // Handle array suffixes
+ arrayCount := 0
+ for strings.HasSuffix(t, "[]") {
+ arrayCount++
+ t = strings.TrimSpace(strings.TrimSuffix(t, "[]"))
+ }
+
+ // Handle optional type modifier
+ modifier := ""
+ if idx := strings.Index(t, "("); idx > 0 {
+ if end := strings.LastIndex(t, ")"); end > idx {
+ modifier = t[idx : end+1]
+ t = strings.TrimSpace(t[:idx])
+ }
+ }
+
+ base := pgsql.CanonicalizeBaseType(t)
+ return base + modifier + strings.Repeat("[]", arrayCount)
+}
+
// processKeys processes DCTX keys and converts them to indexes and primary keys
func (r *Reader) processKeys(dctxTable *models.DCTXTable, table *models.Table, fieldGuidMap map[string]string) error {
for _, dctxKey := range dctxTable.Keys {
diff --git a/pkg/readers/dctx/reader_test.go b/pkg/readers/dctx/reader_test.go
index 2bfc98a..a82b682 100644
--- a/pkg/readers/dctx/reader_test.go
+++ b/pkg/readers/dctx/reader_test.go
@@ -493,3 +493,55 @@ func TestRelationships(t *testing.T) {
}
}
}
+
+func TestMapDataType_PostgresTypes(t *testing.T) {
+ reader := &Reader{}
+
+ tests := []struct {
+ name string
+ inputType string
+ size int
+ wantType string
+ wantLength int
+ }{
+ {
+ name: "integer array preserved",
+ inputType: "integer[]",
+ wantType: "integer[]",
+ },
+ {
+ name: "citext array preserved",
+ inputType: "citext[]",
+ wantType: "citext[]",
+ },
+ {
+ name: "vector modifier preserved",
+ inputType: "vector(1536)",
+ wantType: "vector(1536)",
+ },
+ {
+ name: "alias canonicalized in array",
+ inputType: "int4[]",
+ wantType: "integer[]",
+ },
+ {
+ name: "varchar length from size",
+ inputType: "varchar",
+ size: 120,
+ wantType: "varchar",
+ wantLength: 120,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotType, gotLength := reader.mapDataType(tt.inputType, tt.size)
+ if gotType != tt.wantType {
+ t.Fatalf("mapDataType(%q, %d) type = %q, want %q", tt.inputType, tt.size, gotType, tt.wantType)
+ }
+ if gotLength != tt.wantLength {
+ t.Fatalf("mapDataType(%q, %d) length = %d, want %d", tt.inputType, tt.size, gotLength, tt.wantLength)
+ }
+ })
+ }
+}
diff --git a/pkg/readers/drawdb/reader.go b/pkg/readers/drawdb/reader.go
index f42f2c8..33d36d0 100644
--- a/pkg/readers/drawdb/reader.go
+++ b/pkg/readers/drawdb/reader.go
@@ -8,6 +8,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
)
@@ -231,30 +232,35 @@ func (r *Reader) convertToColumn(field *drawdb.DrawDBField, tableName, schemaNam
// Parse type and dimensions
typeStr := field.Type
+ typeStr = strings.TrimSpace(typeStr)
column.Type = typeStr
// Try to extract length/precision from type string like "varchar(255)" or "decimal(10,2)"
if strings.Contains(typeStr, "(") {
parts := strings.Split(typeStr, "(")
- column.Type = parts[0]
+ baseType := strings.TrimSpace(parts[0])
if len(parts) > 1 {
dimensions := strings.TrimSuffix(parts[1], ")")
if strings.Contains(dimensions, ",") {
- // Precision and scale (e.g., decimal(10,2))
- dims := strings.Split(dimensions, ",")
- if precision, err := strconv.Atoi(strings.TrimSpace(dims[0])); err == nil {
- column.Precision = precision
- }
- if len(dims) > 1 {
- if scale, err := strconv.Atoi(strings.TrimSpace(dims[1])); err == nil {
- column.Scale = scale
+ // Precision and scale (e.g., decimal(10,2), numeric(10,2))
+ if pgsql.SupportsPrecision(baseType) {
+ dims := strings.Split(dimensions, ",")
+ if precision, err := strconv.Atoi(strings.TrimSpace(dims[0])); err == nil {
+ column.Precision = precision
+ }
+ if len(dims) > 1 {
+ if scale, err := strconv.Atoi(strings.TrimSpace(dims[1])); err == nil {
+ column.Scale = scale
+ }
}
}
} else {
// Just length (e.g., varchar(255))
- if length, err := strconv.Atoi(dimensions); err == nil {
- column.Length = length
+ if pgsql.SupportsLength(baseType) {
+ if length, err := strconv.Atoi(dimensions); err == nil {
+ column.Length = length
+ }
}
}
}
diff --git a/pkg/readers/drawdb/reader_test.go b/pkg/readers/drawdb/reader_test.go
index fbdf1ab..d5a736a 100644
--- a/pkg/readers/drawdb/reader_test.go
+++ b/pkg/readers/drawdb/reader_test.go
@@ -6,6 +6,7 @@ import (
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
+ "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
)
func TestReader_ReadDatabase_Simple(t *testing.T) {
@@ -288,6 +289,61 @@ func TestReader_ReadDatabase_Complex(t *testing.T) {
}
}
+func TestConvertToColumn_PreservesExplicitTypeModifiers(t *testing.T) {
+ reader := &Reader{}
+
+ tests := []struct {
+ name string
+ fieldType string
+ wantType string
+ wantLength int
+ wantPrecision int
+ wantScale int
+ }{
+ {
+ name: "varchar with length",
+ fieldType: "varchar(255)",
+ wantType: "varchar(255)",
+ wantLength: 255,
+ },
+ {
+ name: "numeric precision/scale",
+ fieldType: "numeric(10,2)",
+ wantType: "numeric(10,2)",
+ wantPrecision: 10,
+ wantScale: 2,
+ },
+ {
+ name: "custom vector modifier",
+ fieldType: "vector(1536)",
+ wantType: "vector(1536)",
+ wantLength: 0,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ field := &drawdb.DrawDBField{
+ Name: tt.name,
+ Type: tt.fieldType,
+ }
+ col := reader.convertToColumn(field, "events", "public")
+ if col.Type != tt.wantType {
+ t.Fatalf("column type = %q, want %q", col.Type, tt.wantType)
+ }
+ if col.Length != tt.wantLength {
+ t.Fatalf("column length = %d, want %d", col.Length, tt.wantLength)
+ }
+ if col.Precision != tt.wantPrecision {
+ t.Fatalf("column precision = %d, want %d", col.Precision, tt.wantPrecision)
+ }
+ if col.Scale != tt.wantScale {
+ t.Fatalf("column scale = %d, want %d", col.Scale, tt.wantScale)
+ }
+ })
+ }
+}
+
func TestReader_ReadSchema(t *testing.T) {
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "drawdb", "simple.json"),
diff --git a/pkg/readers/gorm/reader.go b/pkg/readers/gorm/reader.go
index ff56c1c..213d0cf 100644
--- a/pkg/readers/gorm/reader.go
+++ b/pkg/readers/gorm/reader.go
@@ -12,6 +12,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
@@ -773,11 +774,14 @@ func (r *Reader) extractGormTag(tag string) string {
// parseTypeWithLength parses a type string and extracts length if present
// e.g., "varchar(255)" returns ("varchar", 255)
func (r *Reader) parseTypeWithLength(typeStr string) (baseType string, length int) {
+ typeStr = strings.TrimSpace(typeStr)
+ baseType = typeStr
+
// Check for type with length: varchar(255), char(10), etc.
// Also handle precision/scale: numeric(10,2)
if strings.Contains(typeStr, "(") {
idx := strings.Index(typeStr, "(")
- baseType = strings.TrimSpace(typeStr[:idx])
+ rawBaseType := strings.TrimSpace(typeStr[:idx])
// Extract numbers from parentheses
parens := typeStr[idx+1:]
@@ -785,14 +789,16 @@ func (r *Reader) parseTypeWithLength(typeStr string) (baseType string, length in
parens = parens[:endIdx]
}
- // For now, just handle single number (length)
- if !strings.Contains(parens, ",") {
+ // Only treat as "length" for text-ish SQL types.
+ // This avoids converting custom modifiers like vector(1536) into Length.
+ if pgsql.SupportsLength(rawBaseType) && !strings.Contains(parens, ",") {
if _, err := fmt.Sscanf(parens, "%d", &length); err == nil {
+ baseType = pgsql.CanonicalizeBaseType(rawBaseType)
return
}
}
}
- baseType = typeStr
+
return
}
diff --git a/pkg/readers/gorm/reader_test.go b/pkg/readers/gorm/reader_test.go
index 76f53d0..9a5de45 100644
--- a/pkg/readers/gorm/reader_test.go
+++ b/pkg/readers/gorm/reader_test.go
@@ -71,8 +71,11 @@ func TestReader_ReadDatabase_Simple(t *testing.T) {
if !emailCol.NotNull {
t.Error("Column 'email' should be NOT NULL (explicit 'not null' tag)")
}
- if emailCol.Type != "varchar" || emailCol.Length != 255 {
- t.Errorf("Expected email type 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
+ if emailCol.Type != "varchar" && emailCol.Type != "varchar(255)" {
+ t.Errorf("Expected email type 'varchar' or 'varchar(255)', got '%s' with length %d", emailCol.Type, emailCol.Length)
+ }
+ if emailCol.Length != 255 {
+ t.Errorf("Expected email length 255, got %d", emailCol.Length)
}
// Verify name column - primitive string type should be NOT NULL by default
@@ -363,6 +366,33 @@ func TestReader_ReadDatabase_Complex(t *testing.T) {
}
}
+func TestParseTypeWithLength_PreservesExplicitTypeModifiers(t *testing.T) {
+ reader := &Reader{}
+
+ tests := []struct {
+ input string
+ wantType string
+ wantLength int
+ }{
+ {"varchar(255)", "varchar", 255},
+ {"character varying(120)", "character varying", 120},
+ {"vector(1536)", "vector(1536)", 0},
+ {"numeric(10,2)", "numeric(10,2)", 0},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.input, func(t *testing.T) {
+ gotType, gotLength := reader.parseTypeWithLength(tt.input)
+ if gotType != tt.wantType {
+ t.Fatalf("parseTypeWithLength(%q) type = %q, want %q", tt.input, gotType, tt.wantType)
+ }
+ if gotLength != tt.wantLength {
+ t.Fatalf("parseTypeWithLength(%q) length = %d, want %d", tt.input, gotLength, tt.wantLength)
+ }
+ })
+ }
+}
+
func TestReader_ReadSchema(t *testing.T) {
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "gorm", "simple.go"),
diff --git a/pkg/readers/pgsql/README.md b/pkg/readers/pgsql/README.md
index 258e284..78de0c4 100644
--- a/pkg/readers/pgsql/README.md
+++ b/pkg/readers/pgsql/README.md
@@ -89,6 +89,10 @@ postgres://user@localhost/mydb?sslmode=disable
postgres://user:pass@db.example.com:5432/production?sslmode=require
```
+By default, relspec sets `application_name` to `relspecgo/` for PostgreSQL
+sessions so they are identifiable in `pg_stat_activity`. If you provide
+`application_name` in the connection string, your explicit value is preserved.
+
## Extracted Information
### Tables
diff --git a/pkg/readers/pgsql/queries.go b/pkg/readers/pgsql/queries.go
index 668cecb..e43d417 100644
--- a/pkg/readers/pgsql/queries.go
+++ b/pkg/readers/pgsql/queries.go
@@ -206,8 +206,19 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
c.numeric_precision,
c.numeric_scale,
c.udt_name,
+ pg_catalog.format_type(a.atttypid, a.atttypmod) as formatted_data_type,
col_description((c.table_schema||'.'||c.table_name)::regclass, c.ordinal_position) as description
FROM information_schema.columns c
+ JOIN pg_catalog.pg_namespace n
+ ON n.nspname = c.table_schema
+ JOIN pg_catalog.pg_class cls
+ ON cls.relname = c.table_name
+ AND cls.relnamespace = n.oid
+ JOIN pg_catalog.pg_attribute a
+ ON a.attrelid = cls.oid
+ AND a.attname = c.column_name
+ AND a.attnum > 0
+ AND NOT a.attisdropped
WHERE c.table_schema = $1
ORDER BY c.table_schema, c.table_name, c.ordinal_position
`
@@ -221,12 +232,12 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
columnsMap := make(map[string]map[string]*models.Column)
for rows.Next() {
- var schema, tableName, columnName, isNullable, dataType, udtName string
+ var schema, tableName, columnName, isNullable, dataType, udtName, formattedDataType string
var ordinalPosition int
var columnDefault, description *string
var charMaxLength, numPrecision, numScale *int
- if err := rows.Scan(&schema, &tableName, &columnName, &ordinalPosition, &columnDefault, &isNullable, &dataType, &charMaxLength, &numPrecision, &numScale, &udtName, &description); err != nil {
+ if err := rows.Scan(&schema, &tableName, &columnName, &ordinalPosition, &columnDefault, &isNullable, &dataType, &charMaxLength, &numPrecision, &numScale, &udtName, &formattedDataType, &description); err != nil {
return nil, err
}
@@ -246,7 +257,7 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
}
// Map data type, preserving serial types when detected
- column.Type = r.mapDataType(dataType, udtName, hasNextval)
+ column.Type = r.mapDataType(dataType, udtName, formattedDataType, hasNextval)
column.NotNull = (isNullable == "NO")
column.Sequence = uint(ordinalPosition)
diff --git a/pkg/readers/pgsql/reader.go b/pkg/readers/pgsql/reader.go
index a60cd44..04a00ca 100644
--- a/pkg/readers/pgsql/reader.go
+++ b/pkg/readers/pgsql/reader.go
@@ -244,7 +244,7 @@ func (r *Reader) ReadTable() (*models.Table, error) {
// connect establishes a connection to the PostgreSQL database
func (r *Reader) connect() error {
- conn, err := pgx.Connect(r.ctx, r.options.ConnectionString)
+ conn, err := pgsql.Connect(r.ctx, r.options.ConnectionString, "reader-pgsql")
if err != nil {
return err
}
@@ -259,12 +259,14 @@ func (r *Reader) close() {
}
}
-// mapDataType maps PostgreSQL data types to canonical types
-func (r *Reader) mapDataType(pgType, udtName string, hasNextval bool) string {
+// mapDataType maps PostgreSQL data types while preserving exact type text when available.
+func (r *Reader) mapDataType(pgType, udtName, formattedType string, hasNextval bool) string {
+ normalizedPGType := strings.ToLower(strings.TrimSpace(pgType))
+
// If the column has a nextval default, it's likely a serial type
// Map to the appropriate serial type instead of the base integer type
if hasNextval {
- switch strings.ToLower(pgType) {
+ switch normalizedPGType {
case "integer", "int", "int4":
return "serial"
case "bigint", "int8":
@@ -274,6 +276,17 @@ func (r *Reader) mapDataType(pgType, udtName string, hasNextval bool) string {
}
}
+ // Prefer the database-provided formatted type; this preserves arrays/custom
+ // types/modifiers like text[], vector(1536), numeric(10,2), etc.
+ if strings.TrimSpace(formattedType) != "" {
+ return formattedType
+ }
+
+ // information_schema reports arrays generically as "ARRAY" with udt_name like "_text".
+ if strings.EqualFold(pgType, "ARRAY") && strings.HasPrefix(udtName, "_") && len(udtName) > 1 {
+ return udtName[1:] + "[]"
+ }
+
// Map common PostgreSQL types
typeMap := map[string]string{
"integer": "integer",
@@ -320,7 +333,7 @@ func (r *Reader) mapDataType(pgType, udtName string, hasNextval bool) string {
}
// Try mapped type first
- if mapped, exists := typeMap[pgType]; exists {
+ if mapped, exists := typeMap[normalizedPGType]; exists {
return mapped
}
@@ -329,8 +342,11 @@ func (r *Reader) mapDataType(pgType, udtName string, hasNextval bool) string {
return pgsql.GetSQLType(pgType)
}
- // Return UDT name for custom types
+ // Return UDT name for custom types (including array fallback when needed)
if udtName != "" {
+ if strings.HasPrefix(udtName, "_") && len(udtName) > 1 {
+ return udtName[1:] + "[]"
+ }
return udtName
}
diff --git a/pkg/readers/pgsql/reader_test.go b/pkg/readers/pgsql/reader_test.go
index e496b47..e7eb09c 100644
--- a/pkg/readers/pgsql/reader_test.go
+++ b/pkg/readers/pgsql/reader_test.go
@@ -173,35 +173,39 @@ func TestMapDataType(t *testing.T) {
reader := &Reader{}
tests := []struct {
- pgType string
- udtName string
- expected string
+ pgType string
+ udtName string
+ formattedType string
+ expected string
}{
- {"integer", "int4", "integer"},
- {"bigint", "int8", "bigint"},
- {"smallint", "int2", "smallint"},
- {"character varying", "varchar", "varchar"},
- {"text", "text", "text"},
- {"boolean", "bool", "boolean"},
- {"timestamp without time zone", "timestamp", "timestamp"},
- {"timestamp with time zone", "timestamptz", "timestamptz"},
- {"json", "json", "json"},
- {"jsonb", "jsonb", "jsonb"},
- {"uuid", "uuid", "uuid"},
- {"numeric", "numeric", "numeric"},
- {"real", "float4", "real"},
- {"double precision", "float8", "double precision"},
- {"date", "date", "date"},
- {"time without time zone", "time", "time"},
- {"bytea", "bytea", "bytea"},
- {"unknown_type", "custom", "custom"}, // Should return UDT name
+ {"integer", "int4", "", "integer"},
+ {"bigint", "int8", "", "bigint"},
+ {"smallint", "int2", "", "smallint"},
+ {"character varying", "varchar", "", "varchar"},
+ {"text", "text", "", "text"},
+ {"boolean", "bool", "", "boolean"},
+ {"timestamp without time zone", "timestamp", "", "timestamp"},
+ {"timestamp with time zone", "timestamptz", "", "timestamptz"},
+ {"json", "json", "", "json"},
+ {"jsonb", "jsonb", "", "jsonb"},
+ {"uuid", "uuid", "", "uuid"},
+ {"numeric", "numeric", "", "numeric"},
+ {"real", "float4", "", "real"},
+ {"double precision", "float8", "", "double precision"},
+ {"date", "date", "", "date"},
+ {"time without time zone", "time", "", "time"},
+ {"bytea", "bytea", "", "bytea"},
+ {"unknown_type", "custom", "", "custom"}, // Should return UDT name
+ {"ARRAY", "_text", "", "text[]"},
+ {"USER-DEFINED", "vector", "vector(1536)", "vector(1536)"},
+ {"character varying", "varchar", "character varying(255)", "character varying(255)"},
}
for _, tt := range tests {
t.Run(tt.pgType, func(t *testing.T) {
- result := reader.mapDataType(tt.pgType, tt.udtName, false)
+ result := reader.mapDataType(tt.pgType, tt.udtName, tt.formattedType, false)
if result != tt.expected {
- t.Errorf("mapDataType(%s, %s) = %s, expected %s", tt.pgType, tt.udtName, result, tt.expected)
+ t.Errorf("mapDataType(%s, %s, %s) = %s, expected %s", tt.pgType, tt.udtName, tt.formattedType, result, tt.expected)
}
})
}
@@ -218,9 +222,9 @@ func TestMapDataType(t *testing.T) {
for _, tt := range serialTests {
t.Run(tt.pgType+"_with_nextval", func(t *testing.T) {
- result := reader.mapDataType(tt.pgType, "", true)
+ result := reader.mapDataType(tt.pgType, "", "", true)
if result != tt.expected {
- t.Errorf("mapDataType(%s, '', true) = %s, expected %s", tt.pgType, result, tt.expected)
+ t.Errorf("mapDataType(%s, '', '', true) = %s, expected %s", tt.pgType, result, tt.expected)
}
})
}
@@ -230,63 +234,63 @@ func TestParseIndexDefinition(t *testing.T) {
reader := &Reader{}
tests := []struct {
- name string
- indexName string
- tableName string
- schema string
- indexDef string
- wantType string
- wantUnique bool
+ name string
+ indexName string
+ tableName string
+ schema string
+ indexDef string
+ wantType string
+ wantUnique bool
wantColumns int
}{
{
- name: "simple btree index",
- indexName: "idx_users_email",
- tableName: "users",
- schema: "public",
- indexDef: "CREATE INDEX idx_users_email ON public.users USING btree (email)",
- wantType: "btree",
- wantUnique: false,
+ name: "simple btree index",
+ indexName: "idx_users_email",
+ tableName: "users",
+ schema: "public",
+ indexDef: "CREATE INDEX idx_users_email ON public.users USING btree (email)",
+ wantType: "btree",
+ wantUnique: false,
wantColumns: 1,
},
{
- name: "unique index",
- indexName: "idx_users_username",
- tableName: "users",
- schema: "public",
- indexDef: "CREATE UNIQUE INDEX idx_users_username ON public.users USING btree (username)",
- wantType: "btree",
- wantUnique: true,
+ name: "unique index",
+ indexName: "idx_users_username",
+ tableName: "users",
+ schema: "public",
+ indexDef: "CREATE UNIQUE INDEX idx_users_username ON public.users USING btree (username)",
+ wantType: "btree",
+ wantUnique: true,
wantColumns: 1,
},
{
- name: "composite index",
- indexName: "idx_users_name",
- tableName: "users",
- schema: "public",
- indexDef: "CREATE INDEX idx_users_name ON public.users USING btree (first_name, last_name)",
- wantType: "btree",
- wantUnique: false,
+ name: "composite index",
+ indexName: "idx_users_name",
+ tableName: "users",
+ schema: "public",
+ indexDef: "CREATE INDEX idx_users_name ON public.users USING btree (first_name, last_name)",
+ wantType: "btree",
+ wantUnique: false,
wantColumns: 2,
},
{
- name: "gin index",
- indexName: "idx_posts_tags",
- tableName: "posts",
- schema: "public",
- indexDef: "CREATE INDEX idx_posts_tags ON public.posts USING gin (tags)",
- wantType: "gin",
- wantUnique: false,
+ name: "gin index",
+ indexName: "idx_posts_tags",
+ tableName: "posts",
+ schema: "public",
+ indexDef: "CREATE INDEX idx_posts_tags ON public.posts USING gin (tags)",
+ wantType: "gin",
+ wantUnique: false,
wantColumns: 1,
},
{
- name: "partial index with where clause",
- indexName: "idx_users_active",
- tableName: "users",
- schema: "public",
- indexDef: "CREATE INDEX idx_users_active ON public.users USING btree (id) WHERE (active = true)",
- wantType: "btree",
- wantUnique: false,
+ name: "partial index with where clause",
+ indexName: "idx_users_active",
+ tableName: "users",
+ schema: "public",
+ indexDef: "CREATE INDEX idx_users_active ON public.users USING btree (id) WHERE (active = true)",
+ wantType: "btree",
+ wantUnique: false,
wantColumns: 1,
},
}
diff --git a/pkg/readers/typeorm/reader.go b/pkg/readers/typeorm/reader.go
index 3e1f01c..660a8db 100644
--- a/pkg/readers/typeorm/reader.go
+++ b/pkg/readers/typeorm/reader.go
@@ -5,9 +5,11 @@ import (
"fmt"
"os"
"regexp"
+ "strconv"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
@@ -549,6 +551,41 @@ func (r *Reader) parseColumnOptions(decorator string, column *models.Column, tab
}
}
+ // Preserve explicit type modifiers from options where present.
+ // Example: @Column({ type: 'varchar', length: 255 }) -> varchar(255)
+ if column.Type != "" && !strings.Contains(column.Type, "(") {
+ lengthRegex := regexp.MustCompile(`length:\s*(\d+)`)
+ precisionRegex := regexp.MustCompile(`precision:\s*(\d+)`)
+ scaleRegex := regexp.MustCompile(`scale:\s*(\d+)`)
+
+ baseType := strings.ToLower(strings.TrimSpace(column.Type))
+
+ if pgsql.SupportsLength(baseType) {
+ if matches := lengthRegex.FindStringSubmatch(content); len(matches) == 2 {
+ if n, err := strconv.Atoi(matches[1]); err == nil && n > 0 {
+ column.Length = n
+ column.Type = fmt.Sprintf("%s(%d)", column.Type, n)
+ }
+ }
+ }
+
+ if pgsql.SupportsPrecision(baseType) {
+ if matches := precisionRegex.FindStringSubmatch(content); len(matches) == 2 {
+ if p, err := strconv.Atoi(matches[1]); err == nil && p > 0 {
+ column.Precision = p
+ if sm := scaleRegex.FindStringSubmatch(content); len(sm) == 2 {
+ if s, err := strconv.Atoi(sm[1]); err == nil && s >= 0 {
+ column.Scale = s
+ column.Type = fmt.Sprintf("%s(%d,%d)", column.Type, p, s)
+ }
+ } else {
+ column.Type = fmt.Sprintf("%s(%d)", column.Type, p)
+ }
+ }
+ }
+ }
+ }
+
if strings.Contains(content, "nullable: true") || strings.Contains(content, "nullable:true") {
column.NotNull = false
}
diff --git a/pkg/readers/typeorm/reader_test.go b/pkg/readers/typeorm/reader_test.go
new file mode 100644
index 0000000..4e98c5a
--- /dev/null
+++ b/pkg/readers/typeorm/reader_test.go
@@ -0,0 +1,60 @@
+package typeorm
+
+import (
+ "testing"
+
+ "git.warky.dev/wdevs/relspecgo/pkg/models"
+)
+
+func TestParseColumnOptions_PreservesTypeModifiers(t *testing.T) {
+ reader := &Reader{}
+ table := models.InitTable("users", "public")
+
+ tests := []struct {
+ name string
+ decorator string
+ wantType string
+ wantLength int
+ wantPrecision int
+ wantScale int
+ }{
+ {
+ name: "varchar with length",
+ decorator: `@Column({ type: 'varchar', length: 255 })`,
+ wantType: "varchar(255)",
+ wantLength: 255,
+ },
+ {
+ name: "numeric with precision and scale",
+ decorator: `@Column({ type: 'numeric', precision: 10, scale: 2 })`,
+ wantType: "numeric(10,2)",
+ wantPrecision: 10,
+ wantScale: 2,
+ },
+ {
+ name: "custom type with explicit modifier is preserved",
+ decorator: `@Column({ type: 'vector(1536)' })`,
+ wantType: "vector(1536)",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ col := models.InitColumn("sample", table.Name, table.Schema)
+ reader.parseColumnOptions(tt.decorator, col, table)
+
+ if col.Type != tt.wantType {
+ t.Fatalf("column type = %q, want %q", col.Type, tt.wantType)
+ }
+ if col.Length != tt.wantLength {
+ t.Fatalf("column length = %d, want %d", col.Length, tt.wantLength)
+ }
+ if col.Precision != tt.wantPrecision {
+ t.Fatalf("column precision = %d, want %d", col.Precision, tt.wantPrecision)
+ }
+ if col.Scale != tt.wantScale {
+ t.Fatalf("column scale = %d, want %d", col.Scale, tt.wantScale)
+ }
+ })
+ }
+}
diff --git a/pkg/writers/bun/template_data.go b/pkg/writers/bun/template_data.go
index 4789942..2f017c7 100644
--- a/pkg/writers/bun/template_data.go
+++ b/pkg/writers/bun/template_data.go
@@ -216,6 +216,21 @@ func resolveFieldNameCollision(fieldName string) string {
return fieldName
}
+// sortConstraints returns constraints ordered by sequence when both have one (>0), otherwise by name
+func sortConstraints(constraints map[string]*models.Constraint) []*models.Constraint {
+ result := make([]*models.Constraint, 0, len(constraints))
+ for _, c := range constraints {
+ result = append(result, c)
+ }
+ sort.Slice(result, func(i, j int) bool {
+ if result[i].Sequence > 0 && result[j].Sequence > 0 {
+ return result[i].Sequence < result[j].Sequence
+ }
+ return result[i].Name < result[j].Name
+ })
+ return result
+}
+
// sortColumns sorts columns by sequence, then by name
func sortColumns(columns map[string]*models.Column) []*models.Column {
result := make([]*models.Column, 0, len(columns))
diff --git a/pkg/writers/bun/type_mapper.go b/pkg/writers/bun/type_mapper.go
index 5767849..e7c136b 100644
--- a/pkg/writers/bun/type_mapper.go
+++ b/pkg/writers/bun/type_mapper.go
@@ -5,6 +5,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
@@ -39,14 +40,7 @@ func (tm *TypeMapper) SQLTypeToGoType(sqlType string, notNull bool) string {
// extractBaseType extracts the base type from a SQL type string
func (tm *TypeMapper) extractBaseType(sqlType string) string {
- sqlType = strings.ToLower(strings.TrimSpace(sqlType))
-
- // Remove everything after '('
- if idx := strings.Index(sqlType, "("); idx > 0 {
- sqlType = sqlType[:idx]
- }
-
- return sqlType
+ return pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
}
// isSimpleType checks if a type should use base Go type when NOT NULL
@@ -184,9 +178,10 @@ func (tm *TypeMapper) BuildBunTag(column *models.Column, table *models.Table) st
if column.Type != "" {
// Sanitize type to remove backticks
typeStr := writers.SanitizeStructTagValue(column.Type)
- if column.Length > 0 {
+ hasExplicitTypeModifier := pgsql.HasExplicitTypeModifier(typeStr)
+ if !hasExplicitTypeModifier && column.Length > 0 {
typeStr = fmt.Sprintf("%s(%d)", typeStr, column.Length)
- } else if column.Precision > 0 {
+ } else if !hasExplicitTypeModifier && column.Precision > 0 {
if column.Scale > 0 {
typeStr = fmt.Sprintf("%s(%d,%d)", typeStr, column.Precision, column.Scale)
} else {
diff --git a/pkg/writers/bun/writer.go b/pkg/writers/bun/writer.go
index 174ab41..f77ec41 100644
--- a/pkg/writers/bun/writer.go
+++ b/pkg/writers/bun/writer.go
@@ -242,7 +242,7 @@ func (w *Writer) addRelationshipFields(modelData *ModelData, table *models.Table
usedFieldNames := make(map[string]int)
// For each foreign key in this table, add a belongs-to/has-one relationship
- for _, constraint := range table.Constraints {
+ for _, constraint := range sortConstraints(table.Constraints) {
if constraint.Type != models.ForeignKeyConstraint {
continue
}
@@ -275,7 +275,7 @@ func (w *Writer) addRelationshipFields(modelData *ModelData, table *models.Table
continue // Skip self
}
- for _, constraint := range otherTable.Constraints {
+ for _, constraint := range sortConstraints(otherTable.Constraints) {
if constraint.Type != models.ForeignKeyConstraint {
continue
}
diff --git a/pkg/writers/bun/writer_test.go b/pkg/writers/bun/writer_test.go
index 2a7862a..287b122 100644
--- a/pkg/writers/bun/writer_test.go
+++ b/pkg/writers/bun/writer_test.go
@@ -698,3 +698,23 @@ func TestTypeMapper_BuildBunTag(t *testing.T) {
})
}
}
+
+func TestTypeMapper_BuildBunTag_PreservesExplicitTypeModifiers(t *testing.T) {
+ mapper := NewTypeMapper()
+
+ col := &models.Column{
+ Name: "embedding",
+ Type: "vector(1536)",
+ Length: 1536,
+ Precision: 0,
+ Scale: 0,
+ }
+
+ tag := mapper.BuildBunTag(col, nil)
+ if !strings.Contains(tag, "type:vector(1536),") {
+ t.Fatalf("expected explicit modifier to be preserved, got %q", tag)
+ }
+ if strings.Contains(tag, ")(") {
+ t.Fatalf("type modifier appears duplicated in %q", tag)
+ }
+}
diff --git a/pkg/writers/dbml/writer.go b/pkg/writers/dbml/writer.go
index 886defd..198b740 100644
--- a/pkg/writers/dbml/writer.go
+++ b/pkg/writers/dbml/writer.go
@@ -62,10 +62,10 @@ func (w *Writer) databaseToDBML(d *models.Database) string {
var sb strings.Builder
if d.Description != "" {
- sb.WriteString(fmt.Sprintf("// %s\n", d.Description))
+ fmt.Fprintf(&sb, "// %s\n", d.Description)
}
if d.Comment != "" {
- sb.WriteString(fmt.Sprintf("// %s\n", d.Comment))
+ fmt.Fprintf(&sb, "// %s\n", d.Comment)
}
if d.Description != "" || d.Comment != "" {
sb.WriteString("\n")
@@ -94,7 +94,7 @@ func (w *Writer) schemaToDBML(schema *models.Schema) string {
var sb strings.Builder
if schema.Description != "" {
- sb.WriteString(fmt.Sprintf("// Schema: %s - %s\n", schema.Name, schema.Description))
+ fmt.Fprintf(&sb, "// Schema: %s - %s\n", schema.Name, schema.Description)
}
for _, table := range schema.Tables {
@@ -110,10 +110,10 @@ func (w *Writer) tableToDBML(t *models.Table) string {
var sb strings.Builder
tableName := fmt.Sprintf("%s.%s", t.Schema, t.Name)
- sb.WriteString(fmt.Sprintf("Table %s {\n", tableName))
+ fmt.Fprintf(&sb, "Table %s {\n", tableName)
for _, column := range t.Columns {
- sb.WriteString(fmt.Sprintf(" %s %s", column.Name, column.Type))
+ fmt.Fprintf(&sb, " %s %s", column.Name, column.Type)
var attrs []string
if column.IsPrimaryKey {
@@ -138,11 +138,11 @@ func (w *Writer) tableToDBML(t *models.Table) string {
}
if len(attrs) > 0 {
- sb.WriteString(fmt.Sprintf(" [%s]", strings.Join(attrs, ", ")))
+ fmt.Fprintf(&sb, " [%s]", strings.Join(attrs, ", "))
}
if column.Comment != "" {
- sb.WriteString(fmt.Sprintf(" // %s", column.Comment))
+ fmt.Fprintf(&sb, " // %s", column.Comment)
}
sb.WriteString("\n")
}
@@ -161,9 +161,9 @@ func (w *Writer) tableToDBML(t *models.Table) string {
indexAttrs = append(indexAttrs, fmt.Sprintf("type: %s", index.Type))
}
- sb.WriteString(fmt.Sprintf(" (%s)", strings.Join(index.Columns, ", ")))
+ fmt.Fprintf(&sb, " (%s)", strings.Join(index.Columns, ", "))
if len(indexAttrs) > 0 {
- sb.WriteString(fmt.Sprintf(" [%s]", strings.Join(indexAttrs, ", ")))
+ fmt.Fprintf(&sb, " [%s]", strings.Join(indexAttrs, ", "))
}
sb.WriteString("\n")
}
@@ -172,7 +172,7 @@ func (w *Writer) tableToDBML(t *models.Table) string {
note := strings.TrimSpace(t.Description + " " + t.Comment)
if note != "" {
- sb.WriteString(fmt.Sprintf("\n Note: '%s'\n", note))
+ fmt.Fprintf(&sb, "\n Note: '%s'\n", note)
}
sb.WriteString("}\n")
diff --git a/pkg/writers/dctx/writer.go b/pkg/writers/dctx/writer.go
index 7ae0368..0c29077 100644
--- a/pkg/writers/dctx/writer.go
+++ b/pkg/writers/dctx/writer.go
@@ -4,6 +4,7 @@ import (
"encoding/xml"
"fmt"
"os"
+ "sort"
"strings"
"github.com/google/uuid"
@@ -155,8 +156,15 @@ func (w *Writer) mapTableFields(table *models.Table) models.DCTXTable {
},
}
+ columnNames := make([]string, 0, len(table.Columns))
+ for name := range table.Columns {
+ columnNames = append(columnNames, name)
+ }
+ sort.Strings(columnNames)
+
i := 0
- for _, column := range table.Columns {
+ for _, colName := range columnNames {
+ column := table.Columns[colName]
dctxTable.Fields[i] = w.mapField(column)
i++
}
@@ -165,12 +173,27 @@ func (w *Writer) mapTableFields(table *models.Table) models.DCTXTable {
}
func (w *Writer) mapTableKeys(table *models.Table) []models.DCTXKey {
- keys := make([]models.DCTXKey, len(table.Indexes))
- i := 0
+ indexes := make([]*models.Index, 0, len(table.Indexes))
for _, index := range table.Indexes {
- keys[i] = w.mapKey(index, table)
- i++
+ indexes = append(indexes, index)
}
+
+ // Stable ordering for deterministic output and test reproducibility:
+ // primary keys first, then lexicographic by index name.
+ sort.Slice(indexes, func(i, j int) bool {
+ iPrimary := strings.HasSuffix(indexes[i].Name, "_pkey")
+ jPrimary := strings.HasSuffix(indexes[j].Name, "_pkey")
+ if iPrimary != jPrimary {
+ return iPrimary
+ }
+ return indexes[i].Name < indexes[j].Name
+ })
+
+ keys := make([]models.DCTXKey, len(indexes))
+ for i, index := range indexes {
+ keys[i] = w.mapKey(index, table)
+ }
+
return keys
}
diff --git a/pkg/writers/drizzle/type_mapper.go b/pkg/writers/drizzle/type_mapper.go
index 97998bd..475d3df 100644
--- a/pkg/writers/drizzle/type_mapper.go
+++ b/pkg/writers/drizzle/type_mapper.go
@@ -5,6 +5,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
)
// TypeMapper handles SQL to Drizzle type conversions
@@ -18,7 +19,7 @@ func NewTypeMapper() *TypeMapper {
// SQLTypeToDrizzle converts SQL types to Drizzle column type functions
// Returns the Drizzle column constructor (e.g., "integer", "varchar", "text")
func (tm *TypeMapper) SQLTypeToDrizzle(sqlType string) string {
- sqlTypeLower := strings.ToLower(sqlType)
+ sqlTypeLower := pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
// PostgreSQL type mapping to Drizzle
typeMap := map[string]string{
@@ -87,13 +88,6 @@ func (tm *TypeMapper) SQLTypeToDrizzle(sqlType string) string {
return drizzleType
}
- // Check for partial matches (e.g., "varchar(255)" -> "varchar")
- for sqlPattern, drizzleType := range typeMap {
- if strings.HasPrefix(sqlTypeLower, sqlPattern) {
- return drizzleType
- }
- }
-
// Default to text for unknown types
return "text"
}
diff --git a/pkg/writers/gorm/template_data.go b/pkg/writers/gorm/template_data.go
index f773ac4..ae4f1c4 100644
--- a/pkg/writers/gorm/template_data.go
+++ b/pkg/writers/gorm/template_data.go
@@ -213,6 +213,21 @@ func resolveFieldNameCollision(fieldName string) string {
return fieldName
}
+// sortConstraints sorts constraints by sequence, then by name
+func sortConstraints(constraints map[string]*models.Constraint) []*models.Constraint {
+ result := make([]*models.Constraint, 0, len(constraints))
+ for _, c := range constraints {
+ result = append(result, c)
+ }
+ sort.Slice(result, func(i, j int) bool {
+ if result[i].Sequence > 0 && result[j].Sequence > 0 {
+ return result[i].Sequence < result[j].Sequence
+ }
+ return result[i].Name < result[j].Name
+ })
+ return result
+}
+
// sortColumns sorts columns by sequence, then by name
func sortColumns(columns map[string]*models.Column) []*models.Column {
result := make([]*models.Column, 0, len(columns))
diff --git a/pkg/writers/gorm/type_mapper.go b/pkg/writers/gorm/type_mapper.go
index 097d503..a61e643 100644
--- a/pkg/writers/gorm/type_mapper.go
+++ b/pkg/writers/gorm/type_mapper.go
@@ -5,6 +5,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
@@ -39,14 +40,7 @@ func (tm *TypeMapper) SQLTypeToGoType(sqlType string, notNull bool) string {
// extractBaseType extracts the base type from a SQL type string
// Examples: varchar(100) → varchar, numeric(10,2) → numeric
func (tm *TypeMapper) extractBaseType(sqlType string) string {
- sqlType = strings.ToLower(strings.TrimSpace(sqlType))
-
- // Remove everything after '('
- if idx := strings.Index(sqlType, "("); idx > 0 {
- sqlType = sqlType[:idx]
- }
-
- return sqlType
+ return pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
}
// baseGoType returns the base Go type for a SQL type (not null)
@@ -209,9 +203,10 @@ func (tm *TypeMapper) BuildGormTag(column *models.Column, table *models.Table) s
// Include length, precision, scale if present
// Sanitize type to remove backticks
typeStr := writers.SanitizeStructTagValue(column.Type)
- if column.Length > 0 {
+ hasExplicitTypeModifier := pgsql.HasExplicitTypeModifier(typeStr)
+ if !hasExplicitTypeModifier && column.Length > 0 {
typeStr = fmt.Sprintf("%s(%d)", typeStr, column.Length)
- } else if column.Precision > 0 {
+ } else if !hasExplicitTypeModifier && column.Precision > 0 {
if column.Scale > 0 {
typeStr = fmt.Sprintf("%s(%d,%d)", typeStr, column.Precision, column.Scale)
} else {
diff --git a/pkg/writers/gorm/writer.go b/pkg/writers/gorm/writer.go
index 47695fd..a91796c 100644
--- a/pkg/writers/gorm/writer.go
+++ b/pkg/writers/gorm/writer.go
@@ -236,7 +236,7 @@ func (w *Writer) addRelationshipFields(modelData *ModelData, table *models.Table
usedFieldNames := make(map[string]int)
// For each foreign key in this table, add a belongs-to relationship
- for _, constraint := range table.Constraints {
+ for _, constraint := range sortConstraints(table.Constraints) {
if constraint.Type != models.ForeignKeyConstraint {
continue
}
@@ -269,7 +269,7 @@ func (w *Writer) addRelationshipFields(modelData *ModelData, table *models.Table
continue // Skip self
}
- for _, constraint := range otherTable.Constraints {
+ for _, constraint := range sortConstraints(otherTable.Constraints) {
if constraint.Type != models.ForeignKeyConstraint {
continue
}
diff --git a/pkg/writers/gorm/writer_test.go b/pkg/writers/gorm/writer_test.go
index 65ab0e6..b90a67a 100644
--- a/pkg/writers/gorm/writer_test.go
+++ b/pkg/writers/gorm/writer_test.go
@@ -14,12 +14,12 @@ func TestWriter_WriteTable(t *testing.T) {
// Create a simple table
table := models.InitTable("users", "public")
table.Columns["id"] = &models.Column{
- Name: "id",
- Type: "bigint",
- NotNull: true,
- IsPrimaryKey: true,
+ Name: "id",
+ Type: "bigint",
+ NotNull: true,
+ IsPrimaryKey: true,
AutoIncrement: true,
- Sequence: 1,
+ Sequence: 1,
}
table.Columns["email"] = &models.Column{
Name: "email",
@@ -444,10 +444,10 @@ func TestWriter_MultipleHasManyRelationships(t *testing.T) {
// Verify all has-many relationships have unique names
hasManyExpectations := []string{
- "RelRIDAPIProviderOrgLogins", // Has many via Login
+ "RelRIDAPIProviderOrgLogins", // Has many via Login
"RelRIDAPIProviderOrgFilepointers", // Has many via Filepointer
- "RelRIDAPIProviderOrgAPIEvents", // Has many via APIEvent
- "RelRIDOwner", // Belongs to via rid_owner
+ "RelRIDAPIProviderOrgAPIEvents", // Has many via APIEvent
+ "RelRIDOwner", // Belongs to via rid_owner
}
for _, exp := range hasManyExpectations {
@@ -669,3 +669,23 @@ func TestTypeMapper_SQLTypeToGoType(t *testing.T) {
})
}
}
+
+func TestTypeMapper_BuildGormTag_PreservesExplicitTypeModifiers(t *testing.T) {
+ mapper := NewTypeMapper()
+
+ col := &models.Column{
+ Name: "embedding",
+ Type: "vector(1536)",
+ Length: 1536,
+ Precision: 0,
+ Scale: 0,
+ }
+
+ tag := mapper.BuildGormTag(col, nil)
+ if !strings.Contains(tag, "type:vector(1536)") {
+ t.Fatalf("expected explicit modifier to be preserved, got %q", tag)
+ }
+ if strings.Contains(tag, ")(") {
+ t.Fatalf("type modifier appears duplicated in %q", tag)
+ }
+}
diff --git a/pkg/writers/graphql/type_mapping.go b/pkg/writers/graphql/type_mapping.go
index c252cea..142a95e 100644
--- a/pkg/writers/graphql/type_mapping.go
+++ b/pkg/writers/graphql/type_mapping.go
@@ -4,6 +4,7 @@ import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
)
func (w *Writer) sqlTypeToGraphQL(sqlType string, column *models.Column, table *models.Table, schema *models.Schema) string {
@@ -33,12 +34,11 @@ func (w *Writer) sqlTypeToGraphQL(sqlType string, column *models.Column, table *
}
// Standard type mappings
- baseType := strings.Split(sqlType, "(")[0] // Remove length/precision
- baseType = strings.TrimSpace(baseType)
+ baseType := pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
// Handle array types
- if strings.HasSuffix(baseType, "[]") {
- elemType := strings.TrimSuffix(baseType, "[]")
+ if pgsql.IsArrayType(sqlType) {
+ elemType := pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(pgsql.ElementType(sqlType)))
gqlType := w.mapBaseTypeToGraphQL(elemType)
return "[" + gqlType + "]"
}
@@ -108,8 +108,7 @@ func (w *Writer) sqlTypeToCustomScalar(sqlType string) string {
"date": "Date",
}
- baseType := strings.Split(sqlType, "(")[0]
- baseType = strings.TrimSpace(baseType)
+ baseType := pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
if scalar, ok := scalarMap[baseType]; ok {
return scalar
@@ -132,8 +131,7 @@ func (w *Writer) isIntegerType(sqlType string) bool {
"smallserial": true,
}
- baseType := strings.Split(sqlType, "(")[0]
- baseType = strings.TrimSpace(baseType)
+ baseType := pgsql.CanonicalizeBaseType(pgsql.ExtractBaseTypeLower(sqlType))
return intTypes[baseType]
}
diff --git a/pkg/writers/graphql/writer.go b/pkg/writers/graphql/writer.go
index b6d3fb9..715f34e 100644
--- a/pkg/writers/graphql/writer.go
+++ b/pkg/writers/graphql/writer.go
@@ -52,7 +52,7 @@ func (w *Writer) databaseToGraphQL(db *models.Database) string {
if w.shouldIncludeComments() {
sb.WriteString("# Generated GraphQL Schema\n")
if db.Name != "" {
- sb.WriteString(fmt.Sprintf("# Database: %s\n", db.Name))
+ fmt.Fprintf(&sb, "# Database: %s\n", db.Name)
}
sb.WriteString("\n")
}
@@ -62,7 +62,7 @@ func (w *Writer) databaseToGraphQL(db *models.Database) string {
scalars := w.collectCustomScalars(db)
if len(scalars) > 0 {
for _, scalar := range scalars {
- sb.WriteString(fmt.Sprintf("scalar %s\n", scalar))
+ fmt.Fprintf(&sb, "scalar %s\n", scalar)
}
sb.WriteString("\n")
}
@@ -176,9 +176,9 @@ func (w *Writer) isJoinTable(table *models.Table) bool {
func (w *Writer) enumToGraphQL(enum *models.Enum) string {
var sb strings.Builder
- sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
+ fmt.Fprintf(&sb, "enum %s {\n", enum.Name)
for _, value := range enum.Values {
- sb.WriteString(fmt.Sprintf(" %s\n", value))
+ fmt.Fprintf(&sb, " %s\n", value)
}
sb.WriteString("}\n")
@@ -197,10 +197,10 @@ func (w *Writer) tableToGraphQL(table *models.Table, db *models.Database, schema
if desc == "" {
desc = table.Comment
}
- sb.WriteString(fmt.Sprintf("# %s\n", desc))
+ fmt.Fprintf(&sb, "# %s\n", desc)
}
- sb.WriteString(fmt.Sprintf("type %s {\n", typeName))
+ fmt.Fprintf(&sb, "type %s {\n", typeName)
// Collect and categorize fields
var idFields, scalarFields, relationFields []string
diff --git a/pkg/writers/pgsql/writer.go b/pkg/writers/pgsql/writer.go
index 726be72..732273d 100644
--- a/pkg/writers/pgsql/writer.go
+++ b/pkg/writers/pgsql/writer.go
@@ -10,8 +10,6 @@ import (
"strings"
"time"
- "github.com/jackc/pgx/v5"
-
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
@@ -493,18 +491,19 @@ func (w *Writer) generateColumnDefinition(col *models.Column) string {
// Type with length/precision - convert to valid PostgreSQL type
baseType := pgsql.ConvertSQLType(col.Type)
typeStr := baseType
+ hasExplicitTypeModifier := pgsql.HasExplicitTypeModifier(baseType)
// Only add size specifiers for types that support them
- if col.Length > 0 && col.Precision == 0 {
- if supportsLength(baseType) {
+ if !hasExplicitTypeModifier && col.Length > 0 && col.Precision == 0 {
+ if pgsql.SupportsLength(baseType) {
typeStr = fmt.Sprintf("%s(%d)", baseType, col.Length)
} else if isTextTypeWithoutLength(baseType) {
// Convert text with length to varchar
typeStr = fmt.Sprintf("varchar(%d)", col.Length)
}
// For types that don't support length (integer, bigint, etc.), ignore the length
- } else if col.Precision > 0 {
- if supportsPrecision(baseType) {
+ } else if !hasExplicitTypeModifier && col.Precision > 0 {
+ if pgsql.SupportsPrecision(baseType) {
if col.Scale > 0 {
typeStr = fmt.Sprintf("%s(%d,%d)", baseType, col.Precision, col.Scale)
} else {
@@ -1268,30 +1267,6 @@ func isTextType(colType string) bool {
return false
}
-// supportsLength checks if a PostgreSQL type supports length specification
-func supportsLength(colType string) bool {
- lengthTypes := []string{"varchar", "character varying", "char", "character", "bit", "bit varying", "varbit"}
- lowerType := strings.ToLower(colType)
- for _, t := range lengthTypes {
- if lowerType == t || strings.HasPrefix(lowerType, t+"(") {
- return true
- }
- }
- return false
-}
-
-// supportsPrecision checks if a PostgreSQL type supports precision/scale specification
-func supportsPrecision(colType string) bool {
- precisionTypes := []string{"numeric", "decimal", "time", "timestamp", "timestamptz", "timestamp with time zone", "timestamp without time zone", "time with time zone", "time without time zone", "interval"}
- lowerType := strings.ToLower(colType)
- for _, t := range precisionTypes {
- if lowerType == t || strings.HasPrefix(lowerType, t+"(") {
- return true
- }
- }
- return false
-}
-
// isTextTypeWithoutLength checks if type is text (which should convert to varchar when length is specified)
func isTextTypeWithoutLength(colType string) bool {
return strings.EqualFold(colType, "text")
@@ -1376,7 +1351,7 @@ func (w *Writer) executeDatabaseSQL(db *models.Database, connString string) erro
// Connect to database
ctx := context.Background()
- conn, err := pgx.Connect(ctx, connString)
+ conn, err := pgsql.Connect(ctx, connString, "writer-pgsql")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}
diff --git a/pkg/writers/pgsql/writer_test.go b/pkg/writers/pgsql/writer_test.go
index 06adcdc..776f6f1 100644
--- a/pkg/writers/pgsql/writer_test.go
+++ b/pkg/writers/pgsql/writer_test.go
@@ -426,11 +426,11 @@ func TestWriteAllConstraintTypes(t *testing.T) {
// Verify all constraint types are present
expectedConstraints := map[string]string{
- "Primary Key": "PRIMARY KEY",
- "Unique": "ADD CONSTRAINT uq_order_number UNIQUE (order_number)",
- "Check (total)": "ADD CONSTRAINT ck_total_positive CHECK (total > 0)",
- "Check (status)": "ADD CONSTRAINT ck_status_valid CHECK (status IN ('pending', 'completed', 'cancelled'))",
- "Foreign Key": "FOREIGN KEY",
+ "Primary Key": "PRIMARY KEY",
+ "Unique": "ADD CONSTRAINT uq_order_number UNIQUE (order_number)",
+ "Check (total)": "ADD CONSTRAINT ck_total_positive CHECK (total > 0)",
+ "Check (status)": "ADD CONSTRAINT ck_status_valid CHECK (status IN ('pending', 'completed', 'cancelled'))",
+ "Foreign Key": "FOREIGN KEY",
}
for name, expected := range expectedConstraints {
@@ -715,11 +715,11 @@ func TestColumnSizeSpecifiers(t *testing.T) {
// Verify valid patterns ARE present
validPatterns := []string{
- "integer", // without size
- "bigint", // without size
- "smallint", // without size
- "varchar(100)", // text converted to varchar with length
- "varchar(50)", // varchar with length
+ "integer", // without size
+ "bigint", // without size
+ "smallint", // without size
+ "varchar(100)", // text converted to varchar with length
+ "varchar(50)", // varchar with length
"decimal(19,4)", // decimal with precision and scale
}
for _, pattern := range validPatterns {
@@ -729,6 +729,56 @@ func TestColumnSizeSpecifiers(t *testing.T) {
}
}
+func TestGenerateColumnDefinition_PreservesExplicitTypeModifiers(t *testing.T) {
+ writer := NewWriter(&writers.WriterOptions{})
+
+ cases := []struct {
+ name string
+ colType string
+ length int
+ precision int
+ scale int
+ wantType string
+ }{
+ {
+ name: "character varying already includes length",
+ colType: "character varying(50)",
+ length: 50,
+ wantType: "character varying(50)",
+ },
+ {
+ name: "numeric already includes precision",
+ colType: "numeric(10,2)",
+ precision: 10,
+ scale: 2,
+ wantType: "numeric(10,2)",
+ },
+ {
+ name: "custom vector modifier preserved",
+ colType: "vector(1536)",
+ wantType: "vector(1536)",
+ },
+ }
+
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ col := models.InitColumn("sample", "events", "public")
+ col.Type = tc.colType
+ col.Length = tc.length
+ col.Precision = tc.precision
+ col.Scale = tc.scale
+
+ def := writer.generateColumnDefinition(col)
+ if !strings.Contains(def, " "+tc.wantType+" ") && !strings.HasSuffix(def, " "+tc.wantType) {
+ t.Fatalf("generated definition %q does not contain expected type %q", def, tc.wantType)
+ }
+ if strings.Contains(def, ")(") {
+ t.Fatalf("generated definition %q appears to duplicate modifiers", def)
+ }
+ })
+ }
+}
+
func TestGenerateAddColumnStatements(t *testing.T) {
// Create a test database with tables that have new columns
db := models.InitDatabase("testdb")
diff --git a/pkg/writers/prisma/writer.go b/pkg/writers/prisma/writer.go
index c1471c9..ce253a4 100644
--- a/pkg/writers/prisma/writer.go
+++ b/pkg/writers/prisma/writer.go
@@ -125,9 +125,9 @@ func (w *Writer) generateGenerator() string {
func (w *Writer) enumToPrisma(enum *models.Enum) string {
var sb strings.Builder
- sb.WriteString(fmt.Sprintf("enum %s {\n", enum.Name))
+ fmt.Fprintf(&sb, "enum %s {\n", enum.Name)
for _, value := range enum.Values {
- sb.WriteString(fmt.Sprintf(" %s\n", value))
+ fmt.Fprintf(&sb, " %s\n", value)
}
sb.WriteString("}\n")
@@ -179,7 +179,7 @@ func (w *Writer) identifyJoinTables(schema *models.Schema) map[string]bool {
func (w *Writer) tableToPrisma(table *models.Table, schema *models.Schema, joinTables map[string]bool) string {
var sb strings.Builder
- sb.WriteString(fmt.Sprintf("model %s {\n", table.Name))
+ fmt.Fprintf(&sb, "model %s {\n", table.Name)
// Collect columns to write
columns := make([]*models.Column, 0, len(table.Columns))
@@ -219,11 +219,11 @@ func (w *Writer) columnToField(col *models.Column, table *models.Table, schema *
var sb strings.Builder
// Field name
- sb.WriteString(fmt.Sprintf(" %s", col.Name))
+ fmt.Fprintf(&sb, " %s", col.Name)
// Field type
prismaType := w.sqlTypeToPrisma(col.Type, schema)
- sb.WriteString(fmt.Sprintf(" %s", prismaType))
+ fmt.Fprintf(&sb, " %s", prismaType)
// Optional modifier
if !col.NotNull && !col.IsPrimaryKey {
@@ -413,7 +413,7 @@ func (w *Writer) generateRelationFields(table *models.Table, schema *models.Sche
relationName = relationName[:len(relationName)-1]
}
- sb.WriteString(fmt.Sprintf(" %s %s", strings.ToLower(relationName), relationType))
+ fmt.Fprintf(&sb, " %s %s", strings.ToLower(relationName), relationType)
if isOptional {
sb.WriteString("?")
@@ -479,8 +479,8 @@ func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Sc
if fk.ReferencedTable != table.Name {
// This is the other side
otherSide := fk.ReferencedTable
- sb.WriteString(fmt.Sprintf(" %ss %s[]\n",
- strings.ToLower(otherSide), otherSide))
+ fmt.Fprintf(&sb, " %ss %s[]\n",
+ strings.ToLower(otherSide), otherSide)
break
}
}
@@ -497,8 +497,8 @@ func (w *Writer) generateInverseRelations(table *models.Table, schema *models.Sc
pluralName += "s"
}
- sb.WriteString(fmt.Sprintf(" %s %s[]\n",
- strings.ToLower(pluralName), otherTable.Name))
+ fmt.Fprintf(&sb, " %s %s[]\n",
+ strings.ToLower(pluralName), otherTable.Name)
}
}
}
@@ -530,20 +530,20 @@ func (w *Writer) generateBlockAttributes(table *models.Table) string {
if len(pkCols) > 1 {
sort.Strings(pkCols)
- sb.WriteString(fmt.Sprintf(" @@id([%s])\n", strings.Join(pkCols, ", ")))
+ fmt.Fprintf(&sb, " @@id([%s])\n", strings.Join(pkCols, ", "))
}
// @@unique for multi-column unique constraints
for _, constraint := range table.Constraints {
if constraint.Type == models.UniqueConstraint && len(constraint.Columns) > 1 {
- sb.WriteString(fmt.Sprintf(" @@unique([%s])\n", strings.Join(constraint.Columns, ", ")))
+ fmt.Fprintf(&sb, " @@unique([%s])\n", strings.Join(constraint.Columns, ", "))
}
}
// @@index for indexes
for _, index := range table.Indexes {
if !index.Unique { // Unique indexes are handled by @@unique
- sb.WriteString(fmt.Sprintf(" @@index([%s])\n", strings.Join(index.Columns, ", ")))
+ fmt.Fprintf(&sb, " @@index([%s])\n", strings.Join(index.Columns, ", "))
}
}
diff --git a/pkg/writers/sqlexec/writer.go b/pkg/writers/sqlexec/writer.go
index 35feb9a..267f340 100644
--- a/pkg/writers/sqlexec/writer.go
+++ b/pkg/writers/sqlexec/writer.go
@@ -8,6 +8,7 @@ import (
"github.com/jackc/pgx/v5"
"git.warky.dev/wdevs/relspecgo/pkg/models"
+ "git.warky.dev/wdevs/relspecgo/pkg/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
)
@@ -42,7 +43,7 @@ func (w *Writer) WriteDatabase(db *models.Database) error {
// Connect to database
ctx := context.Background()
- conn, err := pgx.Connect(ctx, connString)
+ conn, err := pgsql.Connect(ctx, connString, "writer-sqlexec")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}
@@ -72,7 +73,7 @@ func (w *Writer) WriteSchema(schema *models.Schema) error {
// Connect to database
ctx := context.Background()
- conn, err := pgx.Connect(ctx, connString)
+ conn, err := pgsql.Connect(ctx, connString, "writer-sqlexec")
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}
diff --git a/pkg/writers/typeorm/writer.go b/pkg/writers/typeorm/writer.go
index 30a47b5..8af58d7 100644
--- a/pkg/writers/typeorm/writer.go
+++ b/pkg/writers/typeorm/writer.go
@@ -207,7 +207,7 @@ func (w *Writer) tableToEntity(table *models.Table, schema *models.Schema, joinT
// Generate @Entity decorator with options
entityOptions := w.buildEntityOptions(table)
- sb.WriteString(fmt.Sprintf("@Entity({\n%s\n})\n", entityOptions))
+ fmt.Fprintf(&sb, "@Entity({\n%s\n})\n", entityOptions)
// Get class name (from metadata if different from table name)
className := table.Name
@@ -219,7 +219,7 @@ func (w *Writer) tableToEntity(table *models.Table, schema *models.Schema, joinT
}
}
- sb.WriteString(fmt.Sprintf("export class %s {\n", className))
+ fmt.Fprintf(&sb, "export class %s {\n", className)
// Collect and sort columns
columns := make([]*models.Column, 0, len(table.Columns))
@@ -272,7 +272,7 @@ func (w *Writer) viewToEntity(view *models.View) string {
sb.WriteString("})\n")
// Generate class
- sb.WriteString(fmt.Sprintf("export class %s {\n", view.Name))
+ fmt.Fprintf(&sb, "export class %s {\n", view.Name)
// Generate field definitions (without decorators for view fields)
columns := make([]*models.Column, 0, len(view.Columns))
@@ -285,7 +285,7 @@ func (w *Writer) viewToEntity(view *models.View) string {
for _, col := range columns {
tsType := w.sqlTypeToTypeScript(col.Type)
- sb.WriteString(fmt.Sprintf(" %s: %s;\n", col.Name, tsType))
+ fmt.Fprintf(&sb, " %s: %s;\n", col.Name, tsType)
}
sb.WriteString("}\n")
@@ -314,7 +314,7 @@ func (w *Writer) columnToField(col *models.Column, table *models.Table) string {
// Regular @Column decorator
options := w.buildColumnOptions(col, table)
if options != "" {
- sb.WriteString(fmt.Sprintf(" @Column({ %s })\n", options))
+ fmt.Fprintf(&sb, " @Column({ %s })\n", options)
} else {
sb.WriteString(" @Column()\n")
}
@@ -327,7 +327,7 @@ func (w *Writer) columnToField(col *models.Column, table *models.Table) string {
nullable = " | null"
}
- sb.WriteString(fmt.Sprintf(" %s: %s%s;", col.Name, tsType, nullable))
+ fmt.Fprintf(&sb, " %s: %s%s;", col.Name, tsType, nullable)
return sb.String()
}
@@ -464,17 +464,17 @@ func (w *Writer) generateRelationFields(table *models.Table, schema *models.Sche
inverseField := w.findInverseFieldName(table.Name, relatedTable, schema)
if inverseField != "" {
- sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, %s => %s.%s)\n",
- relatedTable, strings.ToLower(relatedTable), strings.ToLower(relatedTable), inverseField))
+ fmt.Fprintf(&sb, " @ManyToOne(() => %s, %s => %s.%s)\n",
+ relatedTable, strings.ToLower(relatedTable), strings.ToLower(relatedTable), inverseField)
} else {
if isNullable {
- sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s, { nullable: true })\n", relatedTable))
+ fmt.Fprintf(&sb, " @ManyToOne(() => %s, { nullable: true })\n", relatedTable)
} else {
- sb.WriteString(fmt.Sprintf(" @ManyToOne(() => %s)\n", relatedTable))
+ fmt.Fprintf(&sb, " @ManyToOne(() => %s)\n", relatedTable)
}
}
- sb.WriteString(fmt.Sprintf(" %s: %s%s;\n", fieldName, relatedTable, nullable))
+ fmt.Fprintf(&sb, " %s: %s%s;\n", fieldName, relatedTable, nullable)
sb.WriteString("\n")
}
diff --git a/tests/assets/dbml/complex.dbml b/tests/assets/dbml/complex.dbml
index 31feef5..2ccf410 100644
--- a/tests/assets/dbml/complex.dbml
+++ b/tests/assets/dbml/complex.dbml
@@ -56,7 +56,7 @@ Table admin.audit_logs {
}
// Relationships
-Ref: public.posts.user_id > public.users.id [ondelete: CASCADE, onupdate: CASCADE]
-Ref: public.comments.post_id > public.posts.id [ondelete: CASCADE]
-Ref: public.comments.user_id > public.users.id [ondelete: SET NULL]
-Ref: admin.audit_logs.user_id > public.users.id [ondelete: SET NULL]
+Ref: public.posts.user_id > public.users.id [delete: CASCADE, update: CASCADE]
+Ref: public.comments.post_id > public.posts.id [delete: CASCADE]
+Ref: public.comments.user_id > public.users.id [delete: SET NULL]
+Ref: admin.audit_logs.user_id > public.users.id [delete: SET NULL]