feat(mssql): add MSSQL writer for generating DDL from database schema
- Implement MSSQL writer to generate SQL scripts for creating schemas, tables, and constraints.
- Support for identity columns, indexes, and extended properties.
- Add tests for column definitions, table creation, primary keys, foreign keys, and comments.
- Include testing guide and sample schema for integration tests.
@@ -6,9 +6,9 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
 
 RelSpec is a database relations specification tool that provides bidirectional conversion between various database schema formats. It reads database schemas from multiple sources and writes them to various formats.
 
-**Supported Readers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, PostgreSQL, Prisma, SQL Directory, SQLite, TypeORM, YAML
+**Supported Readers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, MSSQL, PostgreSQL, Prisma, SQL Directory, SQLite, TypeORM, YAML
 
-**Supported Writers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, PostgreSQL, Prisma, SQL Exec, SQLite, Template, TypeORM, YAML
+**Supported Writers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, MSSQL, PostgreSQL, Prisma, SQL Exec, SQLite, Template, TypeORM, YAML
 
 ## Build Commands
 
@@ -18,6 +18,7 @@ import (
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
+	"git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
 	"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
@@ -32,6 +33,7 @@ import (
 	wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
 	wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
 	wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
+	wmssql "git.warky.dev/wdevs/relspecgo/pkg/writers/mssql"
 	wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
 	wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
 	wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
@@ -72,6 +74,7 @@ Input formats:
 - prisma: Prisma schema files (.prisma)
 - typeorm: TypeORM entity files (TypeScript)
 - pgsql: PostgreSQL database (live connection)
+- mssql: Microsoft SQL Server database (live connection)
 - sqlite: SQLite database file
 
 Output formats:
@@ -87,6 +90,7 @@ Output formats:
 - prisma: Prisma schema files (.prisma)
 - typeorm: TypeORM entity files (TypeScript)
 - pgsql: PostgreSQL SQL schema
+- mssql: Microsoft SQL Server SQL schema
 - sqlite: SQLite SQL schema (with automatic schema flattening)
 
 Connection String Examples:
@@ -309,6 +313,12 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
 		}
 		reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
 
+	case "mssql", "sqlserver", "mssql2016", "mssql2017", "mssql2019", "mssql2022":
+		if connString == "" {
+			return nil, fmt.Errorf("connection string is required for MSSQL format")
+		}
+		reader = mssql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
+
 	case "sqlite", "sqlite3":
 		// SQLite can use either file path or connection string
 		dbPath := filePath
@@ -375,6 +385,9 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
 	case "pgsql", "postgres", "postgresql", "sql":
 		writer = wpgsql.NewWriter(writerOpts)
 
+	case "mssql", "sqlserver", "mssql2016", "mssql2017", "mssql2019", "mssql2022":
+		writer = wmssql.NewWriter(writerOpts)
+
 	case "sqlite", "sqlite3":
 		writer = wsqlite.NewWriter(writerOpts)
 
@@ -1,6 +1,21 @@
 version: '3.8'
 
 services:
+  mssql:
+    image: mcr.microsoft.com/mssql/server:2022-latest
+    environment:
+      - ACCEPT_EULA=Y
+      - SA_PASSWORD=StrongPassword123!
+      - MSSQL_PID=Express
+    ports:
+      - "1433:1433"
+    volumes:
+      - ./test_data/mssql/test_schema.sql:/test_schema.sql
+    healthcheck:
+      test: ["CMD", "/opt/mssql-tools/bin/sqlcmd", "-S", "localhost", "-U", "sa", "-P", "StrongPassword123!", "-Q", "SELECT 1"]
+      interval: 5s
+      timeout: 3s
+      retries: 10
   postgres:
     image: postgres:16-alpine
     container_name: relspec-test-postgres
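For reference, a minimal integration-test sketch against this compose service (a hypothetical test file; it assumes the `integration` build tag convention mentioned in the reader README, and that a `testdb` database has already been loaded from `test_schema.sql`):

```go
//go:build integration

package mssql_test

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
)

// TestReadLiveDatabase connects to the docker-compose mssql service and reads
// its schema end to end. The SA credentials mirror the compose file above;
// the database name is an assumption.
func TestReadLiveDatabase(t *testing.T) {
	reader := mssql.NewReader(&readers.ReaderOptions{
		ConnectionString: "sqlserver://sa:StrongPassword123!@localhost:1433?database=testdb&encrypt=disable",
	})

	db, err := reader.ReadDatabase()
	if err != nil {
		t.Fatalf("ReadDatabase failed: %v", err)
	}
	if len(db.Schemas) == 0 {
		t.Fatal("expected at least one schema")
	}
}
```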
99  pkg/mssql/README.md  Normal file
@@ -0,0 +1,99 @@
# MSSQL Package

Provides utilities for working with Microsoft SQL Server data types and conversions.

## Components

### Type Mapping

Provides bidirectional conversion between canonical types and MSSQL types:

- **CanonicalToMSSQL**: Convert abstract types to MSSQL-specific types
- **MSSQLToCanonical**: Convert MSSQL types to abstract representation

## Type Conversion Tables

### Canonical → MSSQL

| Canonical | MSSQL | Notes |
|-----------|-------|-------|
| int | INT | 32-bit signed integer |
| int64 | BIGINT | 64-bit signed integer |
| int32 | INT | 32-bit signed integer |
| int16 | SMALLINT | 16-bit signed integer |
| int8 | TINYINT | 8-bit integer (TINYINT is unsigned: 0-255) |
| bool | BIT | 0 (false) or 1 (true) |
| float32 | REAL | Single precision floating point |
| float64 | FLOAT | Double precision floating point |
| decimal | NUMERIC | Fixed-point decimal number |
| string | NVARCHAR(255) | Unicode variable-length string |
| text | NVARCHAR(MAX) | Unicode large text |
| timestamp | DATETIME2 | Date and time without timezone |
| timestamptz | DATETIMEOFFSET | Date and time with timezone offset |
| uuid | UNIQUEIDENTIFIER | GUID/UUID type |
| bytea | VARBINARY(MAX) | Variable-length binary data |
| date | DATE | Date only |
| time | TIME | Time only |
| json | NVARCHAR(MAX) | Stored as text (SQL Server 2016+) |
| jsonb | NVARCHAR(MAX) | Stored as text (SQL Server 2016+) |

### MSSQL → Canonical

| MSSQL | Canonical | Notes |
|-------|-----------|-------|
| INT, INTEGER | int | Standard integer |
| BIGINT | int64 | Large integer |
| SMALLINT | int16 | Small integer |
| TINYINT | int8 | Tiny integer |
| BIT | bool | Boolean/bit flag |
| REAL | float32 | Single precision |
| FLOAT | float64 | Double precision |
| NUMERIC, DECIMAL | decimal | Exact decimal |
| NVARCHAR, VARCHAR | string | Variable-length string |
| NCHAR, CHAR | string | Fixed-length string |
| DATETIME2 | timestamp | Default timestamp |
| DATETIMEOFFSET | timestamptz | Timestamp with timezone |
| DATE | date | Date only |
| TIME | time | Time only |
| UNIQUEIDENTIFIER | uuid | UUID/GUID |
| VARBINARY, BINARY | bytea | Binary data |
| XML | string | Stored as text |

## Usage

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
)

func main() {
	// Convert canonical to MSSQL
	mssqlType := mssql.ConvertCanonicalToMSSQL("int")
	fmt.Println(mssqlType) // Output: INT

	// Convert MSSQL to canonical
	canonicalType := mssql.ConvertMSSQLToCanonical("BIGINT")
	fmt.Println(canonicalType) // Output: int64

	// Handle parameterized types
	canonicalType = mssql.ConvertMSSQLToCanonical("NVARCHAR(255)")
	fmt.Println(canonicalType) // Output: string
}
```

## Testing

Run tests with:

```bash
go test ./pkg/mssql/...
```

## Notes

- Type conversions are case-insensitive
- Parameterized types (e.g., `NVARCHAR(255)`) have their base type extracted
- Unmapped MSSQL types default to `string`; unmapped canonical types default to `NVARCHAR(255)`
- The package supports SQL Server 2016 and later versions
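A small sketch of the fallback behavior described in the Notes; the expected outputs follow the mappings in `datatypes.go` in this commit:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
)

func main() {
	// Case-insensitive lookup: mixed-case input resolves the same way.
	fmt.Println(mssql.ConvertMSSQLToCanonical("DateTimeOffset")) // timestamptz

	// Parameterized types are reduced to their base type first.
	fmt.Println(mssql.ConvertMSSQLToCanonical("DECIMAL(18,2)")) // decimal

	// Spatial and other exotic types round-trip through string.
	fmt.Println(mssql.ConvertMSSQLToCanonical("GEOGRAPHY")) // string

	// A canonical type with no mapping falls back to NVARCHAR(255).
	fmt.Println(mssql.ConvertCanonicalToMSSQL("inet")) // NVARCHAR(255)
}
```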
114  pkg/mssql/datatypes.go  Normal file
@@ -0,0 +1,114 @@
package mssql

import "strings"

// CanonicalToMSSQLTypes maps canonical types to MSSQL types
var CanonicalToMSSQLTypes = map[string]string{
	"bool":        "BIT",
	"int8":        "TINYINT",
	"int16":       "SMALLINT",
	"int":         "INT",
	"int32":       "INT",
	"int64":       "BIGINT",
	"uint":        "BIGINT",
	"uint8":       "SMALLINT",
	"uint16":      "INT",
	"uint32":      "BIGINT",
	"uint64":      "BIGINT",
	"float32":     "REAL",
	"float64":     "FLOAT",
	"decimal":     "NUMERIC",
	"string":      "NVARCHAR(255)",
	"text":        "NVARCHAR(MAX)",
	"date":        "DATE",
	"time":        "TIME",
	"timestamp":   "DATETIME2",
	"timestamptz": "DATETIMEOFFSET",
	"uuid":        "UNIQUEIDENTIFIER",
	"json":        "NVARCHAR(MAX)",
	"jsonb":       "NVARCHAR(MAX)",
	"bytea":       "VARBINARY(MAX)",
}

// MSSQLToCanonicalTypes maps MSSQL types to canonical types
var MSSQLToCanonicalTypes = map[string]string{
	"bit":              "bool",
	"tinyint":          "int8",
	"smallint":         "int16",
	"int":              "int",
	"integer":          "int",
	"bigint":           "int64",
	"real":             "float32",
	"float":            "float64",
	"numeric":          "decimal",
	"decimal":          "decimal",
	"money":            "decimal",
	"smallmoney":       "decimal",
	"nvarchar":         "string",
	"nchar":            "string",
	"varchar":          "string",
	"char":             "string",
	"text":             "string",
	"ntext":            "string",
	"date":             "date",
	"time":             "time",
	"datetime":         "timestamp",
	"datetime2":        "timestamp",
	"smalldatetime":    "timestamp",
	"datetimeoffset":   "timestamptz",
	"uniqueidentifier": "uuid",
	"varbinary":        "bytea",
	"binary":           "bytea",
	"image":            "bytea",
	"xml":              "string",
	"json":             "json",
	"sql_variant":      "string",
	"hierarchyid":      "string",
	"geography":        "string",
	"geometry":         "string",
}

// ConvertCanonicalToMSSQL converts a canonical type to MSSQL type
func ConvertCanonicalToMSSQL(canonicalType string) string {
	// Check direct mapping
	if mssqlType, exists := CanonicalToMSSQLTypes[strings.ToLower(canonicalType)]; exists {
		return mssqlType
	}

	// Try to find by prefix
	lowerType := strings.ToLower(canonicalType)
	for canonical, mssql := range CanonicalToMSSQLTypes {
		if strings.HasPrefix(lowerType, canonical) {
			return mssql
		}
	}

	// Default to NVARCHAR
	return "NVARCHAR(255)"
}

// ConvertMSSQLToCanonical converts an MSSQL type to canonical type
func ConvertMSSQLToCanonical(mssqlType string) string {
	// Extract base type (remove parentheses and parameters)
	baseType := mssqlType
	if idx := strings.Index(baseType, "("); idx != -1 {
		baseType = baseType[:idx]
	}
	baseType = strings.TrimSpace(baseType)

	// Check direct mapping
	if canonicalType, exists := MSSQLToCanonicalTypes[strings.ToLower(baseType)]; exists {
		return canonicalType
	}

	// Try to find by prefix
	lowerType := strings.ToLower(baseType)
	for mssql, canonical := range MSSQLToCanonicalTypes {
		if strings.HasPrefix(lowerType, mssql) {
			return canonical
		}
	}

	// Default to string
	return "string"
}
91  pkg/readers/mssql/README.md  Normal file
@@ -0,0 +1,91 @@
# MSSQL Reader

Reads database schema from Microsoft SQL Server databases using a live connection.

## Features

- **Live Connection**: Connects to MSSQL databases using the microsoft/go-mssqldb driver
- **Multi-Schema Support**: Reads multiple schemas with full support for user-defined schemas
- **Comprehensive Metadata**: Reads tables, columns, constraints, indexes, and extended properties
- **Type Mapping**: Converts MSSQL types to canonical types for cross-database compatibility
- **Extended Properties**: Extracts table and column descriptions from MS_Description
- **Identity Columns**: Maps IDENTITY columns to AutoIncrement
- **Relationships**: Derives relationships from foreign key constraints

## Connection String Format

```
sqlserver://[user[:password]@][host][:port][?query]
```

Examples:

```
sqlserver://sa:password@localhost/dbname
sqlserver://user:pass@192.168.1.100:1433/production
sqlserver://localhost/testdb?encrypt=disable
```
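For context, the reader opens these URLs through `database/sql` with the go-mssqldb driver (see `reader.go` below). A minimal standalone sketch, assuming a reachable server and a `testdb` database (credentials are placeholders):

```go
package main

import (
	"database/sql"
	"fmt"

	_ "github.com/microsoft/go-mssqldb" // registers the "mssql" driver used below
)

func main() {
	// Same URL scheme as the examples above; the ?database= form is the
	// documented go-mssqldb way to select a database.
	db, err := sql.Open("mssql", "sqlserver://sa:password@localhost:1433?database=testdb&encrypt=disable")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	// sql.Open is lazy; Ping actually dials the server.
	if err := db.Ping(); err != nil {
		panic(err)
	}
	fmt.Println("connected")
}
```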
## Supported Constraints

- Primary Keys
- Foreign Keys (with ON DELETE and ON UPDATE actions)
- Unique Constraints
- Check Constraints

## Type Mappings

| MSSQL Type | Canonical Type |
|------------|----------------|
| INT | int |
| BIGINT | int64 |
| SMALLINT | int16 |
| TINYINT | int8 |
| BIT | bool |
| REAL | float32 |
| FLOAT | float64 |
| NUMERIC, DECIMAL | decimal |
| NVARCHAR, VARCHAR | string |
| DATETIME2 | timestamp |
| DATETIMEOFFSET | timestamptz |
| UNIQUEIDENTIFIER | uuid |
| VARBINARY | bytea |
| DATE | date |
| TIME | time |

## Usage

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
)

reader := mssql.NewReader(&readers.ReaderOptions{
	ConnectionString: "sqlserver://sa:password@localhost/mydb",
})

db, err := reader.ReadDatabase()
if err != nil {
	panic(err)
}

// Process schema...
for _, schema := range db.Schemas {
	fmt.Printf("Schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		fmt.Printf("  Table: %s\n", table.Name)
	}
}
```

## Testing

Run tests with:

```bash
go test ./pkg/readers/mssql/...
```

For integration testing with a live MSSQL database:

```bash
docker-compose up -d mssql
go test -tags=integration ./pkg/readers/mssql/...
docker-compose down
```
416  pkg/readers/mssql/queries.go  Normal file
@@ -0,0 +1,416 @@
package mssql

import (
	"strings"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// querySchemas retrieves all user-defined schemas from the database
func (r *Reader) querySchemas() ([]*models.Schema, error) {
	query := `
		SELECT s.name, ISNULL(ep.value, '') as description
		FROM sys.schemas s
		LEFT JOIN sys.extended_properties ep
			ON ep.major_id = s.schema_id
			AND ep.minor_id = 0
			AND ep.class = 3
			AND ep.name = 'MS_Description'
		WHERE s.name NOT IN ('dbo', 'guest', 'INFORMATION_SCHEMA', 'sys')
		ORDER BY s.name
	`

	rows, err := r.db.QueryContext(r.ctx, query)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	schemas := make([]*models.Schema, 0)
	for rows.Next() {
		var name, description string
		if err := rows.Scan(&name, &description); err != nil {
			return nil, err
		}

		schema := models.InitSchema(name)
		if description != "" {
			schema.Description = description
		}
		schemas = append(schemas, schema)
	}

	// Always include dbo schema if it has tables
	dboSchema := models.InitSchema("dbo")
	schemas = append(schemas, dboSchema)

	return schemas, rows.Err()
}

// queryTables retrieves all tables for a given schema
func (r *Reader) queryTables(schemaName string) ([]*models.Table, error) {
	query := `
		SELECT t.table_schema, t.table_name, ISNULL(ep.value, '') as description
		FROM information_schema.tables t
		LEFT JOIN sys.extended_properties ep
			ON ep.major_id = OBJECT_ID(QUOTENAME(t.table_schema) + '.' + QUOTENAME(t.table_name))
			AND ep.minor_id = 0
			AND ep.class = 1
			AND ep.name = 'MS_Description'
		WHERE t.table_schema = ? AND t.table_type = 'BASE TABLE'
		ORDER BY t.table_name
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	tables := make([]*models.Table, 0)
	for rows.Next() {
		var schema, tableName, description string
		if err := rows.Scan(&schema, &tableName, &description); err != nil {
			return nil, err
		}

		table := models.InitTable(tableName, schema)
		if description != "" {
			table.Description = description
		}
		tables = append(tables, table)
	}

	return tables, rows.Err()
}
// queryColumns retrieves all columns for tables in a schema
// Returns map[schema.table]map[columnName]*Column
func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.Column, error) {
	query := `
		SELECT
			c.table_schema,
			c.table_name,
			c.column_name,
			c.ordinal_position,
			c.column_default,
			c.is_nullable,
			c.data_type,
			c.character_maximum_length,
			c.numeric_precision,
			c.numeric_scale,
			ISNULL(ep.value, '') as description,
			COLUMNPROPERTY(OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name)), c.column_name, 'IsIdentity') as is_identity
		FROM information_schema.columns c
		LEFT JOIN sys.extended_properties ep
			ON ep.major_id = OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name))
			AND ep.minor_id = COLUMNPROPERTY(OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name)), c.column_name, 'ColumnId')
			AND ep.class = 1
			AND ep.name = 'MS_Description'
		WHERE c.table_schema = ?
		ORDER BY c.table_schema, c.table_name, c.ordinal_position
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	columnsMap := make(map[string]map[string]*models.Column)

	for rows.Next() {
		var schema, tableName, columnName, isNullable, dataType, description string
		var ordinalPosition int
		// column_default is textual in information_schema, so it must be
		// scanned into a *string; the remaining nullable values are numeric.
		var columnDefault *string
		var charMaxLength, numPrecision, numScale, isIdentity *int

		if err := rows.Scan(&schema, &tableName, &columnName, &ordinalPosition, &columnDefault, &isNullable, &dataType, &charMaxLength, &numPrecision, &numScale, &description, &isIdentity); err != nil {
			return nil, err
		}

		column := models.InitColumn(columnName, tableName, schema)
		column.Type = r.mapDataType(dataType)
		column.NotNull = (isNullable == "NO")
		column.Sequence = uint(ordinalPosition)

		if description != "" {
			column.Description = description
		}

		// Check if this is an identity column (auto-increment)
		if isIdentity != nil && *isIdentity == 1 {
			column.AutoIncrement = true
		}

		if charMaxLength != nil && *charMaxLength > 0 {
			column.Length = *charMaxLength
		}

		if numPrecision != nil && *numPrecision > 0 {
			column.Precision = *numPrecision
		}

		if numScale != nil && *numScale > 0 {
			column.Scale = *numScale
		}

		// Create table key
		tableKey := schema + "." + tableName
		if columnsMap[tableKey] == nil {
			columnsMap[tableKey] = make(map[string]*models.Column)
		}
		columnsMap[tableKey][columnName] = column
	}

	return columnsMap, rows.Err()
}
// queryPrimaryKeys retrieves all primary key constraints for a schema
// Returns map[schema.table]*Constraint
func (r *Reader) queryPrimaryKeys(schemaName string) (map[string]*models.Constraint, error) {
	query := `
		SELECT
			s.name as schema_name,
			t.name as table_name,
			i.name as constraint_name,
			STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
		FROM sys.tables t
		INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_primary_key = 1
		INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
		INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
		INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
		WHERE s.name = ?
		GROUP BY s.name, t.name, i.name
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	primaryKeys := make(map[string]*models.Constraint)

	for rows.Next() {
		var schema, tableName, constraintName, columnsStr string
		if err := rows.Scan(&schema, &tableName, &constraintName, &columnsStr); err != nil {
			return nil, err
		}

		columns := strings.Split(columnsStr, ",")

		constraint := models.InitConstraint(constraintName, models.PrimaryKeyConstraint)
		constraint.Schema = schema
		constraint.Table = tableName
		constraint.Columns = columns

		tableKey := schema + "." + tableName
		primaryKeys[tableKey] = constraint
	}

	return primaryKeys, rows.Err()
}

// queryForeignKeys retrieves all foreign key constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryForeignKeys(schemaName string) (map[string][]*models.Constraint, error) {
	query := `
		SELECT
			s.name as schema_name,
			t.name as table_name,
			fk.name as constraint_name,
			rs.name as referenced_schema,
			rt.name as referenced_table,
			STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY fkc.constraint_column_id) as columns,
			STRING_AGG(rc.name, ',') WITHIN GROUP (ORDER BY fkc.constraint_column_id) as referenced_columns,
			fk.delete_referential_action_desc,
			fk.update_referential_action_desc
		FROM sys.foreign_keys fk
		INNER JOIN sys.tables t ON fk.parent_object_id = t.object_id
		INNER JOIN sys.tables rt ON fk.referenced_object_id = rt.object_id
		INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
		INNER JOIN sys.schemas rs ON rt.schema_id = rs.schema_id
		INNER JOIN sys.foreign_key_columns fkc ON fk.object_id = fkc.constraint_object_id
		INNER JOIN sys.columns c ON fkc.parent_object_id = c.object_id AND fkc.parent_column_id = c.column_id
		INNER JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id
		WHERE s.name = ?
		GROUP BY s.name, t.name, fk.name, rs.name, rt.name, fk.delete_referential_action_desc, fk.update_referential_action_desc
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	foreignKeys := make(map[string][]*models.Constraint)

	for rows.Next() {
		var schema, tableName, constraintName, refSchema, refTable, columnsStr, refColumnsStr, deleteAction, updateAction string
		if err := rows.Scan(&schema, &tableName, &constraintName, &refSchema, &refTable, &columnsStr, &refColumnsStr, &deleteAction, &updateAction); err != nil {
			return nil, err
		}

		columns := strings.Split(columnsStr, ",")
		refColumns := strings.Split(refColumnsStr, ",")

		constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
		constraint.Schema = schema
		constraint.Table = tableName
		constraint.Columns = columns
		constraint.ReferencedSchema = refSchema
		constraint.ReferencedTable = refTable
		constraint.ReferencedColumns = refColumns
		constraint.OnDelete = strings.ToUpper(deleteAction)
		constraint.OnUpdate = strings.ToUpper(updateAction)

		tableKey := schema + "." + tableName
		foreignKeys[tableKey] = append(foreignKeys[tableKey], constraint)
	}

	return foreignKeys, rows.Err()
}

// queryUniqueConstraints retrieves all unique constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryUniqueConstraints(schemaName string) (map[string][]*models.Constraint, error) {
	query := `
		SELECT
			s.name as schema_name,
			t.name as table_name,
			i.name as constraint_name,
			STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
		FROM sys.tables t
		INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_unique = 1 AND i.is_primary_key = 0
		INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
		INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
		INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
		WHERE s.name = ?
		GROUP BY s.name, t.name, i.name
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	uniqueConstraints := make(map[string][]*models.Constraint)

	for rows.Next() {
		var schema, tableName, constraintName, columnsStr string
		if err := rows.Scan(&schema, &tableName, &constraintName, &columnsStr); err != nil {
			return nil, err
		}

		columns := strings.Split(columnsStr, ",")

		constraint := models.InitConstraint(constraintName, models.UniqueConstraint)
		constraint.Schema = schema
		constraint.Table = tableName
		constraint.Columns = columns

		tableKey := schema + "." + tableName
		uniqueConstraints[tableKey] = append(uniqueConstraints[tableKey], constraint)
	}

	return uniqueConstraints, rows.Err()
}

// queryCheckConstraints retrieves all check constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryCheckConstraints(schemaName string) (map[string][]*models.Constraint, error) {
	query := `
		SELECT
			s.name as schema_name,
			t.name as table_name,
			cc.name as constraint_name,
			cc.definition
		FROM sys.tables t
		INNER JOIN sys.check_constraints cc ON t.object_id = cc.parent_object_id
		INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
		WHERE s.name = ?
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	checkConstraints := make(map[string][]*models.Constraint)

	for rows.Next() {
		var schema, tableName, constraintName, definition string
		if err := rows.Scan(&schema, &tableName, &constraintName, &definition); err != nil {
			return nil, err
		}

		constraint := models.InitConstraint(constraintName, models.CheckConstraint)
		constraint.Schema = schema
		constraint.Table = tableName
		constraint.Expression = definition

		tableKey := schema + "." + tableName
		checkConstraints[tableKey] = append(checkConstraints[tableKey], constraint)
	}

	return checkConstraints, rows.Err()
}

// queryIndexes retrieves all indexes for a schema
// Returns map[schema.table][]*Index
func (r *Reader) queryIndexes(schemaName string) (map[string][]*models.Index, error) {
	query := `
		SELECT
			s.name as schema_name,
			t.name as table_name,
			i.name as index_name,
			i.is_unique,
			STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
		FROM sys.tables t
		INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_primary_key = 0 AND i.name IS NOT NULL
		INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
		INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
		INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
		WHERE s.name = ?
		GROUP BY s.name, t.name, i.name, i.is_unique
	`

	rows, err := r.db.QueryContext(r.ctx, query, schemaName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	indexes := make(map[string][]*models.Index)

	for rows.Next() {
		var schema, tableName, indexName, columnsStr string
		var isUnique int
		if err := rows.Scan(&schema, &tableName, &indexName, &isUnique, &columnsStr); err != nil {
			return nil, err
		}

		columns := strings.Split(columnsStr, ",")

		index := models.InitIndex(indexName, tableName, schema)
		index.Columns = columns
		index.Unique = (isUnique == 1)
		index.Type = "btree" // MSSQL uses btree by default

		tableKey := schema + "." + tableName
		indexes[tableKey] = append(indexes[tableKey], index)
	}

	return indexes, rows.Err()
}
266  pkg/readers/mssql/reader.go  Normal file
@@ -0,0 +1,266 @@
package mssql

import (
	"context"
	"database/sql"
	"fmt"

	_ "github.com/microsoft/go-mssqldb" // MSSQL driver

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// Reader implements the readers.Reader interface for MSSQL databases
type Reader struct {
	options *readers.ReaderOptions
	db      *sql.DB
	ctx     context.Context
}

// NewReader creates a new MSSQL reader
func NewReader(options *readers.ReaderOptions) *Reader {
	return &Reader{
		options: options,
		ctx:     context.Background(),
	}
}

// ReadDatabase reads the entire database schema from MSSQL
func (r *Reader) ReadDatabase() (*models.Database, error) {
	// Validate connection string
	if r.options.ConnectionString == "" {
		return nil, fmt.Errorf("connection string is required")
	}

	// Connect to the database
	if err := r.connect(); err != nil {
		return nil, fmt.Errorf("failed to connect: %w", err)
	}
	defer r.close()

	// Get database name
	var dbName string
	err := r.db.QueryRowContext(r.ctx, "SELECT DB_NAME()").Scan(&dbName)
	if err != nil {
		return nil, fmt.Errorf("failed to get database name: %w", err)
	}

	// Initialize database model
	db := models.InitDatabase(dbName)
	db.DatabaseType = models.MSSQLDatabaseType
	db.SourceFormat = "mssql"

	// Get MSSQL version
	var version string
	err = r.db.QueryRowContext(r.ctx, "SELECT @@VERSION").Scan(&version)
	if err == nil {
		db.DatabaseVersion = version
	}

	// Query all schemas
	schemas, err := r.querySchemas()
	if err != nil {
		return nil, fmt.Errorf("failed to query schemas: %w", err)
	}

	// Process each schema
	for _, schema := range schemas {
		// Query tables for this schema
		tables, err := r.queryTables(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query tables for schema %s: %w", schema.Name, err)
		}
		schema.Tables = tables

		// Query columns for tables
		columnsMap, err := r.queryColumns(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query columns for schema %s: %w", schema.Name, err)
		}

		// Populate table columns
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if cols, exists := columnsMap[tableKey]; exists {
				table.Columns = cols
			}
		}

		// Query primary keys
		primaryKeys, err := r.queryPrimaryKeys(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query primary keys for schema %s: %w", schema.Name, err)
		}

		// Apply primary keys to tables
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if pk, exists := primaryKeys[tableKey]; exists {
				table.Constraints[pk.Name] = pk
				// Mark columns as primary key and not null
				for _, colName := range pk.Columns {
					if col, colExists := table.Columns[colName]; colExists {
						col.IsPrimaryKey = true
						col.NotNull = true
					}
				}
			}
		}

		// Query foreign keys
		foreignKeys, err := r.queryForeignKeys(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query foreign keys for schema %s: %w", schema.Name, err)
		}

		// Apply foreign keys to tables
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if fks, exists := foreignKeys[tableKey]; exists {
				for _, fk := range fks {
					table.Constraints[fk.Name] = fk
					// Derive relationship from foreign key
					r.deriveRelationship(table, fk)
				}
			}
		}

		// Query unique constraints
		uniqueConstraints, err := r.queryUniqueConstraints(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query unique constraints for schema %s: %w", schema.Name, err)
		}

		// Apply unique constraints to tables
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if ucs, exists := uniqueConstraints[tableKey]; exists {
				for _, uc := range ucs {
					table.Constraints[uc.Name] = uc
				}
			}
		}

		// Query check constraints
		checkConstraints, err := r.queryCheckConstraints(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query check constraints for schema %s: %w", schema.Name, err)
		}

		// Apply check constraints to tables
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if ccs, exists := checkConstraints[tableKey]; exists {
				for _, cc := range ccs {
					table.Constraints[cc.Name] = cc
				}
			}
		}

		// Query indexes
		indexes, err := r.queryIndexes(schema.Name)
		if err != nil {
			return nil, fmt.Errorf("failed to query indexes for schema %s: %w", schema.Name, err)
		}

		// Apply indexes to tables
		for _, table := range schema.Tables {
			tableKey := schema.Name + "." + table.Name
			if idxs, exists := indexes[tableKey]; exists {
				for _, idx := range idxs {
					table.Indexes[idx.Name] = idx
				}
			}
		}

		// Set RefDatabase for schema
		schema.RefDatabase = db

		// Set RefSchema for tables
		for _, table := range schema.Tables {
			table.RefSchema = schema
		}

		// Add schema to database
		db.Schemas = append(db.Schemas, schema)
	}

	return db, nil
}

// ReadSchema reads a single schema (returns the first schema from the database)
func (r *Reader) ReadSchema() (*models.Schema, error) {
	db, err := r.ReadDatabase()
	if err != nil {
		return nil, err
	}
	if len(db.Schemas) == 0 {
		return nil, fmt.Errorf("no schemas found in database")
	}
	return db.Schemas[0], nil
}

// ReadTable reads a single table (returns the first table from the first schema)
func (r *Reader) ReadTable() (*models.Table, error) {
	schema, err := r.ReadSchema()
	if err != nil {
		return nil, err
	}
	if len(schema.Tables) == 0 {
		return nil, fmt.Errorf("no tables found in schema")
	}
	return schema.Tables[0], nil
}

// connect establishes a connection to the MSSQL database
func (r *Reader) connect() error {
	db, err := sql.Open("mssql", r.options.ConnectionString)
	if err != nil {
		return err
	}

	// Test connection
	if err = db.PingContext(r.ctx); err != nil {
		db.Close()
		return err
	}

	r.db = db
	return nil
}

// close closes the database connection
func (r *Reader) close() {
	if r.db != nil {
		r.db.Close()
	}
}

// mapDataType maps MSSQL data types to canonical types
func (r *Reader) mapDataType(mssqlType string) string {
	return mssql.ConvertMSSQLToCanonical(mssqlType)
}

// deriveRelationship creates a relationship from a foreign key constraint
func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint) {
	relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)

	relationship := models.InitRelationship(relationshipName, models.OneToMany)
	relationship.FromTable = table.Name
	relationship.FromSchema = table.Schema
	relationship.ToTable = fk.ReferencedTable
	relationship.ToSchema = fk.ReferencedSchema
	relationship.ForeignKey = fk.Name

	// Store constraint actions in properties
	if fk.OnDelete != "" {
		relationship.Properties["on_delete"] = fk.OnDelete
	}
	if fk.OnUpdate != "" {
		relationship.Properties["on_update"] = fk.OnUpdate
	}

	table.Relationships[relationshipName] = relationship
}
86  pkg/readers/mssql/reader_test.go  Normal file
@@ -0,0 +1,86 @@
package mssql

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"github.com/stretchr/testify/assert"
)

// TestMapDataType tests MSSQL type mapping to canonical types
func TestMapDataType(t *testing.T) {
	reader := NewReader(&readers.ReaderOptions{})

	tests := []struct {
		name         string
		mssqlType    string
		expectedType string
	}{
		{"INT to int", "INT", "int"},
		{"BIGINT to int64", "BIGINT", "int64"},
		{"BIT to bool", "BIT", "bool"},
		{"NVARCHAR to string", "NVARCHAR(255)", "string"},
		{"DATETIME2 to timestamp", "DATETIME2", "timestamp"},
		{"DATETIMEOFFSET to timestamptz", "DATETIMEOFFSET", "timestamptz"},
		{"UNIQUEIDENTIFIER to uuid", "UNIQUEIDENTIFIER", "uuid"},
		{"FLOAT to float64", "FLOAT", "float64"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := reader.mapDataType(tt.mssqlType)
			assert.Equal(t, tt.expectedType, result)
		})
	}
}

// TestConvertCanonicalToMSSQL tests canonical to MSSQL type conversion
func TestConvertCanonicalToMSSQL(t *testing.T) {
	tests := []struct {
		name          string
		canonicalType string
		expectedMSSQL string
	}{
		{"int to INT", "int", "INT"},
		{"int64 to BIGINT", "int64", "BIGINT"},
		{"bool to BIT", "bool", "BIT"},
		{"string to NVARCHAR(255)", "string", "NVARCHAR(255)"},
		{"text to NVARCHAR(MAX)", "text", "NVARCHAR(MAX)"},
		{"timestamp to DATETIME2", "timestamp", "DATETIME2"},
		{"timestamptz to DATETIMEOFFSET", "timestamptz", "DATETIMEOFFSET"},
		{"uuid to UNIQUEIDENTIFIER", "uuid", "UNIQUEIDENTIFIER"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := mssql.ConvertCanonicalToMSSQL(tt.canonicalType)
			assert.Equal(t, tt.expectedMSSQL, result)
		})
	}
}

// TestConvertMSSQLToCanonical tests MSSQL to canonical type conversion
func TestConvertMSSQLToCanonical(t *testing.T) {
	tests := []struct {
		name         string
		mssqlType    string
		expectedType string
	}{
		{"INT to int", "INT", "int"},
		{"BIGINT to int64", "BIGINT", "int64"},
		{"BIT to bool", "BIT", "bool"},
		{"NVARCHAR with params", "NVARCHAR(255)", "string"},
		{"DATETIME2 to timestamp", "DATETIME2", "timestamp"},
		{"DATETIMEOFFSET to timestamptz", "DATETIMEOFFSET", "timestamptz"},
		{"UNIQUEIDENTIFIER to uuid", "UNIQUEIDENTIFIER", "uuid"},
		{"VARBINARY to bytea", "VARBINARY(MAX)", "bytea"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := mssql.ConvertMSSQLToCanonical(tt.mssqlType)
			assert.Equal(t, tt.expectedType, result)
		})
	}
}
130  pkg/writers/mssql/README.md  Normal file
@@ -0,0 +1,130 @@
# MSSQL Writer

Generates Microsoft SQL Server DDL (Data Definition Language) from database schema models.

## Features

- **DDL Generation**: Generates complete SQL scripts for creating an MSSQL schema
- **Schema Support**: Creates multiple schemas with proper naming
- **Bracket Notation**: Uses [schema].[table] bracket notation for identifiers
- **Identity Columns**: Generates IDENTITY(1,1) for auto-increment columns
- **Constraints**: Generates primary key, foreign key, unique, and check constraints
- **Indexes**: Creates indexes, including unique indexes
- **Extended Properties**: Uses sp_addextendedproperty for comments
- **Direct Execution**: Can execute the generated DDL directly against an MSSQL database
- **Schema Flattening**: Optional schema flattening for compatibility

## Features by Phase

DDL is generated in eight ordered phases so that every object exists before anything references it (a short sketch follows this list):

1. **Phase 1**: Create schemas
2. **Phase 2**: Create tables with columns, identity, and defaults
3. **Phase 3**: Add primary key constraints
4. **Phase 4**: Create indexes
5. **Phase 5**: Add unique constraints
6. **Phase 6**: Add check constraints
7. **Phase 7**: Add foreign key constraints
8. **Phase 8**: Add extended properties (comments)
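A minimal sketch of this ordering in practice, using the constructors visible elsewhere in this commit (`InitSchema`, `InitTable`, `InitColumn`, `InitConstraint`); it assumes `InitTable` allocates the `Columns` and `Constraints` maps:

```go
package main

import (
	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	wmssql "git.warky.dev/wdevs/relspecgo/pkg/writers/mssql"
)

func main() {
	schema := models.InitSchema("sales")

	orders := models.InitTable("orders", "sales")
	id := models.InitColumn("id", "orders", "sales")
	id.Type = "int64"         // rendered as BIGINT
	id.AutoIncrement = true   // rendered as IDENTITY(1,1) in phase 2
	orders.Columns["id"] = id // assumes InitTable allocated the map

	pk := models.InitConstraint("pk_orders", models.PrimaryKeyConstraint)
	pk.Schema, pk.Table, pk.Columns = "sales", "orders", []string{"id"}
	orders.Constraints[pk.Name] = pk // emitted in phase 3, after the table exists

	schema.Tables = append(schema.Tables, orders)

	// With no OutputPath set, the writer prints the phased DDL to stdout.
	w := wmssql.NewWriter(&writers.WriterOptions{})
	if err := w.WriteSchema(schema); err != nil {
		panic(err)
	}
}
```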
## Type Mappings

| Canonical Type | MSSQL Type |
|----------------|------------|
| int | INT |
| int64 | BIGINT |
| int16 | SMALLINT |
| int8 | TINYINT |
| bool | BIT |
| float32 | REAL |
| float64 | FLOAT |
| decimal | NUMERIC |
| string | NVARCHAR(255) |
| text | NVARCHAR(MAX) |
| timestamp | DATETIME2 |
| timestamptz | DATETIMEOFFSET |
| uuid | UNIQUEIDENTIFIER |
| bytea | VARBINARY(MAX) |
| date | DATE |
| time | TIME |

## Usage

### Generate SQL File

```go
import (
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/mssql"
)

writer := mssql.NewWriter(&writers.WriterOptions{
	OutputPath:    "schema.sql",
	FlattenSchema: false,
})

err := writer.WriteDatabase(db)
if err != nil {
	panic(err)
}
```

### Direct Database Execution

```go
writer := mssql.NewWriter(&writers.WriterOptions{
	OutputPath: "",
	Metadata: map[string]interface{}{
		"connection_string": "sqlserver://sa:password@localhost/newdb",
	},
})

err := writer.WriteDatabase(db)
if err != nil {
	panic(err)
}
```

### CLI Usage

Generate a SQL file:

```bash
relspec convert --from json --from-path schema.json \
  --to mssql --to-path schema.sql
```

Execute directly against a database:

```bash
relspec convert --from json --from-path schema.json \
  --to mssql \
  --metadata '{"connection_string":"sqlserver://sa:password@localhost/mydb"}'
```

## Default Values

The writer supports several default value patterns:

- Functions: `GETDATE()`, `CURRENT_TIMESTAMP`
- Literals: strings wrapped in quotes, numbers, booleans (0/1 for BIT)
- CAST expressions

## Comments/Extended Properties

Table and column descriptions are stored as MS_Description extended properties:

```sql
EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'Table description here',
    @level0type = 'SCHEMA', @level0name = 'dbo',
    @level1type = 'TABLE', @level1name = 'my_table';
```
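Column-level descriptions use the same stored procedure with one extra level (`@level2type = 'COLUMN'`, which is standard T-SQL). A hedged sketch of issuing one by hand through `database/sql`; the writer generates these statements itself, and the connection details here are placeholders:

```go
package main

import (
	"database/sql"

	_ "github.com/microsoft/go-mssqldb"
)

func main() {
	db, err := sql.Open("mssql", "sqlserver://sa:password@localhost:1433?database=mydb")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	// Column comments add @level2type/@level2name on top of the table form above.
	_, err = db.Exec(`EXEC sp_addextendedproperty
        @name = 'MS_Description',
        @value = 'Primary key for my_table',
        @level0type = 'SCHEMA', @level0name = 'dbo',
        @level1type = 'TABLE',  @level1name = 'my_table',
        @level2type = 'COLUMN', @level2name = 'id'`)
	if err != nil {
		panic(err)
	}
}
```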
## Testing

Run tests with:

```bash
go test ./pkg/writers/mssql/...
```

## Limitations

- Views are not currently supported in the writer
- Sequences are not supported (MSSQL uses IDENTITY instead)
- Partitioning and advanced features are not supported
- Generated DDL assumes no triggers or computed columns
579  pkg/writers/mssql/writer.go  Normal file
@@ -0,0 +1,579 @@
|
|||||||
|
package mssql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
_ "github.com/microsoft/go-mssqldb" // MSSQL driver
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/models"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/mssql"
|
||||||
|
"git.warky.dev/wdevs/relspecgo/pkg/writers"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Writer implements the Writer interface for MSSQL SQL output
|
||||||
|
type Writer struct {
|
||||||
|
options *writers.WriterOptions
|
||||||
|
writer io.Writer
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWriter creates a new MSSQL SQL writer
|
||||||
|
func NewWriter(options *writers.WriterOptions) *Writer {
|
||||||
|
return &Writer{
|
||||||
|
options: options,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// qualTable returns a schema-qualified name using bracket notation
|
||||||
|
func (w *Writer) qualTable(schema, name string) string {
|
||||||
|
if w.options.FlattenSchema {
|
||||||
|
return fmt.Sprintf("[%s]", name)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("[%s].[%s]", schema, name)
|
||||||
|
}

// WriteDatabase writes the entire database schema as SQL
func (w *Writer) WriteDatabase(db *models.Database) error {
	// Check if we should execute SQL directly on a database
	if connString, ok := w.options.Metadata["connection_string"].(string); ok && connString != "" {
		return w.executeDatabaseSQL(db, connString)
	}

	var writer io.Writer
	var file *os.File
	var err error

	// Use existing writer if already set (for testing)
	if w.writer != nil {
		writer = w.writer
	} else if w.options.OutputPath != "" {
		// Determine output destination
		file, err = os.Create(w.options.OutputPath)
		if err != nil {
			return fmt.Errorf("failed to create output file: %w", err)
		}
		defer file.Close()
		writer = file
	} else {
		writer = os.Stdout
	}

	w.writer = writer

	// Write header comment
	fmt.Fprintf(w.writer, "-- MSSQL Database Schema\n")
	fmt.Fprintf(w.writer, "-- Database: %s\n", db.Name)
	fmt.Fprintf(w.writer, "-- Generated by RelSpec\n\n")

	// Process each schema in the database
	for _, schema := range db.Schemas {
		if err := w.WriteSchema(schema); err != nil {
			return fmt.Errorf("failed to write schema %s: %w", schema.Name, err)
		}
	}

	return nil
}

// WriteSchema writes a single schema and all its tables
func (w *Writer) WriteSchema(schema *models.Schema) error {
	if w.writer == nil {
		w.writer = os.Stdout
	}

	// Phase 1: Create schema (skip dbo schema and when flattening)
	if schema.Name != "dbo" && !w.options.FlattenSchema {
		fmt.Fprintf(w.writer, "-- Schema: %s\n", schema.Name)
		fmt.Fprintf(w.writer, "CREATE SCHEMA [%s];\n\n", schema.Name)
	}

	// Phase 2: Create tables with columns
	fmt.Fprintf(w.writer, "-- Tables for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeCreateTable(schema, table); err != nil {
			return err
		}
	}

	// Phase 3: Primary keys
	fmt.Fprintf(w.writer, "-- Primary keys for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writePrimaryKey(schema, table); err != nil {
			return err
		}
	}

	// Phase 4: Indexes
	fmt.Fprintf(w.writer, "-- Indexes for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeIndexes(schema, table); err != nil {
			return err
		}
	}

	// Phase 5: Unique constraints
	fmt.Fprintf(w.writer, "-- Unique constraints for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeUniqueConstraints(schema, table); err != nil {
			return err
		}
	}

	// Phase 6: Check constraints
	fmt.Fprintf(w.writer, "-- Check constraints for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeCheckConstraints(schema, table); err != nil {
			return err
		}
	}

	// Phase 7: Foreign keys
	fmt.Fprintf(w.writer, "-- Foreign keys for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeForeignKeys(schema, table); err != nil {
			return err
		}
	}

	// Phase 8: Comments
	fmt.Fprintf(w.writer, "-- Comments for schema: %s\n", schema.Name)
	for _, table := range schema.Tables {
		if err := w.writeComments(schema, table); err != nil {
			return err
		}
	}

	return nil
}

// WriteTable writes a single table with all its elements
func (w *Writer) WriteTable(table *models.Table) error {
	if w.writer == nil {
		w.writer = os.Stdout
	}

	// Create a temporary schema with just this table
	schema := models.InitSchema(table.Schema)
	schema.Tables = append(schema.Tables, table)

	return w.WriteSchema(schema)
}

// writeCreateTable generates CREATE TABLE statement
func (w *Writer) writeCreateTable(schema *models.Schema, table *models.Table) error {
	fmt.Fprintf(w.writer, "CREATE TABLE %s (\n", w.qualTable(schema.Name, table.Name))

	// Sort columns by sequence
	columns := getSortedColumns(table.Columns)
	columnDefs := make([]string, 0, len(columns))

	for _, col := range columns {
		def := w.generateColumnDefinition(col)
		columnDefs = append(columnDefs, " "+def)
	}

	fmt.Fprintf(w.writer, "%s\n", strings.Join(columnDefs, ",\n"))
	fmt.Fprintf(w.writer, ");\n\n")

	return nil
}

// generateColumnDefinition generates MSSQL column definition
func (w *Writer) generateColumnDefinition(col *models.Column) string {
	parts := []string{fmt.Sprintf("[%s]", col.Name)}

	// Type with length/precision
	baseType := mssql.ConvertCanonicalToMSSQL(col.Type)
	typeStr := baseType

	// Handle specific type parameters for MSSQL
	if col.Length > 0 && col.Precision == 0 {
		// String types with length - override the default length from baseType
		if strings.HasPrefix(baseType, "NVARCHAR") || strings.HasPrefix(baseType, "VARCHAR") ||
			strings.HasPrefix(baseType, "CHAR") || strings.HasPrefix(baseType, "NCHAR") {
			if col.Length > 0 && col.Length < 8000 {
				// Extract base type without length specification
				baseName := strings.Split(baseType, "(")[0]
				typeStr = fmt.Sprintf("%s(%d)", baseName, col.Length)
			}
		}
	} else if col.Precision > 0 {
		// Numeric types with precision/scale
		baseName := strings.Split(baseType, "(")[0]
		if col.Scale > 0 {
			typeStr = fmt.Sprintf("%s(%d,%d)", baseName, col.Precision, col.Scale)
		} else {
			typeStr = fmt.Sprintf("%s(%d)", baseName, col.Precision)
		}
	}

	parts = append(parts, typeStr)

	// IDENTITY for auto-increment
	if col.AutoIncrement {
		parts = append(parts, "IDENTITY(1,1)")
	}

	// NOT NULL
	if col.NotNull {
		parts = append(parts, "NOT NULL")
	}

	// DEFAULT
	if col.Default != nil {
		switch v := col.Default.(type) {
		case string:
			cleanDefault := stripBackticks(v)
			if strings.HasPrefix(strings.ToUpper(cleanDefault), "GETDATE") ||
				strings.HasPrefix(strings.ToUpper(cleanDefault), "CURRENT_") {
				parts = append(parts, fmt.Sprintf("DEFAULT %s", cleanDefault))
			} else if cleanDefault == "true" || cleanDefault == "false" {
				if cleanDefault == "true" {
					parts = append(parts, "DEFAULT 1")
				} else {
					parts = append(parts, "DEFAULT 0")
				}
			} else {
				parts = append(parts, fmt.Sprintf("DEFAULT '%s'", escapeQuote(cleanDefault)))
			}
		case bool:
			if v {
				parts = append(parts, "DEFAULT 1")
			} else {
				parts = append(parts, "DEFAULT 0")
			}
		case int, int64:
			parts = append(parts, fmt.Sprintf("DEFAULT %v", v))
		}
	}

	return strings.Join(parts, " ")
}
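
// Illustration: sample definitions this function produces (these exact
// shapes are asserted by the unit tests below):
//
//	{Name: "id", Type: "int", AutoIncrement: true, NotNull: true}
//	    => "[id] INT IDENTITY(1,1) NOT NULL"
//	{Name: "name", Type: "string", Length: 100, NotNull: true}
//	    => "[name] NVARCHAR(100) NOT NULL"
//	{Name: "created_at", Type: "timestamp", NotNull: true, Default: "GETDATE()"}
//	    => "[created_at] DATETIME2 NOT NULL DEFAULT GETDATE()"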

// writePrimaryKey generates ALTER TABLE statement for primary key
func (w *Writer) writePrimaryKey(schema *models.Schema, table *models.Table) error {
	// Find primary key constraint
	var pkConstraint *models.Constraint
	for _, constraint := range table.Constraints {
		if constraint.Type == models.PrimaryKeyConstraint {
			pkConstraint = constraint
			break
		}
	}

	var columnNames []string
	pkName := fmt.Sprintf("PK_%s_%s", schema.Name, table.Name)

	if pkConstraint != nil {
		pkName = pkConstraint.Name
		columnNames = make([]string, 0, len(pkConstraint.Columns))
		for _, colName := range pkConstraint.Columns {
			columnNames = append(columnNames, fmt.Sprintf("[%s]", colName))
		}
	} else {
		// Check for columns with IsPrimaryKey = true
		for _, col := range table.Columns {
			if col.IsPrimaryKey {
				columnNames = append(columnNames, fmt.Sprintf("[%s]", col.Name))
			}
		}
		sort.Strings(columnNames)
	}

	if len(columnNames) == 0 {
		return nil
	}

	fmt.Fprintf(w.writer, "ALTER TABLE %s ADD CONSTRAINT [%s] PRIMARY KEY (%s);\n\n",
		w.qualTable(schema.Name, table.Name), pkName, strings.Join(columnNames, ", "))

	return nil
}
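
// Illustration: for a "users" table in "dbo" with constraint PK_users_id on
// column "id", this emits:
//
//	ALTER TABLE [dbo].[users] ADD CONSTRAINT [PK_users_id] PRIMARY KEY ([id]);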

// writeIndexes generates CREATE INDEX statements
func (w *Writer) writeIndexes(schema *models.Schema, table *models.Table) error {
	// Sort indexes by name
	indexNames := make([]string, 0, len(table.Indexes))
	for name := range table.Indexes {
		indexNames = append(indexNames, name)
	}
	sort.Strings(indexNames)

	for _, name := range indexNames {
		index := table.Indexes[name]

		// Skip if it's a primary key index
		if strings.HasPrefix(strings.ToLower(index.Name), "pk_") {
			continue
		}

		// Build column list
		columnExprs := make([]string, 0, len(index.Columns))
		for _, colName := range index.Columns {
			columnExprs = append(columnExprs, fmt.Sprintf("[%s]", colName))
		}

		if len(columnExprs) == 0 {
			continue
		}

		unique := ""
		if index.Unique {
			unique = "UNIQUE "
		}

		fmt.Fprintf(w.writer, "CREATE %sINDEX [%s] ON %s (%s);\n\n",
			unique, index.Name, w.qualTable(schema.Name, table.Name), strings.Join(columnExprs, ", "))
	}

	return nil
}
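
// Illustration: a non-unique index IDX_users_email on dbo.users(email) emits:
//
//	CREATE INDEX [IDX_users_email] ON [dbo].[users] ([email]);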

// writeUniqueConstraints generates ALTER TABLE statements for unique constraints
func (w *Writer) writeUniqueConstraints(schema *models.Schema, table *models.Table) error {
	// Sort constraints by name
	constraintNames := make([]string, 0)
	for name, constraint := range table.Constraints {
		if constraint.Type == models.UniqueConstraint {
			constraintNames = append(constraintNames, name)
		}
	}
	sort.Strings(constraintNames)

	for _, name := range constraintNames {
		constraint := table.Constraints[name]

		// Build column list
		columnExprs := make([]string, 0, len(constraint.Columns))
		for _, colName := range constraint.Columns {
			columnExprs = append(columnExprs, fmt.Sprintf("[%s]", colName))
		}

		if len(columnExprs) == 0 {
			continue
		}

		fmt.Fprintf(w.writer, "ALTER TABLE %s ADD CONSTRAINT [%s] UNIQUE (%s);\n\n",
			w.qualTable(schema.Name, table.Name), constraint.Name, strings.Join(columnExprs, ", "))
	}

	return nil
}

// writeCheckConstraints generates ALTER TABLE statements for check constraints
func (w *Writer) writeCheckConstraints(schema *models.Schema, table *models.Table) error {
	// Sort constraints by name
	constraintNames := make([]string, 0)
	for name, constraint := range table.Constraints {
		if constraint.Type == models.CheckConstraint {
			constraintNames = append(constraintNames, name)
		}
	}
	sort.Strings(constraintNames)

	for _, name := range constraintNames {
		constraint := table.Constraints[name]

		if constraint.Expression == "" {
			continue
		}

		fmt.Fprintf(w.writer, "ALTER TABLE %s ADD CONSTRAINT [%s] CHECK (%s);\n\n",
			w.qualTable(schema.Name, table.Name), constraint.Name, constraint.Expression)
	}

	return nil
}

// writeForeignKeys generates ALTER TABLE statements for foreign keys
func (w *Writer) writeForeignKeys(schema *models.Schema, table *models.Table) error {
	// Process foreign key constraints
	constraintNames := make([]string, 0)
	for name, constraint := range table.Constraints {
		if constraint.Type == models.ForeignKeyConstraint {
			constraintNames = append(constraintNames, name)
		}
	}
	sort.Strings(constraintNames)

	for _, name := range constraintNames {
		constraint := table.Constraints[name]

		// Build column lists
		sourceColumns := make([]string, 0, len(constraint.Columns))
		for _, colName := range constraint.Columns {
			sourceColumns = append(sourceColumns, fmt.Sprintf("[%s]", colName))
		}

		targetColumns := make([]string, 0, len(constraint.ReferencedColumns))
		for _, colName := range constraint.ReferencedColumns {
			targetColumns = append(targetColumns, fmt.Sprintf("[%s]", colName))
		}

		if len(sourceColumns) == 0 || len(targetColumns) == 0 {
			continue
		}

		refSchema := constraint.ReferencedSchema
		if refSchema == "" {
			refSchema = schema.Name
		}

		onDelete := "NO ACTION"
		if constraint.OnDelete != "" {
			onDelete = strings.ToUpper(constraint.OnDelete)
		}

		onUpdate := "NO ACTION"
		if constraint.OnUpdate != "" {
			onUpdate = strings.ToUpper(constraint.OnUpdate)
		}

		fmt.Fprintf(w.writer, "ALTER TABLE %s ADD CONSTRAINT [%s] FOREIGN KEY (%s)\n",
			w.qualTable(schema.Name, table.Name), constraint.Name, strings.Join(sourceColumns, ", "))
		fmt.Fprintf(w.writer, " REFERENCES %s (%s)\n",
			w.qualTable(refSchema, constraint.ReferencedTable), strings.Join(targetColumns, ", "))
		fmt.Fprintf(w.writer, " ON DELETE %s ON UPDATE %s;\n\n",
			onDelete, onUpdate)
	}

	return nil
}
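
// Illustration: for an "orders" table referencing "users", the emitted SQL
// has this shape (exercised by TestWriteForeignKey below):
//
//	ALTER TABLE [dbo].[orders] ADD CONSTRAINT [FK_orders_users] FOREIGN KEY ([user_id])
//	 REFERENCES [dbo].[users] ([id])
//	 ON DELETE CASCADE ON UPDATE NO ACTION;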

// writeComments generates EXEC sp_addextendedproperty statements for table and column descriptions
func (w *Writer) writeComments(schema *models.Schema, table *models.Table) error {
	// Table comment
	if table.Description != "" {
		fmt.Fprintf(w.writer, "EXEC sp_addextendedproperty\n")
		fmt.Fprintf(w.writer, " @name = 'MS_Description',\n")
		fmt.Fprintf(w.writer, " @value = '%s',\n", escapeQuote(table.Description))
		fmt.Fprintf(w.writer, " @level0type = 'SCHEMA', @level0name = '%s',\n", schema.Name)
		fmt.Fprintf(w.writer, " @level1type = 'TABLE', @level1name = '%s';\n\n", table.Name)
	}

	// Column comments
	for _, col := range getSortedColumns(table.Columns) {
		if col.Description != "" {
			fmt.Fprintf(w.writer, "EXEC sp_addextendedproperty\n")
			fmt.Fprintf(w.writer, " @name = 'MS_Description',\n")
			fmt.Fprintf(w.writer, " @value = '%s',\n", escapeQuote(col.Description))
			fmt.Fprintf(w.writer, " @level0type = 'SCHEMA', @level0name = '%s',\n", schema.Name)
			fmt.Fprintf(w.writer, " @level1type = 'TABLE', @level1name = '%s',\n", table.Name)
			fmt.Fprintf(w.writer, " @level2type = 'COLUMN', @level2name = '%s';\n\n", col.Name)
		}
	}

	return nil
}
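
// Illustration: a table description "User accounts table" on dbo.users emits:
//
//	EXEC sp_addextendedproperty
//	 @name = 'MS_Description',
//	 @value = 'User accounts table',
//	 @level0type = 'SCHEMA', @level0name = 'dbo',
//	 @level1type = 'TABLE', @level1name = 'users';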

// executeDatabaseSQL executes SQL statements directly on an MSSQL database
func (w *Writer) executeDatabaseSQL(db *models.Database, connString string) error {
	// Generate SQL statements
	statements := []string{}
	statements = append(statements, "-- MSSQL Database Schema")
	statements = append(statements, fmt.Sprintf("-- Database: %s", db.Name))
	statements = append(statements, "-- Generated by RelSpec")

	for _, schema := range db.Schemas {
		if err := w.generateSchemaStatements(schema, &statements); err != nil {
			return fmt.Errorf("failed to generate statements for schema %s: %w", schema.Name, err)
		}
	}

	// Connect to database
	dbConn, err := sql.Open("mssql", connString)
	if err != nil {
		return fmt.Errorf("failed to connect to database: %w", err)
	}
	defer dbConn.Close()

	ctx := context.Background()
	if err = dbConn.PingContext(ctx); err != nil {
		return fmt.Errorf("failed to ping database: %w", err)
	}

	// Execute statements
	executedCount := 0
	for i, stmt := range statements {
		stmtTrimmed := strings.TrimSpace(stmt)

		// Skip comments and empty statements
		if strings.HasPrefix(stmtTrimmed, "--") || stmtTrimmed == "" {
			continue
		}

		fmt.Fprintf(os.Stderr, "Executing statement %d/%d...\n", i+1, len(statements))

		_, execErr := dbConn.ExecContext(ctx, stmt)
		if execErr != nil {
			fmt.Fprintf(os.Stderr, "⚠ Warning: Statement failed: %v\n", execErr)
			continue
		}

		executedCount++
	}

	fmt.Fprintf(os.Stderr, "✓ Successfully executed %d statements\n", executedCount)
	return nil
}
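
// Illustration: a minimal sketch of opting into direct execution; assumes
// WriterOptions.Metadata is a plain map[string]interface{} (only the
// "connection_string" key is read by this writer).
//
//	opts := &writers.WriterOptions{
//		Metadata: map[string]interface{}{
//			"connection_string": "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest",
//		},
//	}
//	err := NewWriter(opts).WriteDatabase(db)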

// generateSchemaStatements generates SQL statements for a schema
func (w *Writer) generateSchemaStatements(schema *models.Schema, statements *[]string) error {
	// Phase 1: Create schema
	if schema.Name != "dbo" && !w.options.FlattenSchema {
		*statements = append(*statements, fmt.Sprintf("-- Schema: %s", schema.Name))
		*statements = append(*statements, fmt.Sprintf("CREATE SCHEMA [%s];", schema.Name))
	}

	// Phase 2: Create tables
	*statements = append(*statements, fmt.Sprintf("-- Tables for schema: %s", schema.Name))
	for _, table := range schema.Tables {
		createTableSQL := fmt.Sprintf("CREATE TABLE %s (", w.qualTable(schema.Name, table.Name))
		columnDefs := make([]string, 0)

		columns := getSortedColumns(table.Columns)
		for _, col := range columns {
			def := w.generateColumnDefinition(col)
			columnDefs = append(columnDefs, " "+def)
		}

		createTableSQL += "\n" + strings.Join(columnDefs, ",\n") + "\n)"
		*statements = append(*statements, createTableSQL)
	}

	// Phase 3-7: Constraints and indexes will be added by WriteSchema logic
	// For now, just create tables
	return nil
}

// Helper functions

// getSortedColumns returns columns sorted by sequence
// (ties broken by name for deterministic output)
func getSortedColumns(columns map[string]*models.Column) []*models.Column {
	sorted := make([]*models.Column, 0, len(columns))
	for _, col := range columns {
		sorted = append(sorted, col)
	}
	sort.Slice(sorted, func(i, j int) bool {
		if sorted[i].Sequence != sorted[j].Sequence {
			return sorted[i].Sequence < sorted[j].Sequence
		}
		return sorted[i].Name < sorted[j].Name
	})
	return sorted
}

// escapeQuote escapes single quotes in strings for SQL
func escapeQuote(s string) string {
	return strings.ReplaceAll(s, "'", "''")
}

// stripBackticks removes backticks from SQL expressions
func stripBackticks(s string) string {
	return strings.ReplaceAll(s, "`", "")
}

205 pkg/writers/mssql/writer_test.go Normal file
@@ -0,0 +1,205 @@
package mssql

import (
	"bytes"
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"github.com/stretchr/testify/assert"
)

// TestGenerateColumnDefinition tests column definition generation
func TestGenerateColumnDefinition(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	tests := []struct {
		name     string
		column   *models.Column
		expected string
	}{
		{
			name: "INT NOT NULL",
			column: &models.Column{
				Name:     "id",
				Type:     "int",
				NotNull:  true,
				Sequence: 1,
			},
			expected: "[id] INT NOT NULL",
		},
		{
			name: "VARCHAR with length",
			column: &models.Column{
				Name:     "name",
				Type:     "string",
				Length:   100,
				NotNull:  true,
				Sequence: 2,
			},
			expected: "[name] NVARCHAR(100) NOT NULL",
		},
		{
			name: "DATETIME2 with default",
			column: &models.Column{
				Name:     "created_at",
				Type:     "timestamp",
				NotNull:  true,
				Default:  "GETDATE()",
				Sequence: 3,
			},
			expected: "[created_at] DATETIME2 NOT NULL DEFAULT GETDATE()",
		},
		{
			name: "IDENTITY column",
			column: &models.Column{
				Name:          "id",
				Type:          "int",
				AutoIncrement: true,
				NotNull:       true,
				Sequence:      1,
			},
			expected: "[id] INT IDENTITY(1,1) NOT NULL",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := writer.generateColumnDefinition(tt.column)
			assert.Equal(t, tt.expected, result)
		})
	}
}

// TestWriteCreateTable tests CREATE TABLE statement generation
func TestWriteCreateTable(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	// Create a test schema with a table
	schema := models.InitSchema("dbo")
	table := models.InitTable("users", "dbo")

	col1 := models.InitColumn("id", "users", "dbo")
	col1.Type = "int"
	col1.AutoIncrement = true
	col1.NotNull = true
	col1.Sequence = 1

	col2 := models.InitColumn("email", "users", "dbo")
	col2.Type = "string"
	col2.Length = 255
	col2.NotNull = true
	col2.Sequence = 2

	table.Columns["id"] = col1
	table.Columns["email"] = col2

	// Write to buffer
	buf := &bytes.Buffer{}
	writer.writer = buf

	err := writer.writeCreateTable(schema, table)
	assert.NoError(t, err)

	output := buf.String()
	assert.Contains(t, output, "CREATE TABLE [dbo].[users]")
	assert.Contains(t, output, "[id] INT IDENTITY(1,1) NOT NULL")
	assert.Contains(t, output, "[email] NVARCHAR(255) NOT NULL")
}

// TestWritePrimaryKey tests PRIMARY KEY constraint generation
func TestWritePrimaryKey(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	schema := models.InitSchema("dbo")
	table := models.InitTable("users", "dbo")

	// Add primary key constraint
	pk := models.InitConstraint("PK_users_id", models.PrimaryKeyConstraint)
	pk.Columns = []string{"id"}
	table.Constraints[pk.Name] = pk

	// Add column
	col := models.InitColumn("id", "users", "dbo")
	col.Type = "int"
	col.Sequence = 1
	table.Columns["id"] = col

	// Write to buffer
	buf := &bytes.Buffer{}
	writer.writer = buf

	err := writer.writePrimaryKey(schema, table)
	assert.NoError(t, err)

	output := buf.String()
	assert.Contains(t, output, "ALTER TABLE [dbo].[users]")
	assert.Contains(t, output, "PRIMARY KEY")
	assert.Contains(t, output, "[id]")
}

// TestWriteForeignKey tests FOREIGN KEY constraint generation
func TestWriteForeignKey(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	schema := models.InitSchema("dbo")
	table := models.InitTable("orders", "dbo")

	// Add foreign key constraint
	fk := models.InitConstraint("FK_orders_users", models.ForeignKeyConstraint)
	fk.Columns = []string{"user_id"}
	fk.ReferencedSchema = "dbo"
	fk.ReferencedTable = "users"
	fk.ReferencedColumns = []string{"id"}
	fk.OnDelete = "CASCADE"
	fk.OnUpdate = "NO ACTION"
	table.Constraints[fk.Name] = fk

	// Add column
	col := models.InitColumn("user_id", "orders", "dbo")
	col.Type = "int"
	col.Sequence = 1
	table.Columns["user_id"] = col

	// Write to buffer
	buf := &bytes.Buffer{}
	writer.writer = buf

	err := writer.writeForeignKeys(schema, table)
	assert.NoError(t, err)

	output := buf.String()
	assert.Contains(t, output, "ALTER TABLE [dbo].[orders]")
	assert.Contains(t, output, "FK_orders_users")
	assert.Contains(t, output, "FOREIGN KEY")
	assert.Contains(t, output, "CASCADE")
	assert.Contains(t, output, "NO ACTION")
}

// TestWriteComments tests extended property generation for comments
func TestWriteComments(t *testing.T) {
	writer := NewWriter(&writers.WriterOptions{})

	schema := models.InitSchema("dbo")
	table := models.InitTable("users", "dbo")
	table.Description = "User accounts table"

	col := models.InitColumn("id", "users", "dbo")
	col.Type = "int"
	col.Description = "Primary key"
	col.Sequence = 1
	table.Columns["id"] = col

	// Write to buffer
	buf := &bytes.Buffer{}
	writer.writer = buf

	err := writer.writeComments(schema, table)
	assert.NoError(t, err)

	output := buf.String()
	assert.Contains(t, output, "sp_addextendedproperty")
	assert.Contains(t, output, "MS_Description")
	assert.Contains(t, output, "User accounts table")
	assert.Contains(t, output, "Primary key")
}

286 test_data/mssql/TESTING.md Normal file
@@ -0,0 +1,286 @@
# MSSQL Reader and Writer Testing Guide

## Prerequisites

- Docker and Docker Compose installed
- RelSpec binary built (`make build`)
- jq (optional, for JSON processing)

## Quick Start

### 1. Start SQL Server Express

```bash
docker-compose up -d mssql

# Wait for container to be healthy
docker-compose ps

# Monitor startup logs
docker-compose logs -f mssql
```

### 2. Verify Database Creation

```bash
docker exec -it $(docker-compose ps -q mssql) \
  /opt/mssql-tools/bin/sqlcmd \
  -S localhost \
  -U sa \
  -P 'StrongPassword123!' \
  -Q "SELECT name FROM sys.databases WHERE name = 'RelSpecTest'"
```

## Testing Scenarios

### Scenario 1: Read MSSQL Database to JSON

Read the test schema from MSSQL and export to JSON:

```bash
./build/relspec convert \
  --from mssql \
  --from-conn "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest" \
  --to json \
  --to-path test_output.json
```

Verify output:
```bash
jq '.Schemas[0].Tables | length' test_output.json
jq '.Schemas[0].Tables[0]' test_output.json
```

### Scenario 2: Read MSSQL Database to DBML

Convert MSSQL schema to DBML format:

```bash
./build/relspec convert \
  --from mssql \
  --from-conn "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest" \
  --to dbml \
  --to-path test_output.dbml
```

### Scenario 3: Generate SQL Script (No Direct Execution)

Generate SQL script without executing:

```bash
./build/relspec convert \
  --from mssql \
  --from-conn "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest" \
  --to mssql \
  --to-path test_output.sql
```

Inspect generated SQL:
```bash
head -50 test_output.sql
```
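
The script opens with the writer's header comments followed by the
phase-ordered DDL, along these lines (column details elided):

```sql
-- MSSQL Database Schema
-- Database: RelSpecTest
-- Generated by RelSpec

-- Schema: public
CREATE SCHEMA [public];

-- Tables for schema: public
CREATE TABLE [public].[users] (
 ...
);
```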

### Scenario 4: Round-Trip Conversion (MSSQL → JSON → MSSQL)

Test bidirectional conversion:

```bash
# Step 1: MSSQL → JSON
./build/relspec convert \
  --from mssql \
  --from-conn "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest" \
  --to json \
  --to-path backup.json

# Step 2: JSON → MSSQL SQL
./build/relspec convert \
  --from json \
  --from-path backup.json \
  --to mssql \
  --to-path restore.sql

# Inspect SQL
head -50 restore.sql
```

### Scenario 5: Cross-Database Conversion

If you have PostgreSQL running, test conversion:

```bash
# MSSQL → PostgreSQL SQL
./build/relspec convert \
  --from mssql \
  --from-conn "sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest" \
  --to pgsql \
  --to-path mssql_to_pg.sql
```

### Scenario 6: Test Type Mappings

Create a JSON file (e.g. `type_test.json`) with various types and convert it to MSSQL:

```json
{
  "Name": "TypeTest",
  "Schemas": [
    {
      "Name": "dbo",
      "Tables": [
        {
          "Name": "type_samples",
          "Columns": {
            "id": {
              "Name": "id",
              "Type": "int",
              "AutoIncrement": true,
              "NotNull": true,
              "Sequence": 1
            },
            "big_num": {
              "Name": "big_num",
              "Type": "int64",
              "Sequence": 2
            },
            "is_active": {
              "Name": "is_active",
              "Type": "bool",
              "Sequence": 3
            },
            "description": {
              "Name": "description",
              "Type": "text",
              "Sequence": 4
            },
            "created_at": {
              "Name": "created_at",
              "Type": "timestamp",
              "NotNull": true,
              "Default": "GETDATE()",
              "Sequence": 5
            },
            "unique_id": {
              "Name": "unique_id",
              "Type": "uuid",
              "Sequence": 6
            },
            "metadata": {
              "Name": "metadata",
              "Type": "json",
              "Sequence": 7
            },
            "binary_data": {
              "Name": "binary_data",
              "Type": "bytea",
              "Sequence": 8
            }
          },
          "Constraints": {
            "PK_type_samples_id": {
              "Name": "PK_type_samples_id",
              "Type": "PRIMARY_KEY",
              "Columns": ["id"]
            }
          }
        }
      ]
    }
  ]
}
```

Convert to MSSQL:
```bash
./build/relspec convert \
  --from json \
  --from-path type_test.json \
  --to mssql \
  --to-path type_test.sql

cat type_test.sql
```
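
The `int`, `string`, and `timestamp` mappings are pinned down by the writer's
unit tests (`INT`, `NVARCHAR(n)`, `DATETIME2`); the remaining types below are
plausible rather than verified outputs, since they depend on
`ConvertCanonicalToMSSQL`:

```sql
CREATE TABLE [dbo].[type_samples] (
 [id] INT IDENTITY(1,1) NOT NULL,
 [big_num] BIGINT,             -- assumed mapping for int64
 [is_active] BIT,              -- assumed mapping for bool
 [description] NVARCHAR(MAX),  -- assumed mapping for text
 [created_at] DATETIME2 NOT NULL DEFAULT GETDATE(),
 [unique_id] UNIQUEIDENTIFIER, -- assumed mapping for uuid
 [metadata] NVARCHAR(MAX),     -- assumed mapping for json
 [binary_data] VARBINARY(MAX)  -- assumed mapping for bytea
);
```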

## Cleanup

Stop and remove the SQL Server container:

```bash
docker-compose down

# Clean up test files
rm -f test_output.* backup.json restore.sql
```

## Troubleshooting

### Container won't start

Check that the Docker daemon is running and inspect the database logs:
```bash
docker-compose logs mssql
```

### Connection refused errors

Wait for the container to become healthy:
```bash
docker-compose ps
# Wait until STATUS shows "healthy"

# Or check manually
docker exec -it $(docker-compose ps -q mssql) \
  /opt/mssql-tools/bin/sqlcmd \
  -S localhost \
  -U sa \
  -P 'StrongPassword123!' \
  -Q "SELECT @@VERSION"
```

### Test schema not found

Initialize the test schema:
```bash
docker exec -i $(docker-compose ps -q mssql) \
  /opt/mssql-tools/bin/sqlcmd \
  -S localhost \
  -U sa \
  -P 'StrongPassword123!' \
  < test_data/mssql/test_schema.sql
```

### Connection string format issues

Use the correct format for connection strings:
- Default port: 1433
- Username: `sa`
- Password: `StrongPassword123!`
- Database: `RelSpecTest`

Format: `sqlserver://sa:StrongPassword123!@localhost:1433/RelSpecTest`

## Performance Notes

- Initial reader setup may take a few seconds
- Type mapping queries are cached within a single read operation
- Direct execution mode is atomic per table/constraint
- Large schemas (100+ tables) should complete in under 5 seconds

## Unit Test Verification

Run the MSSQL-specific tests:

```bash
# Type mapping tests
go test ./pkg/mssql/... -v

# Reader tests
go test ./pkg/readers/mssql/... -v

# Writer tests
go test ./pkg/writers/mssql/... -v

# All together
go test ./pkg/mssql/... ./pkg/readers/mssql/... ./pkg/writers/mssql/... -v
```

Expected output: all tests should PASS.

187 test_data/mssql/test_schema.sql Normal file
@@ -0,0 +1,187 @@
-- Test schema for MSSQL Reader integration tests
-- This script creates a sample database for testing the MSSQL reader

USE master;
GO

-- Drop existing database if it exists
IF EXISTS (SELECT 1 FROM sys.databases WHERE name = 'RelSpecTest')
BEGIN
    ALTER DATABASE RelSpecTest SET SINGLE_USER WITH ROLLBACK IMMEDIATE;
    DROP DATABASE RelSpecTest;
END
GO

-- Create test database
CREATE DATABASE RelSpecTest;
GO

USE RelSpecTest;
GO

-- Create schemas
CREATE SCHEMA [public];
GO

CREATE SCHEMA [auth];
GO

-- Create tables in public schema
CREATE TABLE [public].[users] (
    [id] INT IDENTITY(1,1) NOT NULL,
    [email] NVARCHAR(255) NOT NULL,
    [username] NVARCHAR(100) NOT NULL,
    [created_at] DATETIME2 NOT NULL DEFAULT GETDATE(),
    [updated_at] DATETIME2 NULL,
    [is_active] BIT NOT NULL DEFAULT 1,
    PRIMARY KEY ([id]),
    UNIQUE ([email]),
    UNIQUE ([username])
);
GO

CREATE TABLE [public].[posts] (
    [id] INT IDENTITY(1,1) NOT NULL,
    [user_id] INT NOT NULL,
    [title] NVARCHAR(255) NOT NULL,
    [content] NVARCHAR(MAX) NOT NULL,
    [published_at] DATETIME2 NULL,
    [created_at] DATETIME2 NOT NULL DEFAULT GETDATE(),
    PRIMARY KEY ([id])
);
GO

CREATE TABLE [public].[comments] (
    [id] INT IDENTITY(1,1) NOT NULL,
    [post_id] INT NOT NULL,
    [user_id] INT NOT NULL,
    [content] NVARCHAR(MAX) NOT NULL,
    [created_at] DATETIME2 NOT NULL DEFAULT GETDATE(),
    PRIMARY KEY ([id])
);
GO

-- Create tables in auth schema
CREATE TABLE [auth].[roles] (
    [id] INT IDENTITY(1,1) NOT NULL,
    [name] NVARCHAR(100) NOT NULL,
    [description] NVARCHAR(MAX) NULL,
    PRIMARY KEY ([id]),
    UNIQUE ([name])
);
GO

CREATE TABLE [auth].[user_roles] (
    [id] INT IDENTITY(1,1) NOT NULL,
    [user_id] INT NOT NULL,
    [role_id] INT NOT NULL,
    PRIMARY KEY ([id]),
    UNIQUE ([user_id], [role_id])
);
GO

-- Add foreign keys
ALTER TABLE [public].[posts]
    ADD CONSTRAINT [FK_posts_users]
    FOREIGN KEY ([user_id])
    REFERENCES [public].[users] ([id])
    ON DELETE CASCADE ON UPDATE NO ACTION;
GO

ALTER TABLE [public].[comments]
    ADD CONSTRAINT [FK_comments_posts]
    FOREIGN KEY ([post_id])
    REFERENCES [public].[posts] ([id])
    ON DELETE CASCADE ON UPDATE NO ACTION;
GO

ALTER TABLE [public].[comments]
    ADD CONSTRAINT [FK_comments_users]
    FOREIGN KEY ([user_id])
    REFERENCES [public].[users] ([id])
    ON DELETE CASCADE ON UPDATE NO ACTION;
GO

ALTER TABLE [auth].[user_roles]
    ADD CONSTRAINT [FK_user_roles_users]
    FOREIGN KEY ([user_id])
    REFERENCES [public].[users] ([id])
    ON DELETE CASCADE ON UPDATE NO ACTION;
GO

ALTER TABLE [auth].[user_roles]
    ADD CONSTRAINT [FK_user_roles_roles]
    FOREIGN KEY ([role_id])
    REFERENCES [auth].[roles] ([id])
    ON DELETE CASCADE ON UPDATE NO ACTION;
GO

-- Create indexes
CREATE INDEX [IDX_users_email] ON [public].[users] ([email]);
GO

CREATE INDEX [IDX_posts_user_id] ON [public].[posts] ([user_id]);
GO

CREATE INDEX [IDX_comments_post_id] ON [public].[comments] ([post_id]);
GO

CREATE INDEX [IDX_comments_user_id] ON [public].[comments] ([user_id]);
GO

-- Add extended properties (comments)
EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'User accounts table',
    @level0type = 'SCHEMA', @level0name = 'public',
    @level1type = 'TABLE', @level1name = 'users';
GO

EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'User unique identifier',
    @level0type = 'SCHEMA', @level0name = 'public',
    @level1type = 'TABLE', @level1name = 'users',
    @level2type = 'COLUMN', @level2name = 'id';
GO

EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'User email address',
    @level0type = 'SCHEMA', @level0name = 'public',
    @level1type = 'TABLE', @level1name = 'users',
    @level2type = 'COLUMN', @level2name = 'email';
GO

EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'Blog posts table',
    @level0type = 'SCHEMA', @level0name = 'public',
    @level1type = 'TABLE', @level1name = 'posts';
GO

EXEC sp_addextendedproperty
    @name = 'MS_Description',
    @value = 'User roles mapping table',
    @level0type = 'SCHEMA', @level0name = 'auth',
    @level1type = 'TABLE', @level1name = 'user_roles';
GO

-- Add check constraint
ALTER TABLE [public].[users]
    ADD CONSTRAINT [CK_users_email_format]
    CHECK (LEN(email) > 0 AND email LIKE '%@%.%');
GO

-- Verify schema was created
SELECT
    SCHEMA_NAME(s.schema_id) AS [Schema],
    t.name AS [Table],
    COUNT(c.column_id) AS [ColumnCount]
FROM sys.tables t
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
LEFT JOIN sys.columns c ON t.object_id = c.object_id
WHERE SCHEMA_NAME(s.schema_id) IN ('public', 'auth')
GROUP BY SCHEMA_NAME(s.schema_id), t.name
ORDER BY [Schema], [Table];
GO