// relspecgo/tests/integration/roundtrip_test.go

package integration

import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
dctxreader "git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
jsonreader "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
pgsqlreader "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
jsonwriter "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
pgsqlwriter "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
"github.com/jackc/pgx/v5"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// getTestConnectionString returns a PostgreSQL connection string from the environment,
// or skips the test if it is not set
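// (for example: RELSPEC_TEST_PG_CONN=postgres://user:pass@localhost:5432/postgres)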
func getTestConnectionString(t *testing.T) string {
connStr := os.Getenv("RELSPEC_TEST_PG_CONN")
if connStr == "" {
t.Skip("Skipping integration test: RELSPEC_TEST_PG_CONN environment variable not set")
}
return connStr
}

// TestPostgresToJSONRoundTrip reads the test database, writes it to JSON, reads that
// JSON back, and compares the two outputs to verify the round-trip
func TestPostgresToJSONRoundTrip(t *testing.T) {
connStr := getTestConnectionString(t)
testDir := t.TempDir()
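// t.TempDir is removed automatically when the test completes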
// Step 1: Read from PostgreSQL test database
t.Log("Step 1: Reading from PostgreSQL...")
pgsqlReaderOpts := &readers.ReaderOptions{
ConnectionString: connStr,
}
pgsqlReader := pgsqlreader.NewReader(pgsqlReaderOpts)
dbFromPG, err := pgsqlReader.ReadDatabase()
require.NoError(t, err, "Failed to read from PostgreSQL")
require.NotNil(t, dbFromPG, "Database from PostgreSQL should not be nil")
t.Logf(" ✓ Read database '%s' with %d schemas", dbFromPG.Name, len(dbFromPG.Schemas))
// Step 2: Write to JSON (first JSON output)
t.Log("Step 2: Writing to JSON (first output)...")
json1Path := filepath.Join(testDir, "from_postgres.json")
jsonWriter1Opts := &writers.WriterOptions{
OutputPath: json1Path,
}
jsonWriter1 := jsonwriter.NewWriter(jsonWriter1Opts)
err = jsonWriter1.WriteDatabase(dbFromPG)
require.NoError(t, err, "Failed to write first JSON")
json1Stat, err := os.Stat(json1Path)
require.NoError(t, err, "First JSON file should exist")
require.Greater(t, json1Stat.Size(), int64(0), "First JSON file should not be empty")
t.Logf(" ✓ Wrote JSON file (%d bytes)", json1Stat.Size())
// Step 3: Read JSON and write to SQL
t.Log("Step 3: Reading JSON and generating SQL...")
jsonReaderOpts := &readers.ReaderOptions{
FilePath: json1Path,
}
jsonReader := jsonreader.NewReader(jsonReaderOpts)
dbFromJSON, err := jsonReader.ReadDatabase()
require.NoError(t, err, "Failed to read JSON file")
require.NotNil(t, dbFromJSON, "Database from JSON should not be nil")
t.Logf(" ✓ Read database from JSON with %d schemas", len(dbFromJSON.Schemas))
// Generate SQL DDL
sqlPath := filepath.Join(testDir, "schema.sql")
pgsqlWriterOpts := &writers.WriterOptions{
OutputPath: sqlPath,
}
pgsqlWriter := pgsqlwriter.NewWriter(pgsqlWriterOpts)
err = pgsqlWriter.WriteDatabase(dbFromJSON)
require.NoError(t, err, "Failed to generate SQL")
sqlStat, err := os.Stat(sqlPath)
require.NoError(t, err, "SQL file should exist")
require.Greater(t, sqlStat.Size(), int64(0), "SQL file should not be empty")
t.Logf(" ✓ Generated SQL DDL (%d bytes)", sqlStat.Size())
// Step 4: Write to second JSON to verify round-trip
t.Log("Step 4: Writing to JSON (second output for comparison)...")
json2Path := filepath.Join(testDir, "roundtrip.json")
jsonWriter2Opts := &writers.WriterOptions{
OutputPath: json2Path,
}
jsonWriter2 := jsonwriter.NewWriter(jsonWriter2Opts)
err = jsonWriter2.WriteDatabase(dbFromJSON)
require.NoError(t, err, "Failed to write second JSON")
json2Stat, err := os.Stat(json2Path)
require.NoError(t, err, "Second JSON file should exist")
require.Greater(t, json2Stat.Size(), int64(0), "Second JSON file should not be empty")
t.Logf(" ✓ Wrote second JSON file (%d bytes)", json2Stat.Size())
// Step 5: Compare the two JSON outputs (they should be identical after round-trip)
t.Log("Step 5: Comparing JSON outputs...")
// Read both JSON files
json1Data, err := os.ReadFile(json1Path)
require.NoError(t, err, "Failed to read first JSON file")
json2Data, err := os.ReadFile(json2Path)
require.NoError(t, err, "Failed to read second JSON file")
// Parse JSON into Database models for comparison
var db1, db2 models.Database
err = json.Unmarshal(json1Data, &db1)
require.NoError(t, err, "Failed to parse first JSON")
err = json.Unmarshal(json2Data, &db2)
require.NoError(t, err, "Failed to parse second JSON")
// Compare high-level structure
t.Log(" Comparing high-level structure...")
assert.Equal(t, len(db1.Schemas), len(db2.Schemas), "Schema count should match")
// Compare schemas
for i, schema1 := range db1.Schemas {
if i >= len(db2.Schemas) {
t.Errorf("Schema index %d out of bounds in second database", i)
continue
}
schema2 := db2.Schemas[i]
assert.Equal(t, schema1.Name, schema2.Name, "Schema names should match")
assert.Equal(t, len(schema1.Tables), len(schema2.Tables),
"Table count in schema '%s' should match", schema1.Name)
// Compare tables
for j, table1 := range schema1.Tables {
if j >= len(schema2.Tables) {
t.Errorf("Table index %d out of bounds in schema '%s'", j, schema1.Name)
continue
}
table2 := schema2.Tables[j]
assert.Equal(t, table1.Name, table2.Name,
"Table names should match in schema '%s'", schema1.Name)
assert.Equal(t, len(table1.Columns), len(table2.Columns),
"Column count in table '%s.%s' should match", schema1.Name, table1.Name)
}
}
// Summary
t.Log("Summary:")
t.Logf(" ✓ Round-trip completed: PostgreSQL → JSON → Models → JSON")
t.Logf(" ✓ Generated SQL file for reference")
t.Logf(" ✓ JSON files are structurally identical")
totalTables := 0
for _, schema := range db1.Schemas {
totalTables += len(schema.Tables)
}
t.Logf(" ✓ Total tables: %d", totalTables)
}

// TestDCTXToJSONPipeline exercises the DCTX → JSON → SQL pipeline.
// Note: this test uses the large p1.dctx file and performs the conversion
// without executing the SQL (which would require creating 121 tables).
func TestDCTXToJSONPipeline(t *testing.T) {
testDir := t.TempDir()
// Step 1: Read DCTX file
t.Log("Step 1: Reading DCTX file...")
dctxPath := filepath.Join("..", "assets", "dctx", "p1.dctx")
dctxReaderOpts := &readers.ReaderOptions{
FilePath: dctxPath,
}
dctxReader := dctxreader.NewReader(dctxReaderOpts)
db, err := dctxReader.ReadDatabase()
require.NoError(t, err, "Failed to read DCTX file")
require.NotNil(t, db, "Database should not be nil")
t.Logf(" ✓ Read database '%s' with %d schemas", db.Name, len(db.Schemas))
// Step 2: Write to JSON
t.Log("Step 2: Writing to JSON...")
jsonPath := filepath.Join(testDir, "from_dctx.json")
jsonWriterOpts := &writers.WriterOptions{
OutputPath: jsonPath,
}
jsonWriter := jsonwriter.NewWriter(jsonWriterOpts)
err = jsonWriter.WriteDatabase(db)
require.NoError(t, err, "Failed to write JSON")
jsonStat, err := os.Stat(jsonPath)
require.NoError(t, err, "JSON file should exist")
require.Greater(t, jsonStat.Size(), int64(0), "JSON file should not be empty")
t.Logf(" ✓ Wrote JSON file (%d bytes)", jsonStat.Size())
// Step 3: Read JSON back
t.Log("Step 3: Reading JSON and generating SQL...")
jsonReaderOpts := &readers.ReaderOptions{
FilePath: jsonPath,
}
jsonReader := jsonreader.NewReader(jsonReaderOpts)
dbFromJSON, err := jsonReader.ReadDatabase()
require.NoError(t, err, "Failed to read JSON file")
require.NotNil(t, dbFromJSON, "Database from JSON should not be nil")
t.Logf(" ✓ Read database from JSON with %d schemas", len(dbFromJSON.Schemas))
// Step 4: Generate SQL DDL
sqlPath := filepath.Join(testDir, "schema.sql")
pgsqlWriterOpts := &writers.WriterOptions{
OutputPath: sqlPath,
}
pgsqlWriter := pgsqlwriter.NewWriter(pgsqlWriterOpts)
err = pgsqlWriter.WriteDatabase(dbFromJSON)
require.NoError(t, err, "Failed to generate SQL")
sqlStat, err := os.Stat(sqlPath)
require.NoError(t, err, "SQL file should exist")
require.Greater(t, sqlStat.Size(), int64(0), "SQL file should not be empty")
t.Logf(" ✓ Generated SQL DDL (%d bytes)", sqlStat.Size())
// Step 5: Write back to JSON for comparison
t.Log("Step 4: Writing back to JSON for comparison...")
json2Path := filepath.Join(testDir, "roundtrip.json")
jsonWriter2Opts := &writers.WriterOptions{
OutputPath: json2Path,
}
jsonWriter2 := jsonwriter.NewWriter(jsonWriter2Opts)
err = jsonWriter2.WriteDatabase(dbFromJSON)
require.NoError(t, err, "Failed to write second JSON")
json2Stat, err := os.Stat(json2Path)
require.NoError(t, err, "Second JSON file should exist")
t.Logf(" ✓ Wrote second JSON file (%d bytes)", json2Stat.Size())
// Step 6: Compare JSON files
t.Log("Step 5: Comparing JSON outputs...")
json1Data, err := os.ReadFile(jsonPath)
require.NoError(t, err, "Failed to read first JSON")
json2Data, err := os.ReadFile(json2Path)
require.NoError(t, err, "Failed to read second JSON")
// They should be identical
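// Byte-for-byte equality is expected because the JSON reader/writer pair should be lossless:
// the model read back from the first file should serialize to exactly the same bytes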
assert.Equal(t, json1Data, json2Data, "JSON files should be identical after round-trip")
// Summary
t.Log("Summary:")
t.Logf(" ✓ DCTX → JSON → Models → SQL → JSON pipeline completed")
t.Logf(" ✓ JSON files are byte-identical")
t.Logf(" ✓ SQL file: %s (%d bytes)", sqlPath, sqlStat.Size())
for _, schema := range db.Schemas {
t.Logf(" ✓ Schema '%s': %d tables", schema.Name, len(schema.Tables))
}
}

// TestDCTXToJSON tests just the DCTX to JSON conversion
func TestDCTXToJSON(t *testing.T) {
testDir := t.TempDir()
// Read DCTX file
t.Log("Reading DCTX file...")
dctxPath := filepath.Join("..", "assets", "dctx", "p1.dctx")
dctxReaderOpts := &readers.ReaderOptions{
FilePath: dctxPath,
}
dctxReader := dctxreader.NewReader(dctxReaderOpts)
db, err := dctxReader.ReadDatabase()
require.NoError(t, err, "Failed to read DCTX file")
require.NotNil(t, db, "Database should not be nil")
t.Logf("Read database '%s' with %d schemas", db.Name, len(db.Schemas))
// Write to JSON
t.Log("Writing to JSON...")
jsonPath := filepath.Join(testDir, "output.json")
jsonWriterOpts := &writers.WriterOptions{
OutputPath: jsonPath,
}
jsonWriter := jsonwriter.NewWriter(jsonWriterOpts)
err = jsonWriter.WriteDatabase(db)
require.NoError(t, err, "Failed to write JSON")
// Verify JSON file
jsonStat, err := os.Stat(jsonPath)
require.NoError(t, err, "JSON file should exist")
require.Greater(t, jsonStat.Size(), int64(0), "JSON file should not be empty")
// Read back and verify it's valid JSON
jsonData, err := os.ReadFile(jsonPath)
require.NoError(t, err, "Failed to read JSON file")
var dbFromJSON models.Database
err = json.Unmarshal(jsonData, &dbFromJSON)
require.NoError(t, err, "JSON should be valid")
t.Logf("✓ Successfully converted DCTX to JSON (%d bytes)", jsonStat.Size())
t.Logf("✓ JSON contains %d schemas", len(dbFromJSON.Schemas))
for _, schema := range dbFromJSON.Schemas {
t.Logf(" - Schema '%s': %d tables", schema.Name, len(schema.Tables))
}
}

// TestDCTXToSQL tests DCTX to SQL conversion
func TestDCTXToSQL(t *testing.T) {
testDir := t.TempDir()
// Read DCTX file
t.Log("Reading DCTX file...")
dctxPath := filepath.Join("..", "assets", "dctx", "p1.dctx")
dctxReaderOpts := &readers.ReaderOptions{
FilePath: dctxPath,
}
dctxReader := dctxreader.NewReader(dctxReaderOpts)
db, err := dctxReader.ReadDatabase()
require.NoError(t, err, "Failed to read DCTX file")
require.NotNil(t, db, "Database should not be nil")
t.Logf("Read database '%s' with %d schemas", db.Name, len(db.Schemas))
// Write to SQL
t.Log("Writing to SQL...")
sqlPath := filepath.Join(testDir, "output.sql")
pgsqlWriterOpts := &writers.WriterOptions{
OutputPath: sqlPath,
}
pgsqlWriter := pgsqlwriter.NewWriter(pgsqlWriterOpts)
err = pgsqlWriter.WriteDatabase(db)
require.NoError(t, err, "Failed to write SQL")
// Verify SQL file
sqlStat, err := os.Stat(sqlPath)
require.NoError(t, err, "SQL file should exist")
require.Greater(t, sqlStat.Size(), int64(0), "SQL file should not be empty")
t.Logf("✓ Successfully converted DCTX to SQL (%d bytes)", sqlStat.Size())
t.Logf("✓ SQL file saved to: %s", sqlPath)
// Log the beginning of the generated SQL as a quick sanity check
sqlContent, err := os.ReadFile(sqlPath)
require.NoError(t, err, "Failed to read SQL file")
sqlText := string(sqlContent)
if len(sqlText) > 500 {
t.Logf("First 500 chars of SQL:\n%s", sqlText[:500])
} else {
t.Logf("SQL content:\n%s", sqlText)
}
}

// TestComplexDCTXToPostgresRoundTrip tests the complete roundtrip:
// DCTX → JSON → SQL (as statements) → PostgreSQL → JSON → Compare
// This is the most comprehensive integration test using the large p1.dctx file
func TestComplexDCTXToPostgresRoundTrip(t *testing.T) {
connStr := getTestConnectionString(t)
testDir := t.TempDir()
ctx := context.Background()
// Step 1: Read DCTX file
t.Log("Step 1: Reading DCTX file...")
dctxPath := filepath.Join("..", "assets", "dctx", "p1.dctx")
dctxReaderOpts := &readers.ReaderOptions{
FilePath: dctxPath,
}
dctxReader := dctxreader.NewReader(dctxReaderOpts)
dbFromDCTX, err := dctxReader.ReadDatabase()
require.NoError(t, err, "Failed to read DCTX file")
require.NotNil(t, dbFromDCTX, "Database should not be nil")
t.Logf(" ✓ Read database '%s' with %d schemas", dbFromDCTX.Name, len(dbFromDCTX.Schemas))
// Step 2: Write to JSON (first output)
t.Log("Step 2: Writing to JSON (first output)...")
json1Path := filepath.Join(testDir, "from_dctx.json")
jsonWriter1Opts := &writers.WriterOptions{
OutputPath: json1Path,
}
jsonWriter1 := jsonwriter.NewWriter(jsonWriter1Opts)
err = jsonWriter1.WriteDatabase(dbFromDCTX)
require.NoError(t, err, "Failed to write first JSON")
json1Stat, err := os.Stat(json1Path)
require.NoError(t, err, "First JSON file should exist")
t.Logf(" ✓ Wrote JSON file (%d bytes)", json1Stat.Size())
// Step 3: Read JSON back
t.Log("Step 3: Reading JSON back...")
jsonReaderOpts := &readers.ReaderOptions{
FilePath: json1Path,
}
jsonReader := jsonreader.NewReader(jsonReaderOpts)
dbFromJSON, err := jsonReader.ReadDatabase()
require.NoError(t, err, "Failed to read JSON")
require.NotNil(t, dbFromJSON, "Database from JSON should not be nil")
t.Logf(" ✓ Read database from JSON with %d schemas", len(dbFromJSON.Schemas))
// Step 4: Generate SQL statements as a list
t.Log("Step 4: Generating SQL statements...")
pgsqlWriter := pgsqlwriter.NewWriter(&writers.WriterOptions{})
statements, err := pgsqlWriter.GenerateDatabaseStatements(dbFromJSON)
require.NoError(t, err, "Failed to generate SQL statements")
t.Logf(" ✓ Generated %d SQL statements", len(statements))
// Step 5: Connect to PostgreSQL
t.Log("Step 5: Connecting to PostgreSQL...")
conn, err := pgx.Connect(ctx, connStr)
require.NoError(t, err, "Failed to connect to PostgreSQL")
defer conn.Close(ctx)
t.Logf(" ✓ Connected to PostgreSQL")
// Step 6: Drop and recreate schema to ensure clean state
t.Log("Step 6: Cleaning up existing schemas...")
for _, schema := range dbFromJSON.Schemas {
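// DROP ... CASCADE clears out any objects left behind by a previous test run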
_, err = conn.Exec(ctx, fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE", schema.Name))
if err != nil {
t.Logf(" Warning: Failed to drop schema %s: %v", schema.Name, err)
}
// Recreate the schema
_, err = conn.Exec(ctx, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s", schema.Name))
if err != nil {
t.Logf(" Warning: Failed to create schema %s: %v", schema.Name, err)
}
}
t.Logf(" ✓ Cleaned up and recreated schemas")
// Step 7: Execute SQL statements one by one
t.Log("Step 7: Executing SQL statements...")
successCount := 0
errorCount := 0
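// Collect failed statements so they can be written out to a report file for debugging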
type FailedStatement struct {
Index int
Statement string
Error string
}
failedStatements := []FailedStatement{}
for i, stmt := range statements {
// Skip comments
trimmed := strings.TrimSpace(stmt)
if strings.HasPrefix(trimmed, "--") || trimmed == "" {
continue
}
_, err = conn.Exec(ctx, stmt)
if err != nil {
errorCount++
failedStatements = append(failedStatements, FailedStatement{
Index: i,
Statement: stmt,
Error: err.Error(),
})
// Log first 10 errors
if errorCount <= 10 {
t.Logf(" ⚠ Statement %d failed: %v", i, err)
}
} else {
successCount++
}
}
t.Logf(" ✓ Executed %d statements successfully", successCount)
if errorCount > 0 {
t.Logf(" ⚠ %d statements failed", errorCount)
if errorCount > 10 {
t.Logf(" ⚠ Showing first 10 errors, %d more errors not logged", errorCount-10)
}
// Save failed statements to file
failedStmtsPath := filepath.Join(testDir, "failed_statements.txt")
failedFile, err := os.Create(failedStmtsPath)
if err == nil {
defer failedFile.Close()
fmt.Fprintf(failedFile, "Failed SQL Statements Report\n")
fmt.Fprintf(failedFile, "============================\n\n")
fmt.Fprintf(failedFile, "Total Failed: %d / %d (%.1f%% failure rate)\n\n", errorCount, len(statements), float64(errorCount)/float64(len(statements))*100)
for _, failed := range failedStatements {
fmt.Fprintf(failedFile, "--- Statement #%d ---\n", failed.Index)
fmt.Fprintf(failedFile, "Error: %s\n", failed.Error)
fmt.Fprintf(failedFile, "SQL:\n%s\n\n", failed.Statement)
}
t.Logf(" 📝 Failed statements saved to: %s", failedStmtsPath)
}
}
// For this test, we require at least 80% success rate
if len(statements) > 0 {
successRate := float64(successCount) / float64(len(statements)) * 100
t.Logf(" Success rate: %.1f%%", successRate)
require.GreaterOrEqual(t, successRate, 80.0, "Success rate should be at least 80%")
}
// Step 8: Read back from PostgreSQL
t.Log("Step 8: Reading from PostgreSQL...")
pgsqlReaderOpts := &readers.ReaderOptions{
ConnectionString: connStr,
}
pgsqlReader := pgsqlreader.NewReader(pgsqlReaderOpts)
dbFromPG, err := pgsqlReader.ReadDatabase()
require.NoError(t, err, "Failed to read from PostgreSQL")
require.NotNil(t, dbFromPG, "Database from PostgreSQL should not be nil")
t.Logf(" ✓ Read database from PostgreSQL with %d schemas", len(dbFromPG.Schemas))
// Step 9: Write to JSON (second output)
t.Log("Step 9: Writing to JSON (second output)...")
json2Path := filepath.Join(testDir, "from_postgres.json")
jsonWriter2Opts := &writers.WriterOptions{
OutputPath: json2Path,
}
jsonWriter2 := jsonwriter.NewWriter(jsonWriter2Opts)
err = jsonWriter2.WriteDatabase(dbFromPG)
require.NoError(t, err, "Failed to write second JSON")
json2Stat, err := os.Stat(json2Path)
require.NoError(t, err, "Second JSON file should exist")
t.Logf(" ✓ Wrote second JSON file (%d bytes)", json2Stat.Size())
// Step 10: Compare the outputs
t.Log("Step 10: Comparing results...")
// Read both JSON files
json1Data, err := os.ReadFile(json1Path)
require.NoError(t, err, "Failed to read first JSON")
json2Data, err := os.ReadFile(json2Path)
require.NoError(t, err, "Failed to read second JSON")
// Parse JSON into Database models
var db1, db2 models.Database
err = json.Unmarshal(json1Data, &db1)
require.NoError(t, err, "Failed to parse first JSON")
err = json.Unmarshal(json2Data, &db2)
require.NoError(t, err, "Failed to parse second JSON")
// Compare high-level structure
t.Log(" Comparing high-level structure...")
// We might have different schema counts if some failed to create
// So we'll compare the schemas that do exist
t.Logf(" Original schemas: %d", len(db1.Schemas))
t.Logf(" PostgreSQL schemas: %d", len(db2.Schemas))
// Find matching schemas and compare them
for _, schema1 := range db1.Schemas {
// Find corresponding schema in db2
var schema2 *models.Schema
for _, s := range db2.Schemas {
if s.Name == schema1.Name {
schema2 = s
break
}
}
if schema2 == nil {
t.Logf(" ⚠ Schema '%s' from DCTX not found in PostgreSQL", schema1.Name)
continue
}
t.Logf(" Comparing schema '%s'...", schema1.Name)
t.Logf(" Original tables: %d", len(schema1.Tables))
t.Logf(" PostgreSQL tables: %d", len(schema2.Tables))
// Note: We don't require exact matches due to potential SQL execution failures
// The important thing is that the pipeline works end-to-end
}
// Summary
t.Log("Summary:")
t.Logf(" ✓ Complete round-trip: DCTX → JSON → SQL → PostgreSQL → JSON")
t.Logf(" ✓ Processed %d tables from DCTX", countTables(db1))
t.Logf(" ✓ Created %d tables in PostgreSQL", countTables(db2))
t.Logf(" ✓ Executed %d/%d SQL statements successfully", successCount, len(statements))
if errorCount > 0 {
t.Logf(" ⚠ %d statements failed (see failed_statements.txt)", errorCount)
t.Logf(" 📂 Test output directory: %s", testDir)
}
// The test passes if we got through all steps without fatal errors
t.Logf(" ✓ Integration test completed successfully")
}

// Helper function to count tables across all schemas
func countTables(db models.Database) int {
count := 0
for _, schema := range db.Schemas {
count += len(schema.Tables)
}
return count
}