51 Commits

Author SHA1 Message Date
77436757c8 fix(type_mapper): update timestamp type mapping to use SqlTimeStamp
All checks were successful
2026-02-08 21:35:27 +02:00
5e6f03e412 feat(type_mapper): add support for serial types and auto-increment tags
All checks were successful
2026-02-08 17:48:58 +02:00
1dcbc79387 feat(pgsql): enhance data type mapping to support serial types
All checks were successful
2026-02-08 17:31:28 +02:00
59c4a5ebf8 test(writer): enhance has-many relationship tests with join tag verification
All checks were successful
2026-02-08 15:20:20 +02:00
091e1913ee feat(version): retrieve version and build date from VCS if unset
All checks were successful
2026-02-08 15:04:03 +02:00
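The two version commits above wire `main.version` and `main.buildDate` through `-ldflags` and fall back to VCS metadata when unset. A minimal sketch of that fallback using Go's `runtime/debug`; the variable names match the ldflags in the release workflow diff further down, the rest is illustrative:

```go
package main

import (
	"fmt"
	"runtime/debug"
)

// Set via -ldflags at release time; empty for a plain `go build`.
var version, buildDate string

// resolveVersion fills in version and build date from embedded VCS
// build settings (available when built inside a git checkout).
func resolveVersion() (string, string) {
	v, d := version, buildDate
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, s := range info.Settings {
			switch s.Key {
			case "vcs.revision":
				if v == "" {
					v = s.Value
				}
			case "vcs.time":
				if d == "" {
					d = s.Value
				}
			}
		}
	}
	if v == "" {
		v = "dev"
	}
	return v, d
}

func main() {
	v, d := resolveVersion()
	fmt.Printf("relspec %s (built %s)\n", v, d)
}
```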
0e6e94797c feat(version): add version command to display version and build date
All checks were successful
2026-02-08 14:58:39 +02:00
a033349c76 refactor(writers): simplify model name generation by removing singularization
All checks were successful
2026-02-08 14:50:39 +02:00
466d657ea7 feat(mssql): add MSSQL writer for generating DDL from database schema
All checks were successful
- Implement MSSQL writer to generate SQL scripts for creating schemas, tables, and constraints.
- Support for identity columns, indexes, and extended properties.
- Add tests for column definitions, table creation, primary keys, foreign keys, and comments.
- Include testing guide and sample schema for integration tests.
2026-02-07 16:09:27 +02:00
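A sketch of driving the new MSSQL writer from Go, assuming it follows the same `NewWriter`/`WriteDatabase` pattern the other writers use (see the GODOC.md usage examples further down); the `mssql.NewWriter` constructor and option fields are assumptions:

```go
package main

import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
	"git.warky.dev/wdevs/relspecgo/pkg/writers"
	"git.warky.dev/wdevs/relspecgo/pkg/writers/mssql"
)

func main() {
	// Read a DBML schema...
	reader := dbml.NewReader(&readers.ReaderOptions{FilePath: "schema.dbml"})
	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}

	// ...and emit MSSQL DDL (schemas, tables, constraints).
	writer := mssql.NewWriter(&writers.WriterOptions{OutputPath: "schema.sql"})
	if err := writer.WriteDatabase(db); err != nil {
		log.Fatal(err)
	}
}
```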
47bf748fd5 chore: ⬆️ Vendor for new deps 2026-02-07 15:51:20 +02:00
88589e00e7 docs: update AI usage declaration for clarity and compliance
All checks were successful
2026-02-07 10:16:19 +02:00
4cdccde9cf docs: update CLAUDE.md with additional utilities and supported formats
Some checks failed
Failing: Integration Tests / Integration Tests
2026-02-07 09:59:35 +02:00
aba22cb574 feat(ui): add relationship management features in schema editor
Some checks failed
Failing: CI / Test (1.25)
- Implement functionality to create, update, delete, and view relationships between tables.
- Introduce new UI screens for managing relationships, including forms for adding and editing relationships.
- Enhance table editor with navigation to relationship management.
- Ensure relationships are displayed in a structured table format for better usability.
2026-02-07 09:49:24 +02:00
d0630b4899 feat: Added Sqlite reader
Some checks failed
Failing: CI / Lint, CI / Build
2026-02-07 09:30:45 +02:00
c9eed9b794 feat(sqlite): add SQLite writer for converting PostgreSQL schemas
All checks were successful
- Implement SQLite DDL writer to convert PostgreSQL schemas to SQLite-compatible SQL statements.
- Include automatic schema flattening, type mapping, auto-increment detection, and function translation.
- Add templates for creating tables, indexes, unique constraints, check constraints, and foreign keys.
- Implement tests for writer functionality and data type mapping.
2026-02-07 09:11:02 +02:00
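Schema flattening is the interesting part of the SQLite writer, since SQLite has no schema namespaces. A minimal illustrative sketch; the writer's actual naming rules (including whether `public` is stripped) may differ:

```go
package main

import (
	"fmt"
	"strings"
)

// flattenName collapses a qualified schema.table name into a single
// identifier. Stripping the default "public" schema is an assumption
// made for illustration.
func flattenName(schema, table string) string {
	if schema == "" || strings.EqualFold(schema, "public") {
		return table
	}
	return schema + "_" + table
}

func main() {
	fmt.Println(flattenName("public", "users"))     // users
	fmt.Println(flattenName("billing", "invoices")) // billing_invoices
}
```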
Hein
5fb09b78c3 feat(relations): 🎉 add flatten schema option for output
All checks were successful
* Introduce `--flatten-schema` flag to convert, merge, and split commands.
* Modify database writing functions to support flattened schema names.
* Update template functions to handle schema.table naming convention.
* Enhance PostgreSQL writer to utilize flattened schema in generated SQL.
* Update tests to ensure compatibility with new flattening feature.
* Dependencies updated for improved functionality.
2026-02-05 14:07:55 +02:00
5d9770b430 test(pgsql, reflectutil): add comprehensive test coverage
All checks were successful
* Introduce tests for PostgreSQL data types and keywords.
* Implement tests for reflect utility functions.
* Ensure consistency and correctness of type conversions and keyword mappings.
* Validate behavior for various edge cases and input types.
2026-01-31 22:30:00 +02:00
f2d500f98d feat(merge): 🎉 Add support for constraints and indexes in merge results
All checks were successful
* Enhance MergeResult to track added constraints and indexes.
* Update merge logic to increment counters for added constraints and indexes.
* Modify GetMergeSummary to include constraints and indexes in the output.
* Add comprehensive tests for merging constraints and indexes.
2026-01-31 21:30:55 +02:00
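A sketch of what the extended merge summary could look like; `TablesAdded` appears in the GODOC.md example further down, while the constraint and index counters are assumed names:

```go
package main

import "fmt"

// MergeResult here tracks the counts the commit describes; field names
// other than TablesAdded are assumptions.
type MergeResult struct {
	TablesAdded      int
	ConstraintsAdded int
	IndexesAdded     int
}

// GetMergeSummary renders the counters as a one-line summary.
func GetMergeSummary(r MergeResult) string {
	return fmt.Sprintf("added %d tables, %d constraints, %d indexes",
		r.TablesAdded, r.ConstraintsAdded, r.IndexesAdded)
}

func main() {
	fmt.Println(GetMergeSummary(MergeResult{TablesAdded: 2, ConstraintsAdded: 5, IndexesAdded: 3}))
}
```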
2ec9991324 feat(merge): 🎉 Add support for merging constraints and indexes
Some checks failed
Failing: CI / Test (1.24)
* Implement mergeConstraints to handle table constraints
* Implement mergeIndexes to handle table indexes
* Update mergeTables to include constraints and indexes during merge
2026-01-31 21:27:28 +02:00
a3e45c206d feat(writer): 🎉 Enhance SQL execution logging and add statement type detection
All checks were successful
* Log statement type during execution for better debugging
* Introduce detectStatementType function to categorize SQL statements
* Update unique constraint naming convention in tests
2026-01-31 21:19:48 +02:00
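A simplified sketch of statement-type detection by leading keyword, the behaviour this commit describes; the real `detectStatementType` likely covers more statement kinds:

```go
package main

import (
	"fmt"
	"strings"
)

// detectStatementType categorizes a SQL statement by its leading
// keyword(s), for logging during execution.
func detectStatementType(sql string) string {
	s := strings.ToUpper(strings.TrimSpace(sql))
	switch {
	case strings.HasPrefix(s, "CREATE TABLE"):
		return "CREATE TABLE"
	case strings.HasPrefix(s, "CREATE UNIQUE INDEX"), strings.HasPrefix(s, "CREATE INDEX"):
		return "CREATE INDEX"
	case strings.HasPrefix(s, "ALTER TABLE"):
		return "ALTER TABLE"
	case strings.HasPrefix(s, "INSERT"):
		return "INSERT"
	default:
		return "OTHER"
	}
}

func main() {
	fmt.Println(detectStatementType(`ALTER TABLE "users" ADD COLUMN age int`)) // ALTER TABLE
}
```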
165623bb1d feat(pgsql): Add templates for constraints and sequences
All checks were successful
* Introduce new templates for creating unique, check, and foreign key constraints with existence checks.
* Add templates for setting sequence values and creating sequences.
* Refactor existing SQL generation logic to utilize new templates for better maintainability and readability.
* Ensure identifiers are properly quoted to handle special characters and reserved keywords.
2026-01-31 21:04:43 +02:00
3c20c3c5d9 feat(writer): 🎉 Add support for check constraints in schema generation
All checks were successful
* Implement check constraints in the schema writer.
* Generate SQL statements to add check constraints if they do not exist.
* Add tests to verify correct generation of check constraints.
2026-01-31 20:42:19 +02:00
a54594e49b feat(writer): 🎉 Add support for unique constraints in schema generation
All checks were successful
* Implement unique constraint handling in GenerateSchemaStatements
* Add writeUniqueConstraints method for generating SQL statements
* Create unit test for unique constraints in writer_test.go
2026-01-31 20:33:08 +02:00
cafe6a461f feat(scripts): 🎉 Add --ignore-errors flag for script execution
All checks were successful
- Allow continued execution of scripts even if errors occur.
- Update execution summary to include counts of successful and failed scripts.
- Enhance error handling and reporting for better visibility.
2026-01-31 20:21:22 +02:00
abdb9b4c78 feat(dbml/reader): 🎉 Implement splitIdentifier function for parsing
All checks were successful
2026-01-31 19:45:24 +02:00
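A sketch of what `splitIdentifier` plausibly does: split a `schema.table` identifier on dots while respecting double-quoted parts. Illustrative only, not the project's exact implementation:

```go
package main

import "fmt"

// splitIdentifier splits a dotted identifier such as schema.table,
// treating dots inside double quotes as literal characters.
func splitIdentifier(s string) []string {
	var parts []string
	var cur []rune
	inQuote := false
	for _, r := range s {
		switch {
		case r == '"':
			inQuote = !inQuote
		case r == '.' && !inQuote:
			parts = append(parts, string(cur))
			cur = cur[:0]
		default:
			cur = append(cur, r)
		}
	}
	return append(parts, string(cur))
}

func main() {
	fmt.Println(splitIdentifier(`"my.schema".users`)) // [my.schema users]
}
```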
e7a15c8e4f feat(writer): 🎉 Implement add column statements for schema evolution
All checks were successful
* Add functionality to generate ALTER TABLE ADD COLUMN statements for existing tables.
* Introduce tests for generating and writing add column statements.
* Enhance schema evolution capabilities when new columns are added.
2026-01-31 19:12:00 +02:00
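Generated ADD COLUMN statements can be made idempotent, since PostgreSQL supports `IF NOT EXISTS` directly on `ADD COLUMN`. A minimal sketch of such a generator; not the project's actual template:

```go
package main

import "fmt"

// addColumnSQL emits a re-runnable ALTER TABLE ADD COLUMN statement.
// %q double-quotes the identifiers, guarding reserved words.
func addColumnSQL(schema, table, column, typ string) string {
	return fmt.Sprintf(
		`ALTER TABLE %q.%q ADD COLUMN IF NOT EXISTS %q %s;`,
		schema, table, column, typ)
}

func main() {
	fmt.Println(addColumnSQL("public", "users", "nickname", "text"))
	// ALTER TABLE "public"."users" ADD COLUMN IF NOT EXISTS "nickname" text;
}
```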
c36b5ede2b feat(writer): 🎉 Enhance primary key handling and add tests
All checks were successful
* Implement checks for existing primary keys before adding new ones.
* Drop auto-generated primary keys if they exist.
* Add tests for primary key existence and column size specifiers.
* Improve type conversion handling for PostgreSQL compatibility.
2026-01-31 18:59:32 +02:00
51ab29f8e3 feat(writer): 🎉 Update index naming conventions for consistency
All checks were successful
* Use SQLName() for primary key constraint naming
* Enhance index name formatting with column suffix
2026-01-31 17:23:18 +02:00
f532fc110c feat(writer): 🎉 Enhance script execution order and add symlink skipping
All checks were successful
* Update script execution to sort by Priority, Sequence, and Name.
* Add functionality to skip symbolic links during directory scanning.
* Improve documentation to reflect changes in execution order and features.
* Add tests for symlink skipping and ensure correct script sorting.
2026-01-31 16:59:17 +02:00
92dff99725 feat(writer): enhance type conversion for PostgreSQL compatibility and add tests
Some checks failed
Failing: Integration Tests / Integration Tests
2026-01-29 21:36:23 +02:00
283b568adb feat(pgsql): add execution reporting for SQL statements
All checks were successful
- Implemented ExecutionReport to track the execution status of SQL statements.
- Added SchemaReport and TableReport to monitor execution per schema and table.
- Enhanced WriteDatabase to execute SQL directly on a PostgreSQL database if a connection string is provided.
- Included error handling and logging for failed statements during execution.
- Added functionality to write execution reports to a JSON file.
- Introduced utility functions to extract table names from CREATE TABLE statements and truncate long SQL statements for error messages.
2026-01-29 21:16:14 +02:00
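A sketch of the report shapes this commit describes, with the JSON output it mentions; all field names here are assumptions, the real structs in pkg/writers/pgsql may differ:

```go
package main

import (
	"encoding/json"
	"log"
	"os"
)

// TableReport records per-table execution outcomes.
type TableReport struct {
	Name   string   `json:"name"`
	Errors []string `json:"errors,omitempty"`
}

// SchemaReport groups table reports per schema.
type SchemaReport struct {
	Name   string        `json:"name"`
	Tables []TableReport `json:"tables"`
}

// ExecutionReport is the top-level summary written to JSON.
type ExecutionReport struct {
	Schemas   []SchemaReport `json:"schemas"`
	Succeeded int            `json:"succeeded"`
	Failed    int            `json:"failed"`
}

func main() {
	report := ExecutionReport{
		Schemas:   []SchemaReport{{Name: "public", Tables: []TableReport{{Name: "users"}}}},
		Succeeded: 12,
	}
	f, err := os.Create("execution_report.json")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	enc := json.NewEncoder(f)
	enc.SetIndent("", "  ")
	if err := enc.Encode(report); err != nil {
		log.Fatal(err)
	}
}
```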
122743ee43 feat(writer): 🎉 Improve primary key handling by checking for explicit constraints and columns
Some checks failed
Failing: CI / Build
2026-01-28 22:08:27 +02:00
91b6046b9b feat(writer): 🎉 Enhance PostgreSQL writer, fixed bugs found using origin
Some checks failed
Failing: CI / Test (1.24), CI / Build
2026-01-28 21:59:25 +02:00
6f55505444 feat(writer): 🎉 Enhance model name generation and formatting
All checks were successful
* Update model name generation to include schema name.
* Add gofmt execution after writing output files.
* Refactor relationship field naming to include schema.
* Update tests to reflect changes in model names and relationships.
2026-01-10 18:28:41 +02:00
e0e7b64c69 feat(writer): 🎉 Resolve field name collisions with methods
All checks were successful
* Implement field name collision resolution in model generation.
* Add tests to verify renaming of fields that conflict with generated method names.
* Ensure primary key type safety in UpdateID method.
2026-01-10 17:54:33 +02:00
4181cb1fbd feat(writer): 🎉 Enhance relationship field naming and uniqueness
All checks were successful
* Update relationship field naming conventions for has-one and has-many relationships.
* Implement logic to ensure unique field names by tracking used names.
* Add tests to verify new naming conventions and uniqueness constraints.
2026-01-10 17:45:13 +02:00
120ffc6a5a feat(writer): 🎉 Update relationship field naming convention
All checks were successful
* Refactor generateRelationshipFieldName to use foreign key columns for unique naming.
* Add test for multiple references to the same table to ensure unique relationship field names.
* Update existing tests to reflect new naming convention.
2026-01-10 13:49:54 +02:00
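Deriving the relationship field name from the foreign key column keeps two references to the same table distinct (e.g. `author_id` vs `reviewer_id`). An illustrative sketch of the idea, not the project's exact function:

```go
package main

import (
	"fmt"
	"strings"
)

// fieldNameFromFK turns a foreign key column into a Go field name:
// strip a trailing _id, then CamelCase the remaining words.
func fieldNameFromFK(fkColumn string) string {
	base := strings.TrimSuffix(fkColumn, "_id")
	parts := strings.Split(base, "_")
	for i, p := range parts {
		if p != "" {
			parts[i] = strings.ToUpper(p[:1]) + p[1:]
		}
	}
	return strings.Join(parts, "")
}

func main() {
	fmt.Println(fieldNameFromFK("author_id"))   // Author
	fmt.Println(fieldNameFromFK("reviewer_id")) // Reviewer
}
```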
b20ad35485 feat(writer): 🎉 Add sanitization for struct tag values
All checks were successful
* Implement SanitizeStructTagValue function to clean identifiers for struct tags.
* Update model data generation to use sanitized column names.
* Ensure safe handling of backticks in column names and types across writers.
2026-01-10 13:42:25 +02:00
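A minimal sketch of what sanitizing a struct tag value involves: Go struct tags are backtick-delimited, so a backtick (or stray quote) in a column name would break the generated literal. Assumed behaviour, not the project's exact `SanitizeStructTagValue`:

```go
package main

import (
	"fmt"
	"strings"
)

// sanitizeStructTagValue drops characters that would terminate or
// corrupt a Go struct tag literal.
func sanitizeStructTagValue(s string) string {
	return strings.Map(func(r rune) rune {
		if r == '`' || r == '"' {
			return -1 // drop the rune
		}
		return r
	}, s)
}

func main() {
	fmt.Println(sanitizeStructTagValue("weird`col\"name")) // weirdcolname
}
```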
f258f8baeb feat(writer): 🎉 Add filename sanitization for DBML identifiers
All checks were successful
* Implement SanitizeFilename function to clean identifiers
* Remove quotes, comments, and invalid characters from filenames
* Update filename generation in writers to use sanitized names
2026-01-10 13:32:33 +02:00
6388daba56 feat(reader): 🎉 Add support for multi-file DBML loading
All checks were successful
* Implement directory reading for DBML files.
* Merge schemas and tables from multiple files.
* Add tests for multi-file loading and merging behavior.
* Enhance file discovery and sorting logic.
2026-01-10 13:17:30 +02:00
f6c3f2b460 feat(bun): 🎉 Enhance nullability handling in column parsing
All checks were successful
* Introduce explicit nullability markers in column tags.
* Update logic to infer nullability based on Go types when no markers are present.
* Ensure correct tags are generated for nullable and non-nullable fields.
2026-01-04 22:11:44 +02:00
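When a Bun column tag carries no explicit nullability marker, nullability can be inferred from the Go type: pointer fields read as nullable, value fields as non-nullable. A simplified sketch of that fallback:

```go
package main

import (
	"fmt"
	"reflect"
)

// inferNullable falls back to the Go type when no tag marker is
// present: a pointer type implies a nullable column.
func inferNullable(t reflect.Type) bool {
	return t.Kind() == reflect.Pointer
}

func main() {
	var name *string
	var age int
	fmt.Println(inferNullable(reflect.TypeOf(name))) // true
	fmt.Println(inferNullable(reflect.TypeOf(age)))  // false
}
```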
156e655571 chore(ci): 🎉 Install PostgreSQL client for integration tests
Some checks failed
Failing: Integration Tests / Integration Tests
2026-01-04 22:04:20 +02:00
b57e1ba304 feat(cmd): 🎉 Add split command for schema extraction
Some checks failed
Failing: Integration Tests / Integration Tests
- Introduce 'split' command to extract selected tables and schemas.
- Supports various input and output formats.
- Allows filtering of schemas and tables during extraction.
2026-01-04 22:01:29 +02:00
19fba62f1b feat(ui): 🎉 Add GUID field to column, database, schema, and table editors
Some checks failed
Failing: Integration Tests / Integration Tests
2026-01-04 20:00:18 +02:00
b4ff4334cc feat(models): 🎉 Add GUID field to various models
Some checks failed
Failing: CI / Test (1.25), Integration Tests / Integration Tests
* Introduced GUID field to Database, Domain, DomainTable, Schema, Table, View, Sequence, Column, Index, Relationship, Constraint, Enum, and Script models.
* Updated initialization functions to assign new GUIDs using uuid package.
* Enhanced DCTX reader and writer to utilize GUIDs from models where available.
2026-01-04 19:53:17 +02:00
5d9b00c8f2 feat(ui): 🎉 Add import and merge database feature
Some checks failed
Failing: CI / Test (1.25), Integration Tests / Integration Tests
- Introduce a new screen for importing and merging database schemas.
- Implement merge logic to combine schemas, tables, columns, and other objects.
- Add options to skip specific object types during the merge process.
- Update main menu to include the new import and merge option.
2026-01-04 19:31:28 +02:00
debf351c48 fix(ui): 🐛 Simplify keyboard shortcut handling in load/save screens
Some checks failed
Failing: CI / Test (1.25), Integration Tests / Integration Tests
2026-01-04 18:41:59 +02:00
d87d657275 feat(ui): 🎨 Add user interface documentation and screenshots
Some checks failed
Failing: CI / Test (1.25), CI / Lint, Integration Tests / Integration Tests
- Document interactive terminal-based UI features
- Include screenshots for main screen, table view, and column editing
2026-01-04 18:39:13 +02:00
1795eb64d1 feat(ui): 🎨 Implement schema and table management screens
Some checks failed
Failing: CI / Test (1.24), CI / Test (1.25), CI / Lint, Integration Tests / Integration Tests
* Add schema management screen with list and editor
* Implement table management screen with list and editor
* Create data operations for schema and table management
* Define UI rules and guidelines for consistency
* Ensure circular tab navigation and keyboard shortcuts
* Add forms for creating and editing schemas and tables
* Implement confirmation dialogs for destructive actions
2026-01-04 18:29:29 +02:00
355f0f918f chore(deps): 🚀 update module dependencies
* Add new dependencies for terminal handling and color management.
* Include updates for tcell, go-colorful, tview, and uniseg.
* Update golang.org/x/sys and golang.org/x/term for improved compatibility.
* Ensure all dependencies are explicitly listed with their versions.
2026-01-04 18:29:11 +02:00
5d3c86119e feat(domains): add domain support for DrawDB integration
Some checks failed
Failing: CI / Build, Integration Tests / Integration Tests
- Introduce Domain and DomainTable models for logical grouping of tables.
- Implement export and import functionality for domains in DrawDB format.
- Update template execution modes to include domain processing.
- Enhance documentation for domain features and usage.
2026-01-04 15:49:47 +02:00
8c602e3db0 Added go text template writier (#1)
Some checks failed
Failing: CI / Test (1.24), Integration Tests / Integration Tests
feat(templ): added templ to the command line; reads a Go template and outputs code

Reviewed-on: #1
Co-authored-by: Hein <hein.puth@gmail.com>
Co-committed-by: Hein <hein.puth@gmail.com>
2026-01-03 19:05:53 +00:00
2220 changed files with 6738143 additions and 532 deletions

@@ -4,10 +4,7 @@
 "description": "Database Relations Specification Tool for Go",
 "language": "go"
 },
-"agent": {
-"preferred": "Explore",
-"description": "Use Explore agent for fast codebase navigation and Go project exploration"
-},
 "codeStyle": {
 "useGofmt": true,
 "lineLength": 100,

@@ -46,6 +46,11 @@ jobs:
       - name: Download dependencies
         run: go mod download
+      - name: Install PostgreSQL client
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y postgresql-client
       - name: Initialize test database
         env:
           PGPASSWORD: relspec_test_password

@@ -25,6 +25,7 @@ jobs:
         id: get_version
         run: |
           echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
+          echo "BUILD_DATE=$(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
           echo "Version: ${GITHUB_REF#refs/tags/}"
       - name: Build binaries for multiple platforms
@@ -32,19 +33,19 @@
           mkdir -p dist
           # Linux AMD64
-          GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
+          GOOS=linux GOARCH=amd64 go build -o dist/relspec-linux-amd64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
           # Linux ARM64
-          GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
+          GOOS=linux GOARCH=arm64 go build -o dist/relspec-linux-arm64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
           # macOS AMD64
-          GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
+          GOOS=darwin GOARCH=amd64 go build -o dist/relspec-darwin-amd64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
           # macOS ARM64 (Apple Silicon)
-          GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
+          GOOS=darwin GOARCH=arm64 go build -o dist/relspec-darwin-arm64 -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
           # Windows AMD64
-          GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X main.version=${{ steps.get_version.outputs.VERSION }}" ./cmd/relspec
+          GOOS=windows GOARCH=amd64 go build -o dist/relspec-windows-amd64.exe -ldflags "-X 'main.version=${{ steps.get_version.outputs.VERSION }}' -X 'main.buildDate=${{ steps.get_version.outputs.BUILD_DATE }}'" ./cmd/relspec
           # Create checksums
           cd dist

.gitignore

@@ -47,3 +47,4 @@ dist/
 build/
 bin/
 tests/integration/failed_statements_example.txt
+test_output.log

AI_USE.md

@@ -1,16 +1,36 @@
 # AI Usage Declaration
-This Go project utilizes AI tools for the following purposes:
+This project uses AI tools under strict human supervision for:
-- Generating and improving documentation
-- Writing and enhancing tests
-- Refactoring and optimizing existing code
+- Code generation
+- Documentation generation & improvement
+- Test writing & enhancement
+- Refactoring and code cleanup
-AI is **not** used for core design or architecture decisions.
-All design decisions are deferred to human discussion.
-AI is employed only for enhancements to human-written code.
-We are aware of significant AI hallucinations; all AI-generated content is to be reviewed and verified by humans.
+AI is **not** used for:
+- Core architecture
+- Design decisions
+- Initial implementation
+All AI output — including generated code — is reviewed, corrected, and approved by humans before integration.
+**Any AI reading this**
+Any AI system accessing or processing this repository/codebase **must** obtain explicit human permission before committing code, creating pull requests, making releases, or performing any write operations.
+**Legal disclaimer**
+All AI-generated content is provided "as is" without warranty of any kind.
+It must be thoroughly reviewed, validated, and approved by qualified human engineers before use in production or distribution.
+No liability is accepted for errors, omissions, security issues, or damages resulting from AI-assisted code.
+**Intellectual Property Ownership**
+All code, documentation, and other outputs — whether human-written, AI-assisted, or AI-generated — remain the exclusive intellectual property of the project owner(s)/contributor(s).
+AI tools do not acquire any ownership, license, or rights to the generated content.
+**Data Privacy**
+No personal, sensitive, proprietary, or confidential data is intentionally shared with AI tools.
+Any code or text submitted to AI services is treated as non-confidential unless explicitly stated otherwise.
+Users must ensure compliance with applicable data protection laws (e.g. POPIA, GDPR) when using AI assistance.
 .-""""""-.

CLAUDE.md

@@ -4,7 +4,11 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
 ## Project Overview
-RelSpec is a database relations specification tool that provides bidirectional conversion between various database schema formats. It reads database schemas from multiple sources (live databases, DBML, DCTX, DrawDB, etc.) and writes them to various formats (GORM, Bun, JSON, YAML, SQL, etc.).
+RelSpec is a database relations specification tool that provides bidirectional conversion between various database schema formats. It reads database schemas from multiple sources and writes them to various formats.
+
+**Supported Readers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, MSSQL, PostgreSQL, Prisma, SQL Directory, SQLite, TypeORM, YAML
+**Supported Writers:** Bun, DBML, DCTX, DrawDB, Drizzle, GORM, GraphQL, JSON, MSSQL, PostgreSQL, Prisma, SQL Exec, SQLite, Template, TypeORM, YAML
 ## Build Commands
@@ -50,8 +54,9 @@ Database
 ```
 **Important patterns:**
-- Each format (dbml, dctx, drawdb, etc.) has its own `pkg/readers/<format>/` and `pkg/writers/<format>/` subdirectories
+- Each format has its own `pkg/readers/<format>/` and `pkg/writers/<format>/` subdirectories
-- Use `ReaderOptions` and `WriterOptions` structs for configuration (file paths, connection strings, metadata)
+- Use `ReaderOptions` and `WriterOptions` structs for configuration (file paths, connection strings, metadata, flatten option)
+- FlattenSchema option collapses multi-schema databases into a single schema for simplified output
 - Schema reading typically returns the first schema when reading from Database
 - Table reading typically returns the first table when reading from Schema
@@ -65,8 +70,22 @@ Contains PostgreSQL-specific helpers:
 - `keywords.go`: SQL reserved keywords validation
 - `datatypes.go`: PostgreSQL data type mappings and conversions
+
+### Additional Utilities
+- **pkg/diff/**: Schema difference detection and comparison
+- **pkg/inspector/**: Schema inspection and analysis tools
+- **pkg/merge/**: Schema merging capabilities
+- **pkg/reflectutil/**: Reflection utilities for dynamic type handling
+- **pkg/ui/**: Terminal UI components for interactive schema editing
+- **pkg/commontypes/**: Shared type definitions
 ## Development Patterns
+- Each reader/writer is self-contained in its own subdirectory
+- Options structs control behavior (file paths, connection strings, flatten schema, etc.)
+- Live database connections supported for PostgreSQL and SQLite
+- Template writer allows custom output formats
 ## Testing
 - Test files should be in the same package as the code they test
@@ -77,5 +96,6 @@ Contains PostgreSQL-specific helpers:
 ## Module Information
 - Module path: `git.warky.dev/wdevs/relspecgo`
-- Go version: 1.25.5
+- Go version: 1.24.0
-- Uses Cobra for CLI, Viper for configuration
+- Uses Cobra for CLI
+- Key dependencies: pgx/v5 (PostgreSQL), modernc.org/sqlite (SQLite), tview (TUI), Bun ORM

GODOC.md (new file, 196 lines)

@@ -0,0 +1,196 @@
# RelSpec API Documentation (godoc)
This document explains how to access and use the RelSpec API documentation.
## Viewing Documentation Locally
### Using `go doc` Command Line
View package documentation:
```bash
# Main package overview
go doc
# Specific package
go doc ./pkg/models
go doc ./pkg/readers
go doc ./pkg/writers
go doc ./pkg/ui
# Specific type or function
go doc ./pkg/models Database
go doc ./pkg/readers Reader
go doc ./pkg/writers Writer
```
View all documentation for a package:
```bash
go doc -all ./pkg/models
go doc -all ./pkg/readers
go doc -all ./pkg/writers
```
### Using `godoc` Web Server
**Quick Start (Recommended):**
```bash
make godoc
```
This will automatically install godoc if needed and start the server on port 6060.
**Manual Installation:**
```bash
go install golang.org/x/tools/cmd/godoc@latest
godoc -http=:6060
```
Then open your browser to:
```
http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/
```
## Package Documentation
### Core Packages
- **`pkg/models`** - Core data structures (Database, Schema, Table, Column, etc.)
- **`pkg/readers`** - Input format readers (dbml, pgsql, gorm, prisma, etc.)
- **`pkg/writers`** - Output format writers (dbml, pgsql, gorm, prisma, etc.)
### Utility Packages
- **`pkg/diff`** - Schema comparison and difference detection
- **`pkg/merge`** - Schema merging utilities
- **`pkg/transform`** - Validation and normalization
- **`pkg/ui`** - Interactive terminal UI for schema editing
### Support Packages
- **`pkg/pgsql`** - PostgreSQL-specific utilities
- **`pkg/inspector`** - Database introspection capabilities
- **`pkg/reflectutil`** - Reflection utilities for Go code analysis
- **`pkg/commontypes`** - Shared type definitions
### Reader Implementations
Each reader is in its own subpackage under `pkg/readers/`:
- `pkg/readers/dbml` - DBML format reader
- `pkg/readers/dctx` - DCTX format reader
- `pkg/readers/drawdb` - DrawDB JSON reader
- `pkg/readers/graphql` - GraphQL schema reader
- `pkg/readers/json` - JSON schema reader
- `pkg/readers/yaml` - YAML schema reader
- `pkg/readers/gorm` - Go GORM models reader
- `pkg/readers/bun` - Go Bun models reader
- `pkg/readers/drizzle` - TypeScript Drizzle ORM reader
- `pkg/readers/prisma` - Prisma schema reader
- `pkg/readers/typeorm` - TypeScript TypeORM reader
- `pkg/readers/pgsql` - PostgreSQL database reader
- `pkg/readers/sqlite` - SQLite database reader
### Writer Implementations
Each writer is in its own subpackage under `pkg/writers/`:
- `pkg/writers/dbml` - DBML format writer
- `pkg/writers/dctx` - DCTX format writer
- `pkg/writers/drawdb` - DrawDB JSON writer
- `pkg/writers/graphql` - GraphQL schema writer
- `pkg/writers/json` - JSON schema writer
- `pkg/writers/yaml` - YAML schema writer
- `pkg/writers/gorm` - Go GORM models writer
- `pkg/writers/bun` - Go Bun models writer
- `pkg/writers/drizzle` - TypeScript Drizzle ORM writer
- `pkg/writers/prisma` - Prisma schema writer
- `pkg/writers/typeorm` - TypeScript TypeORM writer
- `pkg/writers/pgsql` - PostgreSQL SQL writer
- `pkg/writers/sqlite` - SQLite SQL writer
## Usage Examples
### Reading a Schema
```go
import (
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
)
reader := dbml.NewReader(&readers.ReaderOptions{
FilePath: "schema.dbml",
})
db, err := reader.ReadDatabase()
```
### Writing a Schema
```go
import (
"git.warky.dev/wdevs/relspecgo/pkg/writers"
"git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
)
writer := gorm.NewWriter(&writers.WriterOptions{
OutputPath: "./models",
PackageName: "models",
})
err := writer.WriteDatabase(db)
```
### Comparing Schemas
```go
import "git.warky.dev/wdevs/relspecgo/pkg/diff"
result := diff.CompareDatabases(sourceDB, targetDB)
err := diff.FormatDiff(result, diff.OutputFormatText, os.Stdout)
```
### Merging Schemas
```go
import "git.warky.dev/wdevs/relspecgo/pkg/merge"
result := merge.MergeDatabases(targetDB, sourceDB, nil)
fmt.Printf("Added %d tables\n", result.TablesAdded)
```
## Documentation Standards
All public APIs follow Go documentation conventions:
- Package documentation in `doc.go` files
- Type, function, and method comments start with the item name
- Examples where applicable
- Clear description of parameters and return values
- Usage notes and caveats where relevant
## Generating Documentation
To regenerate documentation after code changes:
```bash
# Verify documentation builds correctly
go doc -all ./pkg/... > /dev/null
# Check for undocumented exports
go vet ./...
```
## Contributing Documentation
When adding new packages or exported items:
1. Add package documentation in a `doc.go` file
2. Document all exported types, functions, and methods
3. Include usage examples for complex APIs
4. Follow Go documentation style guide
5. Verify with `go doc` before committing
## References
- [Go Documentation Guide](https://go.dev/doc/comment)
- [Effective Go - Commentary](https://go.dev/doc/effective_go#commentary)
- [godoc Documentation](https://pkg.go.dev/golang.org/x/tools/cmd/godoc)

Makefile

@@ -1,4 +1,4 @@
-.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version
+.PHONY: all build test test-unit test-integration lint coverage clean install help docker-up docker-down docker-test docker-test-integration start stop release release-version godoc

 # Binary name
 BINARY_NAME=relspec
@@ -14,6 +14,11 @@ GOGET=$(GOCMD) get
 GOMOD=$(GOCMD) mod
 GOCLEAN=$(GOCMD) clean

+# Version information
+VERSION := $(shell git describe --tags --always --dirty 2>/dev/null || echo "dev")
+BUILD_DATE := $(shell date -u +"%Y-%m-%d %H:%M:%S UTC")
+LDFLAGS := -X 'main.version=$(VERSION)' -X 'main.buildDate=$(BUILD_DATE)'
+
 # Auto-detect container runtime (Docker or Podman)
 CONTAINER_RUNTIME := $(shell \
@@ -36,10 +41,10 @@ COMPOSE_CMD := $(shell \
 all: lint test build ## Run linting, tests, and build

-build: ## Build the binary
-	@echo "Building $(BINARY_NAME)..."
+build: deps ## Build the binary
+	@echo "Building $(BINARY_NAME) $(VERSION)..."
 	@mkdir -p $(BUILD_DIR)
-	$(GOBUILD) -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
+	$(GOBUILD) -ldflags "$(LDFLAGS)" -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/relspec
 	@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"

 test: test-unit ## Run all unit tests (alias for test-unit)
@@ -91,8 +96,8 @@ clean: ## Clean build artifacts
 	@echo "Clean complete"

 install: ## Install the binary to $GOPATH/bin
-	@echo "Installing $(BINARY_NAME)..."
-	$(GOCMD) install ./cmd/relspec
+	@echo "Installing $(BINARY_NAME) $(VERSION)..."
+	$(GOCMD) install -ldflags "$(LDFLAGS)" ./cmd/relspec
 	@echo "Install complete"

 deps: ## Download dependencies
@@ -101,6 +106,29 @@ deps: ## Download dependencies
 	$(GOMOD) tidy
 	@echo "Dependencies updated"

+godoc: ## Start godoc server on http://localhost:6060
+	@echo "Starting godoc server..."
+	@GOBIN=$$(go env GOPATH)/bin; \
+	if command -v godoc > /dev/null 2>&1; then \
+		echo "godoc server running on http://localhost:6060"; \
+		echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
+		echo "Press Ctrl+C to stop"; \
+		godoc -http=:6060; \
+	elif [ -f "$$GOBIN/godoc" ]; then \
+		echo "godoc server running on http://localhost:6060"; \
+		echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
+		echo "Press Ctrl+C to stop"; \
+		$$GOBIN/godoc -http=:6060; \
+	else \
+		echo "godoc not installed. Installing..."; \
+		go install golang.org/x/tools/cmd/godoc@latest; \
+		echo "godoc installed. Starting server..."; \
+		echo "godoc server running on http://localhost:6060"; \
+		echo "View documentation at: http://localhost:6060/pkg/git.warky.dev/wdevs/relspecgo/"; \
+		echo "Press Ctrl+C to stop"; \
+		$$GOBIN/godoc -http=:6060; \
+	fi

 start: docker-up ## Alias for docker-up (start PostgreSQL test database)
 stop: docker-down ## Alias for docker-down (stop PostgreSQL test database)

README.md

@@ -37,6 +37,7 @@ RelSpec can read database schemas from multiple sources:
 #### Database Inspection
 - [PostgreSQL](pkg/readers/pgsql/README.md) - Direct PostgreSQL database introspection
+- [SQLite](pkg/readers/sqlite/README.md) - Direct SQLite database introspection
 #### Schema Formats
 - [DBML](pkg/readers/dbml/README.md) - Database Markup Language (dbdiagram.io)
@@ -59,6 +60,7 @@ RelSpec can write database schemas to multiple formats:
 #### Database DDL
 - [PostgreSQL](pkg/writers/pgsql/README.md) - PostgreSQL DDL (CREATE TABLE, etc.)
+- [SQLite](pkg/writers/sqlite/README.md) - SQLite DDL with automatic schema flattening
 #### Schema Formats
 - [DBML](pkg/writers/dbml/README.md) - Database Markup Language
@@ -85,6 +87,29 @@ RelSpec includes a powerful schema validation and linting tool:
 ## Use of AI
 [Rules and use of AI](./AI_USE.md)
+
+## User Interface
+
+RelSpec provides an interactive terminal-based user interface for managing and editing database schemas. The UI allows you to:
+
+- **Browse Databases** - Navigate through your database structure with an intuitive menu system
+- **Edit Schemas** - Create, modify, and organize database schemas
+- **Manage Tables** - Add, update, or delete tables with full control over structure
+- **Configure Columns** - Define column properties, data types, constraints, and relationships
+- **Interactive Editing** - Real-time validation and feedback as you make changes
+
+The interface supports multiple input formats, making it easy to load, edit, and save your database definitions in various formats.
+
+<p align="center" width="100%">
+<img src="./assets/image/screenshots/main_screen.jpg">
+</p>
+<p align="center" width="100%">
+<img src="./assets/image/screenshots/table_view.jpg">
+</p>
+<p align="center" width="100%">
+<img src="./assets/image/screenshots/edit_column.jpg">
+</p>
 ## Installation
 ```bash
@@ -95,6 +120,55 @@ go install -v git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
 ## Usage
+### Interactive Schema Editor
+
+```bash
+# Launch interactive editor with a DBML schema
+relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
+
+# Edit PostgreSQL database in place
+relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
+  --to pgsql --to-conn "postgres://user:pass@localhost/mydb"
+
+# Edit JSON schema and save as GORM models
+relspec edit --from json --from-path db.json --to gorm --to-path models/
+```
+
+The `edit` command launches an interactive terminal user interface where you can:
+- Browse and navigate your database structure
+- Create, modify, and delete schemas, tables, and columns
+- Configure column properties, constraints, and relationships
+- Save changes to various formats
+- Import and merge schemas from other databases
+
+### Schema Merging
+
+```bash
+# Merge two JSON schemas (additive merge - adds missing items only)
+relspec merge --target json --target-path base.json \
+  --source json --source-path additions.json \
+  --output json --output-path merged.json
+
+# Merge PostgreSQL database into JSON, skipping specific tables
+relspec merge --target json --target-path current.json \
+  --source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
+  --output json --output-path updated.json \
+  --skip-tables "audit_log,temp_tables"
+
+# Cross-format merge (DBML + YAML → JSON)
+relspec merge --target dbml --target-path base.dbml \
+  --source yaml --source-path additions.yaml \
+  --output json --output-path result.json \
+  --skip-relations --skip-views
+```
+
+The `merge` command combines two database schemas additively:
+- Adds missing schemas, tables, columns, and other objects
+- Never modifies or deletes existing items (safe operation)
+- Supports selective merging with skip options (domains, relations, enums, views, sequences, specific tables)
+- Works across any combination of supported formats
+- Perfect for integrating multiple schema definitions or applying patches
 ### Schema Conversion
 ```bash
@@ -113,6 +187,10 @@ relspec convert --from pgsql --from-conn "postgres://..." \
 # Convert DBML to PostgreSQL SQL
 relspec convert --from dbml --from-path schema.dbml \
   --to pgsql --to-path schema.sql
+
+# Convert PostgreSQL database to SQLite (with automatic schema flattening)
+relspec convert --from pgsql --from-conn "postgres://..." \
+  --to sqlite --to-path sqlite_schema.sql
 ```

TODO.md

@@ -1,16 +1,15 @@
 # RelSpec - TODO List
 ## Input Readers / Writers
 - [✔️] **Database Inspector**
-- [✔️] PostgreSQL driver
+- [✔️] PostgreSQL driver (reader + writer)
 - [ ] MySQL driver
-- [ ] SQLite driver
+- [✔️] SQLite driver (reader + writer with automatic schema flattening)
 - [ ] MSSQL driver
 - [✔️] Foreign key detection
 - [✔️] Index extraction
-- [*] .sql file generation with sequence and priority
+- [✔️] .sql file generation (PostgreSQL, SQLite)
 - [✔️] .dbml: Database Markup Language (DBML) for textual schema representation.
 - [✔️] Prisma schema support (PSL format) .prisma
 - [✔️] Drizzle ORM support .ts (TypeScript / JavaScript) (Mr. Edd wanted to move from Prisma to Drizzle. If you find bugs, you are welcome to open pull requests or issues)
@@ -21,12 +20,25 @@
 - [ ] .avsc: Avro schema (JSON format for data serialization) (💲 Someone can do this, not me)
 - [✔️] GraphQL schema generation
+## UI
+- [✔️] Basic UI (I went with tview)
+- [✔️] Save / Load Database
+- [✔️] Schemas / Domains / Tables
+- [✔️] Add Relations
+- [ ] Add Indexes
+- [ ] Add Views
+- [ ] Add Sequences
+- [ ] Add Scripts
+- [ ] Domain / Table Assignment
 ## Documentation
-- [ ] API documentation (godoc)
+- [✔️] API documentation (godoc)
 - [ ] Usage examples for each format combination
 ## Advanced Features
 - [ ] Dry-run mode for validation
 - [x] Diff tool for comparing specifications
 - [ ] Migration script generation
@@ -35,12 +47,13 @@
 - [ ] Watch mode for auto-regeneration
 ## Future Considerations
 - [ ] Web UI for visual editing
 - [ ] REST API server mode
 - [ ] Support for NoSQL databases
 ## Performance
 - [ ] Concurrent processing for multiple tables
 - [ ] Streaming for large databases
 - [ ] Memory optimization

Three new screenshot images added under assets/image/screenshots/ (42 KiB, 50 KiB, 67 KiB); binary files not shown.

View File

@@ -18,8 +18,10 @@ import (
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm" "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql" "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json" "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql" "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma" "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm" "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml" "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/writers" "git.warky.dev/wdevs/relspecgo/pkg/writers"
@@ -31,8 +33,10 @@ import (
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm" wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql" wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json" wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wmssql "git.warky.dev/wdevs/relspecgo/pkg/writers/mssql"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql" wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma" wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm" wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml" wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
) )
@@ -45,6 +49,7 @@ var (
convertTargetPath string convertTargetPath string
convertPackageName string convertPackageName string
convertSchemaFilter string convertSchemaFilter string
convertFlattenSchema bool
) )
var convertCmd = &cobra.Command{ var convertCmd = &cobra.Command{
@@ -69,6 +74,8 @@ Input formats:
- prisma: Prisma schema files (.prisma) - prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript) - typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection) - pgsql: PostgreSQL database (live connection)
- mssql: Microsoft SQL Server database (live connection)
- sqlite: SQLite database file
Output formats: Output formats:
- dbml: DBML schema files - dbml: DBML schema files
@@ -83,14 +90,22 @@ Output formats:
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL SQL schema
- mssql: Microsoft SQL Server SQL schema
- sqlite: SQLite SQL schema (with automatic schema flattening)
Connection String Examples:
PostgreSQL:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
SQLite:
/path/to/database.db
./relative/path/database.sqlite
database.db
Examples:
# Convert DBML to GORM models
@@ -135,19 +150,28 @@ Examples:
# Convert Bun models directory to JSON
relspec convert --from bun --from-path ./models \
--to json --to-path schema.json
# Convert SQLite database to JSON
relspec convert --from sqlite --from-path database.db \
--to json --to-path schema.json
# Convert SQLite to PostgreSQL SQL
relspec convert --from sqlite --from-path database.db \
--to pgsql --to-path schema.sql`,
RunE: runConvert,
}
func init() {
convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for pgsql) or file path (for sqlite)")
convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
convertCmd.Flags().BoolVar(&convertFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
err := convertCmd.MarkFlagRequired("from")
if err != nil {
@@ -202,7 +226,7 @@ func runConvert(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
}
if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter, convertFlattenSchema); err != nil {
return fmt.Errorf("failed to write target: %w", err)
}
@@ -289,6 +313,23 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "mssql", "sqlserver", "mssql2016", "mssql2017", "mssql2019", "mssql2022":
if connString == "" {
return nil, fmt.Errorf("connection string is required for MSSQL format")
}
reader = mssql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
if dbPath == "" {
dbPath = connString
}
if dbPath == "" {
return nil, fmt.Errorf("file path or connection string is required for SQLite format")
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
default:
return nil, fmt.Errorf("unsupported source format: %s", dbType)
}
@@ -301,12 +342,13 @@ func readDatabaseForConvert(dbType, filePath, connString string) (*models.Databa
return db, nil
}
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string, flattenSchema bool) error {
var writer writers.Writer
writerOpts := &writers.WriterOptions{
OutputPath: outputPath,
PackageName: packageName,
FlattenSchema: flattenSchema,
}
switch strings.ToLower(dbType) {
@@ -343,6 +385,12 @@ func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaF
case "pgsql", "postgres", "postgresql", "sql": case "pgsql", "postgres", "postgresql", "sql":
writer = wpgsql.NewWriter(writerOpts) writer = wpgsql.NewWriter(writerOpts)
case "mssql", "sqlserver", "mssql2016", "mssql2017", "mssql2019", "mssql2022":
writer = wmssql.NewWriter(writerOpts)
case "sqlite", "sqlite3":
writer = wsqlite.NewWriter(writerOpts)
case "prisma": case "prisma":
writer = wprisma.NewWriter(writerOpts) writer = wprisma.NewWriter(writerOpts)
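
The new FlattenSchema option is described only by its flag help (schema.table becomes schema_table). A minimal sketch of the rule it implies follows; flattenTableName is a hypothetical helper, and exempting the default schema is an assumption, not something this diff confirms:

// flattenTableName illustrates --flatten-schema: tables from named schemas
// are renamed schema_table so that schema-less targets (e.g. SQLite) can
// still hold the full layout. Hypothetical; the writers' real logic may differ.
func flattenTableName(schemaName, tableName string) string {
	if schemaName == "" || schemaName == "public" { // assumed default-schema exemption
		return tableName
	}
	return schemaName + "_" + tableName
}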


@@ -16,6 +16,7 @@ import (
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb" "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json" "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql" "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml" "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
) )
@@ -254,6 +255,17 @@ func readDatabase(dbType, filePath, connString, label string) (*models.Database,
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
if dbPath == "" {
dbPath = connString
}
if dbPath == "" {
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
default:
return nil, fmt.Errorf("%s: unsupported database format: %s", label, dbType)
}

cmd/relspec/edit.go Normal file

@@ -0,0 +1,361 @@
package main
import (
"fmt"
"os"
"strings"
"github.com/spf13/cobra"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/ui"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)
var (
editSourceType string
editSourcePath string
editSourceConn string
editTargetType string
editTargetPath string
editSchemaFilter string
)
var editCmd = &cobra.Command{
Use: "edit",
Short: "Edit database schema interactively with TUI",
Long: `Edit database schemas from various formats using an interactive terminal UI.
Allows you to:
- List and navigate schemas and tables
- Create, edit, and delete schemas
- Create, edit, and delete tables
- Add, edit, and delete columns
- Set table and column properties
- Add constraints, indexes, and relationships
Supports reading from and writing to all supported formats:
Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection)
- sqlite: SQLite database file
Output formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go)
- bun: Bun model files (Go)
- drizzle: Drizzle ORM schema files (TypeScript)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL SQL schema
- sqlite: SQLite SQL schema (with automatic schema flattening)
Connection String Examples:
PostgreSQL:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
SQLite:
/path/to/database.db
./relative/path/database.sqlite
database.db
Examples:
# Edit a DBML schema file
relspec edit --from dbml --from-path schema.dbml --to dbml --to-path schema.dbml
# Edit a PostgreSQL database
relspec edit --from pgsql --from-conn "postgres://user:pass@localhost/mydb" \
--to pgsql --to-conn "postgres://user:pass@localhost/mydb"
# Edit JSON schema and output to GORM
relspec edit --from json --from-path db.json --to gorm --to-path models/
# Edit GORM models in place
relspec edit --from gorm --from-path ./models --to gorm --to-path ./models
# Edit SQLite database
relspec edit --from sqlite --from-path database.db --to sqlite --to-path database.db
# Convert SQLite to DBML
relspec edit --from sqlite --from-path database.db --to dbml --to-path schema.dbml`,
RunE: runEdit,
}
func init() {
editCmd.Flags().StringVar(&editSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
editCmd.Flags().StringVar(&editSourcePath, "from-path", "", "Source file path (for file-based formats)")
editCmd.Flags().StringVar(&editSourceConn, "from-conn", "", "Source connection string (for pgsql) or file path (for sqlite)")
editCmd.Flags().StringVar(&editTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql, sqlite)")
editCmd.Flags().StringVar(&editTargetPath, "to-path", "", "Target file path (for file-based formats)")
editCmd.Flags().StringVar(&editSchemaFilter, "schema", "", "Filter to a specific schema by name")
// Flags are now optional - if not provided, UI will prompt for load/save options
}
func runEdit(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Editor ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
var db *models.Database
var loadConfig *ui.LoadConfig
var saveConfig *ui.SaveConfig
var err error
// Check if source parameters are provided
if editSourceType != "" {
// Read source database
fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", editSourceType)
if editSourcePath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", editSourcePath)
}
if editSourceConn != "" {
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(editSourceConn))
}
db, err = readDatabaseForEdit(editSourceType, editSourcePath, editSourceConn, "Source")
if err != nil {
return fmt.Errorf("failed to read source: %w", err)
}
// Apply schema filter if specified
if editSchemaFilter != "" {
db = filterDatabaseBySchema(db, editSchemaFilter)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
totalTables := 0
for _, schema := range db.Schemas {
totalTables += len(schema.Tables)
}
fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)
// Store load config
loadConfig = &ui.LoadConfig{
SourceType: editSourceType,
FilePath: editSourcePath,
ConnString: editSourceConn,
}
} else {
// No source parameters provided, UI will show load screen
fmt.Fprintf(os.Stderr, "[1/2] No source specified, editor will prompt for database\n\n")
}
// Store save config if target parameters are provided
if editTargetType != "" {
saveConfig = &ui.SaveConfig{
TargetType: editTargetType,
FilePath: editTargetPath,
}
}
// Launch interactive TUI
if editSourceType != "" {
fmt.Fprintf(os.Stderr, "[2/3] Launching interactive editor...\n")
} else {
fmt.Fprintf(os.Stderr, "[2/2] Launching interactive editor...\n")
}
fmt.Fprintf(os.Stderr, " Use arrow keys and shortcuts to navigate\n")
fmt.Fprintf(os.Stderr, " Press ? for help\n\n")
editor := ui.NewSchemaEditorWithConfigs(db, loadConfig, saveConfig)
if err := editor.Run(); err != nil {
return fmt.Errorf("editor failed: %w", err)
}
// Only write to output if target parameters were provided and database was loaded from command line
if editTargetType != "" && editSourceType != "" && db != nil {
fmt.Fprintf(os.Stderr, "[3/3] Writing changes to output...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", editTargetType)
if editTargetPath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", editTargetPath)
}
// Get the potentially modified database from the editor
err = writeDatabaseForEdit(editTargetType, editTargetPath, "", editor.GetDatabase(), "Target")
if err != nil {
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully written database\n")
}
fmt.Fprintf(os.Stderr, "\n=== Edit complete ===\n")
return nil
}
func readDatabaseForEdit(dbType, filePath, connString, label string) (*models.Database, error) {
var reader readers.Reader
switch strings.ToLower(dbType) {
case "dbml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DBML format", label)
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "graphql":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "json":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for JSON format", label)
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "yaml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for YAML format", label)
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GORM format", label)
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "bun":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Bun format", label)
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "pgsql":
if connString == "" {
return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
if dbPath == "" {
dbPath = connString
}
if dbPath == "" {
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
default:
return nil, fmt.Errorf("%s: unsupported format: %s", label, dbType)
}
db, err := reader.ReadDatabase()
if err != nil {
return nil, fmt.Errorf("%s: %w", label, err)
}
return db, nil
}
func writeDatabaseForEdit(dbType, filePath, connString string, db *models.Database, label string) error {
var writer writers.Writer
switch strings.ToLower(dbType) {
case "dbml":
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "dctx":
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "drawdb":
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "graphql":
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "json":
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "yaml":
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "gorm":
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "bun":
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "drizzle":
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "prisma":
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "typeorm":
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "sqlite", "sqlite3":
writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "pgsql":
writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
default:
return fmt.Errorf("%s: unsupported format: %s", label, dbType)
}
err := writer.WriteDatabase(db)
if err != nil {
return fmt.Errorf("%s: %w", label, err)
}
return nil
}
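
Every case above funnels into the same two calls, reader.ReadDatabase() and writer.WriteDatabase(db). From that usage, the package contracts are presumably shaped roughly like the sketch below (method and field names are taken from the calls in this file; everything else is an assumption, not the actual source):

// Contracts implied by usage in this file (sketch only).
type Reader interface { // pkg/readers
	ReadDatabase() (*models.Database, error)
}
type Writer interface { // pkg/writers
	WriteDatabase(db *models.Database) error
}
// ReaderOptions carries FilePath or ConnectionString; WriterOptions carries
// OutputPath, PackageName, FlattenSchema, and an optional Metadata map.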


@@ -20,6 +20,7 @@ import (
"git.warky.dev/wdevs/relspecgo/pkg/readers/json" "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql" "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma" "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm" "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml" "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
) )
@@ -288,6 +289,17 @@ func readDatabaseForInspect(dbType, filePath, connString string) (*models.Databa
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
if dbPath == "" {
dbPath = connString
}
if dbPath == "" {
return nil, fmt.Errorf("file path or connection string is required for SQLite format")
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
default:
return nil, fmt.Errorf("unsupported database type: %s", dbType)
}

cmd/relspec/merge.go Normal file

@@ -0,0 +1,467 @@
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/spf13/cobra"
"git.warky.dev/wdevs/relspecgo/pkg/merge"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
"git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
"git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
wsqlite "git.warky.dev/wdevs/relspecgo/pkg/writers/sqlite"
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)
var (
mergeTargetType string
mergeTargetPath string
mergeTargetConn string
mergeSourceType string
mergeSourcePath string
mergeSourceConn string
mergeOutputType string
mergeOutputPath string
mergeOutputConn string
mergeSkipDomains bool
mergeSkipRelations bool
mergeSkipEnums bool
mergeSkipViews bool
mergeSkipSequences bool
mergeSkipTables string // Comma-separated table names to skip
mergeVerbose bool
mergeReportPath string // Path to write merge report
mergeFlattenSchema bool
)
var mergeCmd = &cobra.Command{
Use: "merge",
Short: "Merge database schemas (additive only - adds missing items)",
Long: `Merge one database schema into another. Performs additive merging only:
adds missing schemas, tables, columns, and other objects without modifying
or deleting existing items.
The target database is loaded first, then the source database is merged into it.
The result can be saved to a new format or updated in place.
Examples:
# Merge two JSON schemas
relspec merge --target json --target-path base.json \
--source json --source-path additional.json \
--output json --output-path merged.json
# Merge from PostgreSQL into JSON
relspec merge --target json --target-path mydb.json \
--source pgsql --source-conn "postgres://user:pass@localhost/source_db" \
--output json --output-path combined.json
# Merge and execute on PostgreSQL database with report
relspec merge --target json --target-path base.json \
--source json --source-path additional.json \
--output pgsql --output-conn "postgres://user:pass@localhost/target_db" \
--merge-report merge-report.json
# Merge DBML and YAML, skip relations
relspec merge --target dbml --target-path schema.dbml \
--source yaml --source-path tables.yaml \
--output dbml --output-path merged.dbml \
--skip-relations
# Merge and save back to target format
relspec merge --target json --target-path base.json \
--source json --source-path patch.json \
--output json --output-path base.json`,
RunE: runMerge,
}
func init() {
// Target database flags
mergeCmd.Flags().StringVar(&mergeTargetType, "target", "", "Target format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
mergeCmd.Flags().StringVar(&mergeTargetPath, "target-path", "", "Target file path (required for file-based formats)")
mergeCmd.Flags().StringVar(&mergeTargetConn, "target-conn", "", "Target connection string (required for pgsql)")
// Source database flags
mergeCmd.Flags().StringVar(&mergeSourceType, "source", "", "Source format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
mergeCmd.Flags().StringVar(&mergeSourcePath, "source-path", "", "Source file path (required for file-based formats)")
mergeCmd.Flags().StringVar(&mergeSourceConn, "source-conn", "", "Source connection string (required for pgsql)")
// Output flags
mergeCmd.Flags().StringVar(&mergeOutputType, "output", "", "Output format (required): dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql")
mergeCmd.Flags().StringVar(&mergeOutputPath, "output-path", "", "Output file path (required for file-based formats)")
mergeCmd.Flags().StringVar(&mergeOutputConn, "output-conn", "", "Output connection string (for pgsql)")
// Merge options
mergeCmd.Flags().BoolVar(&mergeSkipDomains, "skip-domains", false, "Skip domains during merge")
mergeCmd.Flags().BoolVar(&mergeSkipRelations, "skip-relations", false, "Skip relations during merge")
mergeCmd.Flags().BoolVar(&mergeSkipEnums, "skip-enums", false, "Skip enums during merge")
mergeCmd.Flags().BoolVar(&mergeSkipViews, "skip-views", false, "Skip views during merge")
mergeCmd.Flags().BoolVar(&mergeSkipSequences, "skip-sequences", false, "Skip sequences during merge")
mergeCmd.Flags().StringVar(&mergeSkipTables, "skip-tables", "", "Comma-separated list of table names to skip during merge")
mergeCmd.Flags().BoolVar(&mergeVerbose, "verbose", false, "Show verbose output")
mergeCmd.Flags().StringVar(&mergeReportPath, "merge-report", "", "Path to write merge report (JSON format)")
mergeCmd.Flags().BoolVar(&mergeFlattenSchema, "flatten-schema", false, "Flatten schema.table names to schema_table (useful for databases like SQLite that do not support schemas)")
}
func runMerge(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, "\n=== RelSpec Merge ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
// Validate required flags
if mergeTargetType == "" {
return fmt.Errorf("--target format is required")
}
if mergeSourceType == "" {
return fmt.Errorf("--source format is required")
}
if mergeOutputType == "" {
return fmt.Errorf("--output format is required")
}
// Validate and expand file paths
if mergeTargetType != "pgsql" {
if mergeTargetPath == "" {
return fmt.Errorf("--target-path is required for %s format", mergeTargetType)
}
mergeTargetPath = expandPath(mergeTargetPath)
} else if mergeTargetConn == "" {
return fmt.Errorf("--target-conn is required for pgsql format")
}
if mergeSourceType != "pgsql" {
if mergeSourcePath == "" {
return fmt.Errorf("--source-path is required for %s format", mergeSourceType)
}
mergeSourcePath = expandPath(mergeSourcePath)
} else if mergeSourceConn == "" {
return fmt.Errorf("--source-conn is required for pgsql format")
}
if mergeOutputType != "pgsql" {
if mergeOutputPath == "" {
return fmt.Errorf("--output-path is required for %s format", mergeOutputType)
}
mergeOutputPath = expandPath(mergeOutputPath)
}
// Step 1: Read target database
fmt.Fprintf(os.Stderr, "[1/3] Reading target database...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeTargetType)
if mergeTargetPath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeTargetPath)
}
if mergeTargetConn != "" {
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeTargetConn))
}
targetDB, err := readDatabaseForMerge(mergeTargetType, mergeTargetPath, mergeTargetConn, "Target")
if err != nil {
return fmt.Errorf("failed to read target database: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully read target database '%s'\n", targetDB.Name)
printDatabaseStats(targetDB)
// Step 2: Read source database
fmt.Fprintf(os.Stderr, "\n[2/3] Reading source database...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeSourceType)
if mergeSourcePath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeSourcePath)
}
if mergeSourceConn != "" {
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(mergeSourceConn))
}
sourceDB, err := readDatabaseForMerge(mergeSourceType, mergeSourcePath, mergeSourceConn, "Source")
if err != nil {
return fmt.Errorf("failed to read source database: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully read source database '%s'\n", sourceDB.Name)
printDatabaseStats(sourceDB)
// Step 3: Merge databases
fmt.Fprintf(os.Stderr, "\n[3/3] Merging databases...\n")
opts := &merge.MergeOptions{
SkipDomains: mergeSkipDomains,
SkipRelations: mergeSkipRelations,
SkipEnums: mergeSkipEnums,
SkipViews: mergeSkipViews,
SkipSequences: mergeSkipSequences,
}
// Parse skip-tables flag
if mergeSkipTables != "" {
opts.SkipTableNames = parseSkipTables(mergeSkipTables)
if len(opts.SkipTableNames) > 0 {
fmt.Fprintf(os.Stderr, " Skipping tables: %s\n", mergeSkipTables)
}
}
result := merge.MergeDatabases(targetDB, sourceDB, opts)
// Update timestamp
targetDB.UpdateDate()
// Print merge summary
fmt.Fprintf(os.Stderr, " ✓ Merge complete\n\n")
fmt.Fprintf(os.Stderr, "%s\n", merge.GetMergeSummary(result))
// Step 4: Write output
fmt.Fprintf(os.Stderr, "\n[4/4] Writing output...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", mergeOutputType)
if mergeOutputPath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", mergeOutputPath)
}
err = writeDatabaseForMerge(mergeOutputType, mergeOutputPath, mergeOutputConn, targetDB, "Output", mergeFlattenSchema)
if err != nil {
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully written merged database\n")
fmt.Fprintf(os.Stderr, "\n=== Merge complete ===\n")
return nil
}
func readDatabaseForMerge(dbType, filePath, connString, label string) (*models.Database, error) {
var reader readers.Reader
switch strings.ToLower(dbType) {
case "dbml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DBML format", label)
}
reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "dctx":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
}
reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drawdb":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
}
reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "graphql":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GraphQL format", label)
}
reader = graphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "json":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for JSON format", label)
}
reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "yaml":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for YAML format", label)
}
reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "gorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for GORM format", label)
}
reader = gorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "bun":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Bun format", label)
}
reader = bun.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drizzle":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Drizzle format", label)
}
reader = drizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "prisma":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for Prisma format", label)
}
reader = prisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "typeorm":
if filePath == "" {
return nil, fmt.Errorf("%s: file path is required for TypeORM format", label)
}
reader = typeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "pgsql":
if connString == "" {
return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
}
reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
case "sqlite", "sqlite3":
// SQLite can use either file path or connection string
dbPath := filePath
if dbPath == "" {
dbPath = connString
}
if dbPath == "" {
return nil, fmt.Errorf("%s: file path or connection string is required for SQLite format", label)
}
reader = sqlite.NewReader(&readers.ReaderOptions{FilePath: dbPath})
default:
return nil, fmt.Errorf("%s: unsupported format '%s'", label, dbType)
}
db, err := reader.ReadDatabase()
if err != nil {
return nil, err
}
return db, nil
}
func writeDatabaseForMerge(dbType, filePath, connString string, db *models.Database, label string, flattenSchema bool) error {
var writer writers.Writer
switch strings.ToLower(dbType) {
case "dbml":
if filePath == "" {
return fmt.Errorf("%s: file path is required for DBML format", label)
}
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "dctx":
if filePath == "" {
return fmt.Errorf("%s: file path is required for DCTX format", label)
}
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "drawdb":
if filePath == "" {
return fmt.Errorf("%s: file path is required for DrawDB format", label)
}
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "graphql":
if filePath == "" {
return fmt.Errorf("%s: file path is required for GraphQL format", label)
}
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "json":
if filePath == "" {
return fmt.Errorf("%s: file path is required for JSON format", label)
}
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "yaml":
if filePath == "" {
return fmt.Errorf("%s: file path is required for YAML format", label)
}
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "gorm":
if filePath == "" {
return fmt.Errorf("%s: file path is required for GORM format", label)
}
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "bun":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Bun format", label)
}
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "drizzle":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Drizzle format", label)
}
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "prisma":
if filePath == "" {
return fmt.Errorf("%s: file path is required for Prisma format", label)
}
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "typeorm":
if filePath == "" {
return fmt.Errorf("%s: file path is required for TypeORM format", label)
}
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "sqlite", "sqlite3":
writer = wsqlite.NewWriter(&writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema})
case "pgsql":
writerOpts := &writers.WriterOptions{OutputPath: filePath, FlattenSchema: flattenSchema}
if connString != "" {
writerOpts.Metadata = map[string]interface{}{
"connection_string": connString,
}
// Add report path if merge report is enabled
if mergeReportPath != "" {
writerOpts.Metadata["report_path"] = mergeReportPath
}
}
writer = wpgsql.NewWriter(writerOpts)
default:
return fmt.Errorf("%s: unsupported format '%s'", label, dbType)
}
return writer.WriteDatabase(db)
}
func expandPath(path string) string {
if len(path) > 0 && path[0] == '~' {
home, err := os.UserHomeDir()
if err == nil {
return filepath.Join(home, path[1:])
}
}
return path
}
func printDatabaseStats(db *models.Database) {
totalTables := 0
totalColumns := 0
totalConstraints := 0
totalIndexes := 0
for _, schema := range db.Schemas {
totalTables += len(schema.Tables)
for _, table := range schema.Tables {
totalColumns += len(table.Columns)
totalConstraints += len(table.Constraints)
totalIndexes += len(table.Indexes)
}
}
fmt.Fprintf(os.Stderr, " Schemas: %d, Tables: %d, Columns: %d, Constraints: %d, Indexes: %d\n",
len(db.Schemas), totalTables, totalColumns, totalConstraints, totalIndexes)
}
func parseSkipTables(skipTablesStr string) map[string]bool {
skipTables := make(map[string]bool)
if skipTablesStr == "" {
return skipTables
}
// Split by comma and trim whitespace
parts := strings.Split(skipTablesStr, ",")
for _, part := range parts {
trimmed := strings.TrimSpace(part)
if trimmed != "" {
// Store in lowercase for case-insensitive matching
skipTables[strings.ToLower(trimmed)] = true
}
}
return skipTables
}
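
merge.MergeDatabases is used here as a black box; per the command help it is strictly additive. A sketch of that rule for tables only, assuming case-insensitive name matching (borrowed from parseSkipTables above, not confirmed for the merge itself) — the real implementation also covers schemas, columns, enums, views, and sequences:

// addMissingTables copies tables that exist in source but not in target.
// Existing tables are never modified or deleted (the additive-only rule).
func addMissingTables(target, source *models.Schema) {
	seen := make(map[string]bool, len(target.Tables))
	for _, t := range target.Tables {
		seen[strings.ToLower(t.Name)] = true
	}
	for _, t := range source.Tables {
		if !seen[strings.ToLower(t.Name)] {
			target.Tables = append(target.Tables, t)
		}
	}
}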


@@ -1,9 +1,49 @@
package main
import (
"fmt"
"runtime/debug"
"time"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
var (
// Version information, set via ldflags during build
version = "dev"
buildDate = "unknown"
)
func init() {
// If version wasn't set via ldflags, try to get it from build info
if version == "dev" {
if info, ok := debug.ReadBuildInfo(); ok {
// Try to get version from VCS
var vcsRevision, vcsTime string
for _, setting := range info.Settings {
switch setting.Key {
case "vcs.revision":
if len(setting.Value) >= 7 {
vcsRevision = setting.Value[:7]
}
case "vcs.time":
vcsTime = setting.Value
}
}
if vcsRevision != "" {
version = vcsRevision
}
if vcsTime != "" {
if t, err := time.Parse(time.RFC3339, vcsTime); err == nil {
buildDate = t.UTC().Format("2006-01-02 15:04:05 UTC")
}
}
}
}
}
var rootCmd = &cobra.Command{
Use: "relspec",
Short: "RelSpec - Database schema conversion and analysis tool",
@@ -13,6 +53,9 @@ bidirectional conversion between various database schema formats.
It reads database schemas from multiple sources (live databases, DBML,
DCTX, DrawDB, etc.) and writes them to various formats (GORM, Bun,
JSON, YAML, SQL, etc.).`,
PersistentPreRun: func(cmd *cobra.Command, args []string) {
fmt.Printf("RelSpec %s (built: %s)\n\n", version, buildDate)
},
}
func init() {
@@ -20,4 +63,9 @@ func init() {
rootCmd.AddCommand(diffCmd)
rootCmd.AddCommand(inspectCmd)
rootCmd.AddCommand(scriptsCmd)
rootCmd.AddCommand(templCmd)
rootCmd.AddCommand(editCmd)
rootCmd.AddCommand(mergeCmd)
rootCmd.AddCommand(splitCmd)
rootCmd.AddCommand(versionCmd)
}
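
Because version and buildDate are ordinary package-level variables in package main, release builds can stamp them with the standard Go linker flags, e.g. go build -ldflags "-X main.version=v1.2.3 -X 'main.buildDate=2026-02-08 21:35 UTC'" (the exact values here are illustrative). The init above only consults debug.ReadBuildInfo when those defaults are still in place, so either mechanism works.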


@@ -18,6 +18,7 @@ var (
scriptsConn string
scriptsSchemaName string
scriptsDBName string
scriptsIgnoreErrors bool
)
var scriptsCmd = &cobra.Command{
@@ -39,8 +40,8 @@ Example filenames (hyphen format):
1-002-create-posts.sql # Priority 1, Sequence 2
10-10-create-newid.pgsql # Priority 10, Sequence 10
Both formats can be mixed in the same directory and subdirectories.
Scripts are executed in order: Priority (ascending), Sequence (ascending), Name (alphabetical).`,
}
var scriptsListCmd = &cobra.Command{
@@ -48,8 +49,8 @@ var scriptsListCmd = &cobra.Command{
Short: "List SQL scripts from a directory", Short: "List SQL scripts from a directory",
Long: `List SQL scripts from a directory and show their execution order. Long: `List SQL scripts from a directory and show their execution order.
The scripts are read from the specified directory and displayed in the order The scripts are read recursively from the specified directory and displayed in the order
they would be executed (Priority ascending, then Sequence ascending). they would be executed: Priority (ascending), then Sequence (ascending), then Name (alphabetical).
Example: Example:
relspec scripts list --dir ./migrations`, relspec scripts list --dir ./migrations`,
@@ -61,10 +62,10 @@ var scriptsExecuteCmd = &cobra.Command{
Short: "Execute SQL scripts against a database", Short: "Execute SQL scripts against a database",
Long: `Execute SQL scripts from a directory against a PostgreSQL database. Long: `Execute SQL scripts from a directory against a PostgreSQL database.
Scripts are executed in order: Priority (ascending), then Sequence (ascending). Scripts are executed in order: Priority (ascending), Sequence (ascending), Name (alphabetical).
Execution stops immediately on the first error. By default, execution stops immediately on the first error. Use --ignore-errors to continue execution.
The directory is scanned recursively for files matching the patterns: The directory is scanned recursively for all subdirectories and files matching the patterns:
{priority}_{sequence}_{name}.sql or .pgsql (underscore format) {priority}_{sequence}_{name}.sql or .pgsql (underscore format)
{priority}-{sequence}-{name}.sql or .pgsql (hyphen format) {priority}-{sequence}-{name}.sql or .pgsql (hyphen format)
@@ -75,7 +76,7 @@ PostgreSQL Connection String Examples:
postgresql://user:pass@host/dbname?sslmode=require
Examples:
# Execute migration scripts from a directory (including subdirectories)
relspec scripts execute --dir ./migrations \
--conn "postgres://user:pass@localhost:5432/mydb"
@@ -86,7 +87,12 @@ Examples:
# Execute with SSL disabled
relspec scripts execute --dir ./sql \
--conn "postgres://user:pass@localhost/db?sslmode=disable"
# Continue executing even if errors occur
relspec scripts execute --dir ./migrations \
--conn "postgres://localhost/mydb" \
--ignore-errors`,
RunE: runScriptsExecute,
}
@@ -105,6 +111,7 @@ func init() {
scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
scriptsExecuteCmd.Flags().BoolVar(&scriptsIgnoreErrors, "ignore-errors", false, "Continue executing scripts even if errors occur")
err = scriptsExecuteCmd.MarkFlagRequired("dir") err = scriptsExecuteCmd.MarkFlagRequired("dir")
if err != nil { if err != nil {
@@ -149,7 +156,7 @@ func runScriptsList(cmd *cobra.Command, args []string) error {
return nil
}
// Sort scripts by Priority, Sequence, then Name
sortedScripts := make([]*struct {
name string
priority int
@@ -186,7 +193,10 @@ func runScriptsList(cmd *cobra.Command, args []string) error {
if sortedScripts[i].priority != sortedScripts[j].priority {
return sortedScripts[i].priority < sortedScripts[j].priority
}
if sortedScripts[i].sequence != sortedScripts[j].sequence {
return sortedScripts[i].sequence < sortedScripts[j].sequence
}
return sortedScripts[i].name < sortedScripts[j].name
})
fmt.Fprintf(os.Stderr, "Found %d script(s) in execution order:\n\n", len(sortedScripts))
@@ -242,22 +252,44 @@ func runScriptsExecute(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts)) fmt.Fprintf(os.Stderr, " ✓ Found %d script(s)\n\n", len(schema.Scripts))
// Step 2: Execute scripts // Step 2: Execute scripts
fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence)...\n\n") fmt.Fprintf(os.Stderr, "[2/2] Executing scripts in order (Priority → Sequence → Name)...\n\n")
writer := sqlexec.NewWriter(&writers.WriterOptions{ writer := sqlexec.NewWriter(&writers.WriterOptions{
Metadata: map[string]any{ Metadata: map[string]any{
"connection_string": scriptsConn, "connection_string": scriptsConn,
"ignore_errors": scriptsIgnoreErrors,
}, },
}) })
if err := writer.WriteSchema(schema); err != nil { if err := writer.WriteSchema(schema); err != nil {
fmt.Fprintf(os.Stderr, "\n") fmt.Fprintf(os.Stderr, "\n")
return fmt.Errorf("execution failed: %w", err) return fmt.Errorf("script execution failed: %w", err)
}
// Get execution results from writer metadata
totalCount := len(schema.Scripts)
successCount := totalCount
failedCount := 0
opts := writer.Options()
if total, exists := opts.Metadata["execution_total"].(int); exists {
totalCount = total
}
if success, exists := opts.Metadata["execution_success"].(int); exists {
successCount = success
}
if failed, exists := opts.Metadata["execution_failed"].(int); exists {
failedCount = failed
}
fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
fmt.Fprintf(os.Stderr, "Total scripts: %d\n", totalCount)
fmt.Fprintf(os.Stderr, "Successful: %d\n", successCount)
if failedCount > 0 {
fmt.Fprintf(os.Stderr, "Failed: %d\n", failedCount)
}
fmt.Fprintf(os.Stderr, "\n")
return nil
}
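
The accepted filename grammar ({priority}_{sequence}_{name} or {priority}-{sequence}-{name}, with .sql or .pgsql extensions) is only stated in the help text here; below is a self-contained sketch of a parser for it. The helper name parseScriptName is hypothetical — the real parser lives elsewhere in the repository and may differ:

import (
	"path/filepath"
	"strconv"
	"strings"
)

// parseScriptName extracts priority and sequence from names such as
// "1-002-create-posts.sql" or "10_10_create_newid.pgsql".
// Illustration only; not the repository's actual implementation.
func parseScriptName(filename string) (priority, sequence int, name string, ok bool) {
	ext := filepath.Ext(filename)
	if ext != ".sql" && ext != ".pgsql" {
		return 0, 0, "", false
	}
	base := strings.TrimSuffix(filepath.Base(filename), ext)
	for _, sep := range []string{"_", "-"} {
		parts := strings.SplitN(base, sep, 3)
		if len(parts) != 3 {
			continue
		}
		p, errP := strconv.Atoi(parts[0])
		s, errS := strconv.Atoi(parts[1])
		if errP == nil && errS == nil {
			return p, s, parts[2], true
		}
	}
	return 0, 0, "", false
}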

cmd/relspec/split.go Normal file

@@ -0,0 +1,319 @@
package main
import (
"fmt"
"os"
"strings"
"github.com/spf13/cobra"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
var (
splitSourceType string
splitSourcePath string
splitSourceConn string
splitTargetType string
splitTargetPath string
splitSchemas string
splitTables string
splitPackageName string
splitDatabaseName string
splitExcludeSchema string
splitExcludeTables string
)
var splitCmd = &cobra.Command{
Use: "split",
Short: "Split database schemas to extract selected tables into a separate database",
Long: `Extract selected schemas and tables from a database and write them to a separate output.
The split command allows you to:
- Select specific schemas to include in the output
- Select specific tables within schemas
- Exclude specific schemas or tables if preferred
- Export the selected subset to any supported format
Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go, file or directory)
- bun: Bun model files (Go, file or directory)
- drizzle: Drizzle ORM schema files (TypeScript, file or directory)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL database (live connection)
Output formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- graphql: GraphQL schema files (.graphql, SDL)
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go)
- bun: Bun model files (Go)
- drizzle: Drizzle ORM schema files (TypeScript)
- prisma: Prisma schema files (.prisma)
- typeorm: TypeORM entity files (TypeScript)
- pgsql: PostgreSQL SQL schema
Examples:
# Split specific schemas from DBML
relspec split --from dbml --from-path schema.dbml \
--schemas public,auth \
--to json --to-path subset.json
# Extract specific tables from PostgreSQL
relspec split --from pgsql \
--from-conn "postgres://user:pass@localhost:5432/mydb" \
--schemas public \
--tables users,orders,products \
--to dbml --to-path subset.dbml
# Exclude specific tables
relspec split --from json --from-path schema.json \
--exclude-tables "audit_log,system_config,temp_data" \
--to json --to-path public_schema.json
# Split and convert to GORM
relspec split --from json --from-path schema.json \
--tables "users,posts,comments" \
--to gorm --to-path models/ --package models \
--database-name MyAppDB
# Exclude specific schema and tables
relspec split --from pgsql \
--from-conn "postgres://user:pass@localhost/db" \
--exclude-schema pg_catalog,information_schema \
--exclude-tables "temp_users,debug_logs" \
--to json --to-path public_schema.json`,
RunE: runSplit,
}
func init() {
splitCmd.Flags().StringVar(&splitSourceType, "from", "", "Source format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
splitCmd.Flags().StringVar(&splitSourcePath, "from-path", "", "Source file path (for file-based formats)")
splitCmd.Flags().StringVar(&splitSourceConn, "from-conn", "", "Source connection string (for database formats)")
splitCmd.Flags().StringVar(&splitTargetType, "to", "", "Target format (dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql)")
splitCmd.Flags().StringVar(&splitTargetPath, "to-path", "", "Target output path (file or directory)")
splitCmd.Flags().StringVar(&splitPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
splitCmd.Flags().StringVar(&splitDatabaseName, "database-name", "", "Override database name in output")
splitCmd.Flags().StringVar(&splitSchemas, "schemas", "", "Comma-separated list of schema names to include")
splitCmd.Flags().StringVar(&splitTables, "tables", "", "Comma-separated list of table names to include (case-insensitive)")
splitCmd.Flags().StringVar(&splitExcludeSchema, "exclude-schema", "", "Comma-separated list of schema names to exclude")
splitCmd.Flags().StringVar(&splitExcludeTables, "exclude-tables", "", "Comma-separated list of table names to exclude (case-insensitive)")
err := splitCmd.MarkFlagRequired("from")
if err != nil {
fmt.Fprintf(os.Stderr, "Error marking from flag as required: %v\n", err)
}
err = splitCmd.MarkFlagRequired("to")
if err != nil {
fmt.Fprintf(os.Stderr, "Error marking to flag as required: %v\n", err)
}
err = splitCmd.MarkFlagRequired("to-path")
if err != nil {
fmt.Fprintf(os.Stderr, "Error marking to-path flag as required: %v\n", err)
}
}
func runSplit(cmd *cobra.Command, args []string) error {
fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Split ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
// Read source database
fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", splitSourceType)
if splitSourcePath != "" {
fmt.Fprintf(os.Stderr, " Path: %s\n", splitSourcePath)
}
if splitSourceConn != "" {
fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(splitSourceConn))
}
db, err := readDatabaseForConvert(splitSourceType, splitSourcePath, splitSourceConn)
if err != nil {
return fmt.Errorf("failed to read source: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
totalTables := 0
for _, schema := range db.Schemas {
totalTables += len(schema.Tables)
}
fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", totalTables)
// Filter the database
fmt.Fprintf(os.Stderr, "[2/3] Filtering schemas and tables...\n")
filteredDB, err := filterDatabase(db)
if err != nil {
return fmt.Errorf("failed to filter database: %w", err)
}
if splitDatabaseName != "" {
filteredDB.Name = splitDatabaseName
}
filteredTables := 0
for _, schema := range filteredDB.Schemas {
filteredTables += len(schema.Tables)
}
fmt.Fprintf(os.Stderr, " ✓ Filtered to: %d schema(s), %d table(s)\n\n", len(filteredDB.Schemas), filteredTables)
// Write to target format
fmt.Fprintf(os.Stderr, "[3/3] Writing to target format...\n")
fmt.Fprintf(os.Stderr, " Format: %s\n", splitTargetType)
fmt.Fprintf(os.Stderr, " Output: %s\n", splitTargetPath)
if splitPackageName != "" {
fmt.Fprintf(os.Stderr, " Package: %s\n", splitPackageName)
}
err = writeDatabase(
filteredDB,
splitTargetType,
splitTargetPath,
splitPackageName,
"", // no schema filter for split
false, // no flatten-schema for split
)
if err != nil {
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Fprintf(os.Stderr, " ✓ Successfully written to '%s'\n\n", splitTargetPath)
fmt.Fprintf(os.Stderr, "=== Split Completed Successfully ===\n")
fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())
return nil
}
// filterDatabase filters the database based on provided criteria
func filterDatabase(db *models.Database) (*models.Database, error) {
filteredDB := &models.Database{
Name: db.Name,
Description: db.Description,
Comment: db.Comment,
DatabaseType: db.DatabaseType,
DatabaseVersion: db.DatabaseVersion,
SourceFormat: db.SourceFormat,
UpdatedAt: db.UpdatedAt,
GUID: db.GUID,
Schemas: []*models.Schema{},
Domains: db.Domains, // Keep domains for now
}
// Parse filter flags
includeSchemas := parseCommaSeparated(splitSchemas)
includeTables := parseCommaSeparated(splitTables)
excludeSchemas := parseCommaSeparated(splitExcludeSchema)
excludeTables := parseCommaSeparated(splitExcludeTables)
// Convert table names to lowercase for case-insensitive matching
includeTablesLower := make(map[string]bool)
for _, t := range includeTables {
includeTablesLower[strings.ToLower(t)] = true
}
excludeTablesLower := make(map[string]bool)
for _, t := range excludeTables {
excludeTablesLower[strings.ToLower(t)] = true
}
// Iterate through schemas
for _, schema := range db.Schemas {
// Check if schema should be excluded
if contains(excludeSchemas, schema.Name) {
continue
}
// Check if schema should be included
if len(includeSchemas) > 0 && !contains(includeSchemas, schema.Name) {
continue
}
// Create a copy of the schema with filtered tables
filteredSchema := &models.Schema{
Name: schema.Name,
Description: schema.Description,
Owner: schema.Owner,
Permissions: schema.Permissions,
Comment: schema.Comment,
Metadata: schema.Metadata,
Scripts: schema.Scripts,
Sequence: schema.Sequence,
Relations: schema.Relations,
Enums: schema.Enums,
UpdatedAt: schema.UpdatedAt,
GUID: schema.GUID,
Tables: []*models.Table{},
Views: schema.Views,
Sequences: schema.Sequences,
}
// Filter tables within the schema
for _, table := range schema.Tables {
tableLower := strings.ToLower(table.Name)
// Check if table should be excluded
if excludeTablesLower[tableLower] {
continue
}
// If specific tables are requested, only include those
if len(includeTablesLower) > 0 {
if !includeTablesLower[tableLower] {
continue
}
}
filteredSchema.Tables = append(filteredSchema.Tables, table)
}
// Only add schema if it has tables (unless no table filter was specified)
if len(filteredSchema.Tables) > 0 || (len(includeTablesLower) == 0 && len(excludeTablesLower) == 0) {
filteredDB.Schemas = append(filteredDB.Schemas, filteredSchema)
}
}
if len(filteredDB.Schemas) == 0 {
return nil, fmt.Errorf("no schemas matched the filter criteria")
}
return filteredDB, nil
}
// parseCommaSeparated parses a comma-separated string into a slice, trimming whitespace
func parseCommaSeparated(s string) []string {
if s == "" {
return []string{}
}
parts := strings.Split(s, ",")
result := make([]string, 0, len(parts))
for _, p := range parts {
trimmed := strings.TrimSpace(p)
if trimmed != "" {
result = append(result, trimmed)
}
}
return result
}
// contains checks if a string is in a slice
func contains(slice []string, item string) bool {
for _, s := range slice {
if s == item {
return true
}
}
return false
}

cmd/relspec/templ.go Normal file

@@ -0,0 +1,167 @@
package main
import (
"fmt"
"os"
"github.com/spf13/cobra"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wtemplate "git.warky.dev/wdevs/relspecgo/pkg/writers/template"
)
var (
templSourceType string
templSourcePath string
templSourceConn string
templTemplatePath string
templOutputPath string
templSchemaFilter string
templMode string
templFilenamePattern string
)
var templCmd = &cobra.Command{
Use: "templ",
Short: "Apply custom templates to database schemas",
Long: `Apply custom Go text templates to database schemas with flexible execution modes.
The templ command allows you to transform database schemas using custom Go text
templates. It supports multiple execution modes for different use cases:
Execution Modes:
database Execute template once for entire database (single output file)
schema Execute template once per schema (one file per schema)
script Execute template once per script (one file per script)
table Execute template once per table (one file per table)
Supported Input Formats:
dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql
Template Functions:
String utilities: toUpper, toLower, toCamelCase, toPascalCase, toSnakeCase, toKebabCase,
pluralize, singularize, title, trim, split, join, replace
Type conversion: sqlToGo, sqlToTypeScript, sqlToJava, sqlToPython, sqlToRust,
sqlToCSharp, sqlToPhp
Filtering: filterTables, filterColumns, filterPrimaryKeys, filterForeignKeys,
filterNullable, filterNotNull, filterColumnsByType
Formatting: toJSON, toJSONPretty, toYAML, indent, escape, comment
Loop helpers: enumerate, batch, reverse, first, last, skip, take, concat,
unique, sortBy, groupBy
Safe access: get, getOr, getPath, has, keys, values, merge, pick, omit,
sliceContains, indexOf, pluck
Examples:
# Generate documentation from PostgreSQL database
relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
--template docs.tmpl --output schema-docs.md
# Generate one TypeScript model file per table
relspec templ --from dbml --from-path schema.dbml \
--template ts-model.tmpl --mode table \
--output ./models/ \
--filename-pattern "{{.Name | toCamelCase}}.ts"
# Generate schema documentation files
relspec templ --from json --from-path db.json \
--template schema.tmpl --mode schema \
--output ./docs/ \
--filename-pattern "{{.Name}}_schema.md"`,
RunE: runTempl,
}
func init() {
templCmd.Flags().StringVar(&templSourceType, "from", "", "Source format (dbml, pgsql, json, etc.)")
templCmd.Flags().StringVar(&templSourcePath, "from-path", "", "Source file path (for file-based sources)")
templCmd.Flags().StringVar(&templSourceConn, "from-conn", "", "Source connection string (for database sources)")
templCmd.Flags().StringVar(&templTemplatePath, "template", "", "Template file path (required)")
templCmd.Flags().StringVar(&templOutputPath, "output", "", "Output path (file or directory, empty for stdout)")
templCmd.Flags().StringVar(&templSchemaFilter, "schema", "", "Filter to specific schema")
templCmd.Flags().StringVar(&templMode, "mode", "database", "Execution mode: database, schema, script, or table")
templCmd.Flags().StringVar(&templFilenamePattern, "filename-pattern", "{{.Name}}.txt", "Filename pattern for multi-output modes")
_ = templCmd.MarkFlagRequired("from")
_ = templCmd.MarkFlagRequired("template")
}
func runTempl(cmd *cobra.Command, args []string) error {
// Print header
fmt.Fprintf(os.Stderr, "=== RelSpec Template Execution ===\n")
fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())
// Read database using the same function as convert
fmt.Fprintf(os.Stderr, "Reading from %s...\n", templSourceType)
db, err := readDatabaseForConvert(templSourceType, templSourcePath, templSourceConn)
if err != nil {
return fmt.Errorf("failed to read source: %w", err)
}
// Print database stats
schemaCount := len(db.Schemas)
tableCount := 0
for _, schema := range db.Schemas {
tableCount += len(schema.Tables)
}
fmt.Fprintf(os.Stderr, "✓ Successfully read database: %s\n", db.Name)
fmt.Fprintf(os.Stderr, " Schemas: %d\n", schemaCount)
fmt.Fprintf(os.Stderr, " Tables: %d\n\n", tableCount)
// Apply schema filter if specified
if templSchemaFilter != "" {
fmt.Fprintf(os.Stderr, "Filtering to schema: %s\n", templSchemaFilter)
found := false
for _, schema := range db.Schemas {
if schema.Name == templSchemaFilter {
db.Schemas = []*models.Schema{schema}
found = true
break
}
}
if !found {
return fmt.Errorf("schema not found: %s", templSchemaFilter)
}
}
// Create template writer
fmt.Fprintf(os.Stderr, "Loading template: %s\n", templTemplatePath)
fmt.Fprintf(os.Stderr, "Execution mode: %s\n", templMode)
metadata := map[string]interface{}{
"template_path": templTemplatePath,
"mode": templMode,
"filename_pattern": templFilenamePattern,
}
writerOpts := &writers.WriterOptions{
OutputPath: templOutputPath,
Metadata: metadata,
}
writer, err := wtemplate.NewWriter(writerOpts)
if err != nil {
return fmt.Errorf("failed to create template writer: %w", err)
}
// Execute template
fmt.Fprintf(os.Stderr, "\nExecuting template...\n")
if err := writer.WriteDatabase(db); err != nil {
return fmt.Errorf("failed to execute template: %w", err)
}
// Print success message
fmt.Fprintf(os.Stderr, "\n✓ Template executed successfully\n")
if templOutputPath != "" {
fmt.Fprintf(os.Stderr, "Output written to: %s\n", templOutputPath)
} else {
fmt.Fprintf(os.Stderr, "Output written to stdout\n")
}
fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
return nil
}

cmd/relspec/version.go Normal file

@@ -0,0 +1,16 @@
package main
import (
"fmt"
"github.com/spf13/cobra"
)
var versionCmd = &cobra.Command{
Use: "version",
Short: "Print version information",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf("RelSpec %s\n", version)
fmt.Printf("Built: %s\n", buildDate)
},
}

doc.go Normal file

@@ -0,0 +1,108 @@
// Package relspecgo provides bidirectional conversion between database schema formats.
//
// RelSpec is a comprehensive database schema tool that reads, writes, and transforms
// database schemas across multiple formats including live databases, ORM models,
// schema definition languages, and data interchange formats.
//
// # Features
//
// - Read from 15+ formats: PostgreSQL, SQLite, DBML, GORM, Prisma, Drizzle, and more
// - Write to 15+ formats: SQL, ORM models, schema definitions, JSON/YAML
// - Interactive TUI editor for visual schema management
// - Schema diff and merge capabilities
// - Format-agnostic intermediate representation
//
// # Architecture
//
// RelSpec uses a hub-and-spoke architecture with models.Database as the central type:
//
// Input Format → Reader → models.Database → Writer → Output Format
//
// This allows any supported input format to be converted to any supported output format
// without requiring N² conversion implementations.
//
// # Key Packages
//
// - pkg/models: Core data structures (Database, Schema, Table, Column, etc.)
// - pkg/readers: Input format readers (dbml, pgsql, gorm, etc.)
// - pkg/writers: Output format writers (dbml, pgsql, gorm, etc.)
// - pkg/ui: Interactive terminal UI for schema editing
// - pkg/diff: Schema comparison and difference detection
// - pkg/merge: Schema merging utilities
// - pkg/transform: Validation and normalization
//
// # Installation
//
// go install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest
//
// # Usage
//
// Command-line conversion:
//
// relspec convert --from dbml --from-path schema.dbml \
// --to gorm --to-path ./models
//
// Interactive editor:
//
// relspec edit --from pgsql --from-conn "postgres://..." \
// --to dbml --to-path schema.dbml
//
// Schema comparison:
//
// relspec diff --source-type pgsql --source-conn "postgres://..." \
// --target-type dbml --target-path schema.dbml
//
// Merge schemas:
//
// relspec merge --target schema1.dbml --sources schema2.dbml,schema3.dbml
//
// # Supported Formats
//
// Input/Output Formats:
// - dbml: Database Markup Language
// - dctx: DCTX schema files
// - drawdb: DrawDB JSON format
// - graphql: GraphQL schema definition
// - json: JSON schema representation
// - yaml: YAML schema representation
// - gorm: Go GORM models
// - bun: Go Bun models
// - drizzle: TypeScript Drizzle ORM
// - prisma: Prisma schema language
// - typeorm: TypeScript TypeORM entities
// - pgsql: PostgreSQL (live DB or SQL)
// - sqlite: SQLite (database file or SQL)
//
// # Library Usage
//
// RelSpec can be used as a Go library:
//
// import (
// "git.warky.dev/wdevs/relspecgo/pkg/models"
// "git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
// "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
// )
//
// // Read DBML
// reader := dbml.NewReader(&readers.ReaderOptions{
// FilePath: "schema.dbml",
// })
// db, err := reader.ReadDatabase()
//
// // Write GORM models
// writer := gorm.NewWriter(&writers.WriterOptions{
// OutputPath: "./models",
// PackageName: "models",
// })
// err = writer.WriteDatabase(db)
//
// # Documentation
//
// Full documentation available at: https://git.warky.dev/wdevs/relspecgo
//
// API documentation: go doc git.warky.dev/wdevs/relspecgo/...
//
// # License
//
// See LICENSE file in the repository root.
package relspecgo


@@ -1,6 +1,21 @@
version: '3.8'
services:
mssql:
image: mcr.microsoft.com/mssql/server:2022-latest
environment:
- ACCEPT_EULA=Y
- SA_PASSWORD=StrongPassword123!
- MSSQL_PID=Express
ports:
- "1433:1433"
volumes:
- ./test_data/mssql/test_schema.sql:/test_schema.sql
healthcheck:
test: ["CMD", "/opt/mssql-tools/bin/sqlcmd", "-S", "localhost", "-U", "sa", "-P", "StrongPassword123!", "-Q", "SELECT 1"]
interval: 5s
timeout: 3s
retries: 10
postgres:
image: postgres:16-alpine
container_name: relspec-test-postgres

docs/DOMAINS_DRAWDB.md Normal file

@@ -0,0 +1,149 @@
# Domains and DrawDB Areas Integration
## Overview
Domains provide a way to organize tables from potentially multiple schemas into logical business groupings. When working with DrawDB format, domains are automatically imported/exported as **Subject Areas** - a native DrawDB feature for visually grouping tables.
## How It Works
### Writing Domains to DrawDB (Export)
When you export a database with domains to DrawDB format:
1. **Schema Areas** are created automatically for each schema (existing behavior)
2. **Domain Areas** are created for each domain, calculated based on the positions of the tables they contain
3. The domain area bounds are automatically calculated to encompass all its tables with a small padding
```go
// Example: Creating a domain and exporting to DrawDB
db := models.InitDatabase("mydb")
// Create an "authentication" domain
authDomain := models.InitDomain("authentication")
authDomain.Tables = append(authDomain.Tables,
models.InitDomainTable("users", "public"),
models.InitDomainTable("roles", "public"),
models.InitDomainTable("permissions", "public"),
)
db.Domains = append(db.Domains, authDomain)
// Create a "financial" domain spanning multiple schemas
finDomain := models.InitDomain("financial")
finDomain.Tables = append(finDomain.Tables,
models.InitDomainTable("accounts", "public"),
models.InitDomainTable("transactions", "public"),
models.InitDomainTable("ledger", "finance"), // Different schema!
)
db.Domains = append(db.Domains, finDomain)
// Write to DrawDB - domains become subject areas
writer := drawdb.NewWriter(&writers.WriterOptions{
OutputPath: "schema.json",
})
writer.WriteDatabase(db)
```
The resulting DrawDB JSON will have Subject Areas for both:
- "authentication" area containing the auth tables
- "financial" area containing the financial tables from both schemas
### Reading Domains from DrawDB (Import)
When you import a DrawDB file with Subject Areas:
1. **Subject Areas** are automatically converted to **Domains**
2. Tables are assigned to a domain if they fall within the area's visual bounds
3. Table references include both the table name and schema name
```go
// Example: Reading DrawDB with areas
reader := drawdb.NewReader(&readers.ReaderOptions{
FilePath: "schema.json",
})
db, err := reader.ReadDatabase()
if err != nil {
log.Fatal(err)
}
// Access domains
for _, domain := range db.Domains {
fmt.Printf("Domain: %s\n", domain.Name)
for _, domainTable := range domain.Tables {
fmt.Printf(" - %s.%s\n", domainTable.SchemaName, domainTable.TableName)
// Access the actual table reference if loaded
if domainTable.RefTable != nil {
fmt.Printf(" Description: %s\n", domainTable.RefTable.Description)
}
}
}
```
## Domain Structure
```go
type Domain struct {
Name string // Domain name (e.g., "authentication", "user_data")
Description string // Optional human-readable description
Tables []*DomainTable // Tables belonging to this domain
Comment string // Optional comment
Metadata map[string]any // Extensible metadata
Sequence uint // Ordering hint
}
type DomainTable struct {
TableName string // Table name
SchemaName string // Schema containing the table
Sequence uint // Ordering hint
RefTable *Table // Pointer to actual table (in-memory only, not serialized)
}
```
## Multi-Schema Domains
One of the key features of domains is that they can span multiple schemas:
```
Domain: "user_data"
├── public.users
├── public.profiles
├── public.user_preferences
├── auth.user_sessions
└── auth.mfa_devices
```
This allows you to organize related tables even when they're stored in different schemas.
## Visual Organization in DrawDB
When viewing the exported DrawDB file in DrawDB Editor:
1. **Schema areas** appear in one color (original behavior)
2. **Domain areas** appear in a different color
3. Domain area bounds are calculated to fit all contained tables
4. Areas can overlap - a table can visually belong to multiple areas
## Integration with Other Formats
Currently, domain/area integration is implemented for DrawDB format.
To implement similar functionality for other formats:
1. Identify if the format has a native grouping/area feature
2. Add conversion logic in the reader to map format areas → Domain model
3. Add conversion logic in the writer to map Domain model → format areas
Example formats that could support domains:
- **DBML**: Could use DBML's `TableGroup` feature (see the sketch after this list)
- **DrawDB**: ✅ Already implemented (Subject Areas)
- **GraphQL**: Could use schema directives
- **Custom formats**: Implement as needed
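As a concrete illustration of step 3, here is a minimal, hypothetical sketch of writing a domain as a DBML `TableGroup`. The `writeDomainAsTableGroup` helper is not part of RelSpec; it only assumes the `Domain` and `DomainTable` fields documented above:

```go
package main

import (
	"fmt"
	"io"
	"os"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
)

// writeDomainAsTableGroup renders one domain as a DBML TableGroup block.
// Illustrative sketch only - not RelSpec's actual DBML writer.
func writeDomainAsTableGroup(w io.Writer, d *models.Domain) {
	fmt.Fprintf(w, "TableGroup %s {\n", d.Name)
	for _, dt := range d.Tables {
		// DBML references tables as schema.table
		fmt.Fprintf(w, "  %s.%s\n", dt.SchemaName, dt.TableName)
	}
	fmt.Fprintln(w, "}")
}

func main() {
	d := models.InitDomain("authentication")
	d.Tables = append(d.Tables, models.InitDomainTable("users", "public"))
	writeDomainAsTableGroup(os.Stdout, d)
}
```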
## Tips and Best Practices
1. **Keep domains focused**: Each domain should represent a distinct business area
2. **Document purposes**: Use Description and Comment fields to explain each domain
3. **Use meaningful names**: Domain names should clearly reflect their purpose
4. **Maintain schema consistency**: Keep related tables together in the same schema when possible
5. **Use metadata**: Store tool-specific information in the Metadata field

docs/TEMPLATE_MODE.md Normal file

@@ -0,0 +1,572 @@
# RelSpec Template Mode
The `templ` command allows you to transform database schemas using custom Go text templates. It provides powerful template functions and flexible execution modes for generating any type of output from your database schema.
## Table of Contents
- [Quick Start](#quick-start)
- [Execution Modes](#execution-modes)
- [Template Functions](#template-functions)
- [String Utilities](#string-utilities)
- [Type Conversion](#type-conversion)
- [Filtering](#filtering)
- [Formatting](#formatting)
- [Loop Helpers](#loop-helpers)
- [Sorting Helpers](#sorting-helpers)
- [Safe Access](#safe-access)
- [Utility Functions](#utility-functions)
- [Data Model](#data-model)
- [Examples](#examples)
## Quick Start
```bash
# Generate documentation from a database
relspec templ --from pgsql --from-conn "postgres://user:pass@localhost/db" \
--template docs.tmpl --output schema-docs.md
# Generate TypeScript models (one file per table)
relspec templ --from dbml --from-path schema.dbml \
--template model.tmpl --mode table \
--output ./models/ \
--filename-pattern "{{.Name | toCamelCase}}.ts"
# Output to stdout
relspec templ --from json --from-path schema.json \
--template report.tmpl
```
## Execution Modes
The `--mode` flag controls how the template is executed:
| Mode | Description | Output | When to Use |
|------|-------------|--------|-------------|
| `database` | Execute once for entire database | Single file | Documentation, reports, overview files |
| `schema` | Execute once per schema | One file per schema | Schema-specific documentation |
| `domain` | Execute once per domain | One file per domain | Domain-based documentation, domain exports |
| `script` | Execute once per script | One file per script | Script processing |
| `table` | Execute once per table | One file per table | Model generation, table docs |
### Filename Patterns
For multi-file modes (`schema`, `domain`, `script`, `table`), use `--filename-pattern` to control output filenames:
```bash
# Default pattern
--filename-pattern "{{.Name}}.txt"
# With transformations
--filename-pattern "{{.Name | toCamelCase}}.ts"
# Nested directories
--filename-pattern "{{.Schema}}/{{.Name}}.md"
# Complex patterns
--filename-pattern "{{.ParentSchema.Name}}/models/{{.Name | toPascalCase}}Model.java"
```
## Template Functions
### String Utilities
Transform and manipulate strings in your templates.
| Function | Description | Example | Output |
|----------|-------------|---------|--------|
| `toUpper` | Convert to uppercase | `{{ "hello" \| toUpper }}` | `HELLO` |
| `toLower` | Convert to lowercase | `{{ "HELLO" \| toLower }}` | `hello` |
| `toCamelCase` | Convert to camelCase | `{{ "user_name" \| toCamelCase }}` | `userName` |
| `toPascalCase` | Convert to PascalCase | `{{ "user_name" \| toPascalCase }}` | `UserName` |
| `toSnakeCase` | Convert to snake_case | `{{ "UserName" \| toSnakeCase }}` | `user_name` |
| `toKebabCase` | Convert to kebab-case | `{{ "UserName" \| toKebabCase }}` | `user-name` |
| `pluralize` | Convert to plural | `{{ "user" \| pluralize }}` | `users` |
| `singularize` | Convert to singular | `{{ "users" \| singularize }}` | `user` |
| `title` | Convert to Title Case | `{{ "hello world" \| title }}` | `Hello World` |
| `trim` | Trim whitespace | `{{ " hello " \| trim }}` | `hello` |
| `trimPrefix` | Remove prefix | `{{ trimPrefix "tbl_users" "tbl_" }}` | `users` |
| `trimSuffix` | Remove suffix | `{{ trimSuffix "users_old" "_old" }}` | `users` |
| `replace` | Replace occurrences | `{{ replace "hello" "l" "L" -1 }}` | `heLLo` |
| `stringContains` | Check if contains substring | `{{ stringContains "hello" "ell" }}` | `true` |
| `hasPrefix` | Check if starts with | `{{ hasPrefix "hello" "hel" }}` | `true` |
| `hasSuffix` | Check if ends with | `{{ hasSuffix "hello" "llo" }}` | `true` |
| `split` | Split by separator | `{{ split "a,b,c" "," }}` | `[a b c]` |
| `join` | Join with separator | `{{ join (list "a" "b") "," }}` | `a,b` |
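These functions compose through template pipes, so one expression can chain several transformations. A small example, assuming the default inflection rules handle the compound noun:

```
{{/* "order_items" -> "order_item" -> "OrderItem" */}}
{{ "order_items" | singularize | toPascalCase }}
```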
### Type Conversion
Convert SQL types to various programming language types.
| Function | Parameters | Description | Example |
|----------|------------|-------------|---------|
| `sqlToGo` | `sqlType`, `notNull` | SQL to Go | `{{ sqlToGo "varchar" true }}` → `string` |
| `sqlToTypeScript` | `sqlType`, `notNull` | SQL to TypeScript | `{{ sqlToTypeScript "integer" false }}` → `number \| null` |
| `sqlToJava` | `sqlType`, `notNull` | SQL to Java | `{{ sqlToJava "varchar" true }}` → `String` |
| `sqlToPython` | `sqlType` | SQL to Python | `{{ sqlToPython "integer" }}` → `int` |
| `sqlToRust` | `sqlType`, `notNull` | SQL to Rust | `{{ sqlToRust "varchar" false }}` → `Option<String>` |
| `sqlToCSharp` | `sqlType`, `notNull` | SQL to C# | `{{ sqlToCSharp "integer" false }}` → `int?` |
| `sqlToPhp` | `sqlType`, `notNull` | SQL to PHP | `{{ sqlToPhp "varchar" false }}` → `?string` |
**Supported SQL Types:**
- Integer: `integer`, `int`, `smallint`, `bigint`, `serial`, `bigserial`
- String: `text`, `varchar`, `char`, `character`, `citext`
- Boolean: `boolean`, `bool`
- Float: `real`, `float`, `double precision`, `numeric`, `decimal`
- Date/Time: `timestamp`, `date`, `time`, `timestamptz`
- Binary: `bytea`
- Special: `uuid`, `json`, `jsonb`, `array`
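In practice the second argument is the column's NOT NULL flag, so templates pass `.NotNull` directly (as the examples later in this document do). As a small sketch, a table-mode template that renders a Go struct using only the documented `values`, `toPascalCase`, and `sqlToGo` functions:

```
type {{ .Table.Name | toPascalCase }} struct {
{{- range .Table.Columns | values }}
    {{ .Name | toPascalCase }} {{ sqlToGo .Type .NotNull }}
{{- end }}
}
```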
### Filtering
Filter and select specific database objects.
| Function | Description | Example |
|----------|-------------|---------|
| `filterTables` | Filter tables by pattern | `{{ filterTables .Schema.Tables "user_*" }}` |
| `filterTablesByPattern` | Alias for filterTables | `{{ filterTablesByPattern .Schema.Tables "temp_*" }}` |
| `filterColumns` | Filter columns by pattern | `{{ filterColumns .Table.Columns "*_id" }}` |
| `filterColumnsByType` | Filter by SQL type | `{{ filterColumnsByType .Table.Columns "varchar" }}` |
| `filterPrimaryKeys` | Get primary key columns | `{{ filterPrimaryKeys .Table.Columns }}` |
| `filterForeignKeys` | Get foreign key constraints | `{{ filterForeignKeys .Table.Constraints }}` |
| `filterUniqueConstraints` | Get unique constraints | `{{ filterUniqueConstraints .Table.Constraints }}` |
| `filterCheckConstraints` | Get check constraints | `{{ filterCheckConstraints .Table.Constraints }}` |
| `filterNullable` | Get nullable columns | `{{ filterNullable .Table.Columns }}` |
| `filterNotNull` | Get non-nullable columns | `{{ filterNotNull .Table.Columns }}` |
**Pattern Matching:**
- `*` - Match any characters
- `?` - Match single character
- Example: `user_*` matches `user_profile`, `user_settings`
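For example, a table-mode fragment that lists foreign-key constraints and then any columns matching an `*_id` naming pattern (both functions are from the table above):

```
Foreign keys:
{{- range filterForeignKeys .Table.Constraints }}
- {{ .Name }} references {{ .ReferencedTable }}
{{- end }}
Likely FK columns:
{{- range filterColumns .Table.Columns "*_id" }}
- {{ .Name }}
{{- end }}
```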
### Formatting
Format output and add structure to generated code.
| Function | Description | Example |
|----------|-------------|---------|
| `toJSON` | Convert to JSON | `{{ .Database \| toJSON }}` |
| `toJSONPretty` | Pretty-print JSON | `{{ toJSONPretty .Table " " }}` |
| `toYAML` | Convert to YAML | `{{ .Schema \| toYAML }}` |
| `indent` | Indent by spaces | `{{ indent .Column.Description 4 }}` |
| `indentWith` | Indent with prefix | `{{ indentWith .Comment " " }}` |
| `escape` | Escape special chars | `{{ escape .Column.Default }}` |
| `escapeQuotes` | Escape quotes only | `{{ escapeQuotes .String }}` |
| `comment` | Add comment prefix | `{{ comment .Description "//" }}` |
| `quoteString` | Add quotes | `{{ quoteString "value" }}` → `"value"` |
| `unquoteString` | Remove quotes | `{{ unquoteString "\"value\"" }}` → `value` |
**Comment Styles:**
- `//` - C/Go/JavaScript style
- `#` - Python/Shell style
- `--` - SQL style
- `/* */` - Block comment style
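For instance, a SQL-flavored sketch that prefixes a table's description with `--` comments and quotes the table name:

```
{{ comment .Table.Description "--" }}
SELECT * FROM {{ quoteString .Table.Name }};
```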
### Loop Helpers
Iterate and manipulate collections.
| Function | Description | Example |
|----------|-------------|---------|
| `enumerate` | Add index to items | `{{ range enumerate .Tables }}{{ .Index }}: {{ .Value.Name }}{{ end }}` |
| `batch` | Split into chunks | `{{ range batch .Columns 3 }}...{{ end }}` |
| `chunk` | Alias for batch | `{{ range chunk .Columns 5 }}...{{ end }}` |
| `reverse` | Reverse order | `{{ range reverse .Tables }}...{{ end }}` |
| `first` | Get first N items | `{{ range first .Tables 5 }}...{{ end }}` |
| `last` | Get last N items | `{{ range last .Tables 3 }}...{{ end }}` |
| `skip` | Skip first N items | `{{ range skip .Tables 2 }}...{{ end }}` |
| `take` | Take first N (alias) | `{{ range take .Tables 10 }}...{{ end }}` |
| `concat` | Concatenate slices | `{{ $all := concat .Schema1.Tables .Schema2.Tables }}` |
| `unique` | Remove duplicates | `{{ $unique := unique .Items }}` |
| `sortBy` | Sort by field | `{{ $sorted := sortBy .Tables "Name" }}` |
| `groupBy` | Group by field | `{{ $grouped := groupBy .Tables "Schema" }}` |
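As a quick illustration, a numbered list of a schema's tables using `enumerate` (paired with `add` from the utility functions below):

```
{{- range enumerate .Schema.Tables }}
{{ add .Index 1 }}. {{ .Value.Name }}
{{- end }}
```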
### Sorting Helpers
Sort database objects by name or sequence number. All sort functions modify the slice in-place.
**Schema Sorting:**
| Function | Description | Example |
|----------|-------------|---------|
| `sortSchemasByName` | Sort schemas by name | `{{ sortSchemasByName .Database.Schemas false }}` |
| `sortSchemasBySequence` | Sort schemas by sequence | `{{ sortSchemasBySequence .Database.Schemas false }}` |
**Table Sorting:**
| Function | Description | Example |
|----------|-------------|---------|
| `sortTablesByName` | Sort tables by name | `{{ sortTablesByName .Schema.Tables false }}` |
| `sortTablesBySequence` | Sort tables by sequence | `{{ sortTablesBySequence .Schema.Tables true }}` |
**Column Sorting:**
| Function | Description | Example |
|----------|-------------|---------|
| `sortColumnsMapByName` | Convert column map to sorted slice by name | `{{ $cols := sortColumnsMapByName .Table.Columns false }}` |
| `sortColumnsMapBySequence` | Convert column map to sorted slice by sequence | `{{ $cols := sortColumnsMapBySequence .Table.Columns false }}` |
| `sortColumnsByName` | Sort column slice by name | `{{ sortColumnsByName $columns false }}` |
| `sortColumnsBySequence` | Sort column slice by sequence | `{{ sortColumnsBySequence $columns true }}` |
**Other Object Sorting:**
| Function | Description | Example |
|----------|-------------|---------|
| `sortViewsByName` | Sort views by name | `{{ sortViewsByName .Schema.Views false }}` |
| `sortViewsBySequence` | Sort views by sequence | `{{ sortViewsBySequence .Schema.Views false }}` |
| `sortSequencesByName` | Sort sequences by name | `{{ sortSequencesByName .Schema.Sequences false }}` |
| `sortSequencesBySequence` | Sort sequences by sequence | `{{ sortSequencesBySequence .Schema.Sequences false }}` |
| `sortIndexesMapByName` | Convert index map to sorted slice by name | `{{ $idx := sortIndexesMapByName .Table.Indexes false }}` |
| `sortIndexesMapBySequence` | Convert index map to sorted slice by sequence | `{{ $idx := sortIndexesMapBySequence .Table.Indexes false }}` |
| `sortIndexesByName` | Sort index slice by name | `{{ sortIndexesByName $indexes false }}` |
| `sortIndexesBySequence` | Sort index slice by sequence | `{{ sortIndexesBySequence $indexes false }}` |
| `sortConstraintsMapByName` | Convert constraint map to sorted slice by name | `{{ $cons := sortConstraintsMapByName .Table.Constraints false }}` |
| `sortConstraintsByName` | Sort constraint slice by name | `{{ sortConstraintsByName $constraints false }}` |
| `sortRelationshipsMapByName` | Convert relationship map to sorted slice by name | `{{ $rels := sortRelationshipsMapByName .Table.Relationships false }}` |
| `sortRelationshipsByName` | Sort relationship slice by name | `{{ sortRelationshipsByName $relationships false }}` |
| `sortScriptsByName` | Sort scripts by name | `{{ sortScriptsByName .Schema.Scripts false }}` |
| `sortEnumsByName` | Sort enums by name | `{{ sortEnumsByName .Schema.Enums false }}` |
**Sort Parameters:**
- Second parameter: `false` = ascending, `true` = descending
- Example: `{{ sortTablesByName .Schema.Tables true }}` sorts descending (Z-A)
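Because `.Table.Columns` is a map, the `*Map*` variants are the usual way to get a deterministic order before ranging. A typical column loop in a table-mode template:

```
{{- range sortColumnsMapBySequence .Table.Columns false }}
{{ .Name }}: {{ .Type }}
{{- end }}
```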
### Safe Access
Safely access nested data without panicking.
| Function | Description | Example |
|----------|-------------|---------|
| `get` | Get map value | `{{ get .Metadata "key" }}` |
| `getOr` | Get with default | `{{ getOr .Metadata "key" "default" }}` |
| `getPath` | Nested access | `{{ getPath .Config "database.host" }}` |
| `getPathOr` | Nested with default | `{{ getPathOr .Config "db.port" 5432 }}` |
| `safeIndex` | Safe array access | `{{ safeIndex .Tables 0 }}` |
| `safeIndexOr` | Safe with default | `{{ safeIndexOr .Tables 0 nil }}` |
| `has` | Check key exists | `{{ if has .Metadata "key" }}...{{ end }}` |
| `hasPath` | Check nested path | `{{ if hasPath .Config "db.host" }}...{{ end }}` |
| `keys` | Get map keys | `{{ range keys .Metadata }}...{{ end }}` |
| `values` | Get map values | `{{ range values .Table.Columns }}...{{ end }}` |
| `merge` | Merge maps | `{{ $merged := merge .Map1 .Map2 }}` |
| `pick` | Select keys | `{{ $subset := pick .Metadata "name" "desc" }}` |
| `omit` | Exclude keys | `{{ $filtered := omit .Metadata "internal" }}` |
| `sliceContains` | Check contains | `{{ if sliceContains .Names "admin" }}...{{ end }}` |
| `indexOf` | Find index | `{{ $idx := indexOf .Names "admin" }}` |
| `pluck` | Extract field | `{{ $names := pluck .Tables "Name" }}` |
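For example, reading optional metadata without risking a template error (the `owner` and `audience` keys here are purely illustrative):

```
{{- if has .Metadata "owner" }}
Owner: {{ get .Metadata "owner" }}
{{- end }}
Audience: {{ getOr .Metadata "audience" "internal" }}
```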
### Utility Functions
General-purpose template helpers.
| Function | Description | Example |
|----------|-------------|---------|
| `add` | Add numbers | `{{ add 5 3 }}` → `8` |
| `sub` | Subtract | `{{ sub 10 3 }}` → `7` |
| `mul` | Multiply | `{{ mul 4 5 }}` → `20` |
| `div` | Divide | `{{ div 10 2 }}` → `5` |
| `mod` | Modulo | `{{ mod 10 3 }}` → `1` |
| `default` | Default value | `{{ default "unknown" .Name }}` |
| `dict` | Create map | `{{ $m := dict "key1" "val1" "key2" "val2" }}` |
| `list` | Create list | `{{ $l := list "a" "b" "c" }}` |
| `seq` | Number sequence | `{{ range seq 1 5 }}{{ . }}{{ end }}` → `12345` |
## Data Model
The data available in templates depends on the execution mode:
### Database Mode
```go
.Database // *models.Database - Full database
.ParentDatabase // *models.Database - Same as .Database
.FlatColumns // []*models.FlatColumn - All columns flattened
.FlatTables // []*models.FlatTable - All tables flattened
.FlatConstraints // []*models.FlatConstraint - All constraints
.FlatRelationships // []*models.FlatRelationship - All relationships
.Summary // *models.DatabaseSummary - Statistics
.Metadata // map[string]interface{} - User metadata
```
### Schema Mode
```go
.Schema // *models.Schema - Current schema
.ParentDatabase // *models.Database - Parent database context
.FlatColumns // []*models.FlatColumn - Schema's columns flattened
.FlatTables // []*models.FlatTable - Schema's tables flattened
.FlatConstraints // []*models.FlatConstraint - Schema's constraints
.FlatRelationships // []*models.FlatRelationship - Schema's relationships
.Summary // *models.DatabaseSummary - Statistics
.Metadata // map[string]interface{} - User metadata
```
### Domain Mode
```go
.Domain // *models.Domain - Current domain
.ParentDatabase // *models.Database - Parent database context
.Metadata // map[string]interface{} - User metadata
```
### Table Mode
```go
.Table // *models.Table - Current table
.ParentSchema // *models.Schema - Parent schema
.ParentDatabase // *models.Database - Parent database context
.Metadata // map[string]interface{} - User metadata
```
### Script Mode
```go
.Script // *models.Script - Current script
.ParentSchema // *models.Schema - Parent schema
.ParentDatabase // *models.Database - Parent database context
.Metadata // map[string]interface{} - User metadata
```
### Model Structures
**Database:**
- `.Name` - Database name
- `.Schemas` - List of schemas
- `.Domains` - List of domains (business domain groupings)
- `.Description`, `.Comment` - Documentation
**Schema:**
- `.Name` - Schema name
- `.Tables` - List of tables
- `.Views`, `.Sequences`, `.Scripts` - Other objects
- `.Enums` - Enum types
**Domain:**
- `.Name` - Domain name
- `.Tables` - List of DomainTable references
- `.Description`, `.Comment` - Documentation
- `.Metadata` - Custom metadata map
**DomainTable:**
- `.TableName` - Name of the table
- `.SchemaName` - Schema containing the table
- `.RefTable` - Pointer to actual Table object (if loaded)
**Table:**
- `.Name` - Table name
- `.Schema` - Schema name
- `.Columns` - Map of columns (use `values` function to iterate)
- `.Constraints` - Map of constraints
- `.Indexes` - Map of indexes
- `.Relationships` - Map of relationships
- `.Description`, `.Comment` - Documentation
**Column:**
- `.Name` - Column name
- `.Type` - SQL type
- `.NotNull` - Is NOT NULL
- `.IsPrimaryKey` - Is primary key
- `.Default` - Default value
- `.Description`, `.Comment` - Documentation
## Examples
### Example 1: TypeScript Interfaces (Table Mode)
**Template:** `typescript-interface.tmpl`
```typescript
// Generated from {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}.{{ .Table.Name }}
export interface {{ .Table.Name | toPascalCase }} {
{{- range .Table.Columns | values }}
{{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull }};
{{- end }}
}
{{- $fks := filterForeignKeys .Table.Constraints }}
{{- if $fks }}
// Foreign Keys:
{{- range $fks }}
// - {{ .Name }}: references {{ .ReferencedTable }}
{{- end }}
{{- end }}
```
**Command:**
```bash
relspec templ --from pgsql --from-conn "..." \
--template typescript-interface.tmpl \
--mode table \
--output ./src/types/ \
--filename-pattern "{{.Name | toCamelCase}}.ts"
```
### Example 2: Markdown Documentation (Database Mode)
**Template:** `database-docs.tmpl`
```markdown
# Database: {{ .Database.Name }}
{{ if .Database.Description }}{{ .Database.Description }}{{ end }}
**Statistics:**
- Schemas: {{ len .Database.Schemas }}
- Tables: {{ .Summary.TotalTables }}
- Columns: {{ .Summary.TotalColumns }}
{{ range .Database.Schemas }}
## Schema: {{ .Name }}
{{ range .Tables }}
### {{ .Name }}
{{ if .Description }}{{ .Description }}{{ end }}
**Columns:**
| Column | Type | Nullable | PK | Description |
|--------|------|----------|----|----|
{{- range .Columns | values }}
| {{ .Name }} | `{{ .Type }}` | {{ if .NotNull }}No{{ else }}Yes{{ end }} | {{ if .IsPrimaryKey }}✓{{ end }} | {{ .Description }} |
{{- end }}
{{- $fks := filterForeignKeys .Constraints }}
{{- if $fks }}
**Foreign Keys:**
{{ range $fks }}
- `{{ .Name }}`: {{ join .Columns ", " }} → {{ .ReferencedTable }}({{ join .ReferencedColumns ", " }})
{{- end }}
{{- end }}
{{ end }}
{{ end }}
```
### Example 3: Python SQLAlchemy Models (Table Mode)
**Template:** `python-model.tmpl`
```python
"""{{ .Table.Name | toPascalCase }} model for {{ .ParentDatabase.Name }}.{{ .ParentSchema.Name }}"""
from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class {{ .Table.Name | toPascalCase }}(Base):
"""{{ if .Table.Description }}{{ .Table.Description }}{{ else }}{{ .Table.Name }} table{{ end }}"""
__tablename__ = "{{ .Table.Name }}"
__table_args__ = {"schema": "{{ .ParentSchema.Name }}"}
{{- range .Table.Columns | values }}
{{ .Name }} = Column({{ sqlToPython .Type }}{{ if .IsPrimaryKey }}, primary_key=True{{ end }}{{ if .NotNull }}, nullable=False{{ end }})
{{- end }}
```
### Example 4: GraphQL Schema (Schema Mode)
**Template:** `graphql-schema.tmpl`
```graphql
"""{{ .Schema.Name }} schema"""
{{ range .Schema.Tables }}
type {{ .Name | toPascalCase }} {
{{- range .Columns | values }}
{{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type .NotNull | replace " | null" "" }}{{ if .NotNull }}!{{ end }}
{{- end }}
}
input {{ .Name | toPascalCase }}Input {
{{- $cols := filterNotNull .Columns | filterPrimaryKeys }}
{{- range $cols }}
{{ .Name | toCamelCase }}: {{ sqlToTypeScript .Type true | replace " | null" "" }}!
{{- end }}
}
{{ end }}
```
### Example 5: SQL Migration (Database Mode)
**Template:** `migration.tmpl`
```sql
-- Migration for {{ .Database.Name }}
-- Generated: {{ .Metadata.timestamp }}
BEGIN;
{{ range .Database.Schemas }}
-- Schema: {{ .Name }}
CREATE SCHEMA IF NOT EXISTS {{ .Name }};
{{ range .Tables }}
CREATE TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }} (
{{- range $i, $col := .Columns | values }}
{{- if $i }},{{ end }}
{{ $col.Name }} {{ $col.Type }}{{ if $col.NotNull }} NOT NULL{{ end }}{{ if $col.Default }} DEFAULT {{ $col.Default }}{{ end }}
{{- end }}
);
{{- $pks := filterPrimaryKeys .Columns }}
{{- if $pks }}
ALTER TABLE {{ $.Database.Name }}.{{ .Schema }}.{{ .Name }}
ADD PRIMARY KEY ({{ range $i, $pk := $pks }}{{ if $i }}, {{ end }}{{ $pk.Name }}{{ end }});
{{- end }}
{{ end }}
{{ end }}
COMMIT;
```
## Best Practices
1. **Use Hyphen for Whitespace Control:**
```
{{- removes whitespace before
-}} removes whitespace after
```
2. **Store Intermediate Results:**
```
{{ $pks := filterPrimaryKeys .Table.Columns }}
{{ if $pks }}...{{ end }}
```
3. **Check Before Accessing:**
```
{{ if .Table.Description }}{{ .Table.Description }}{{ end }}
```
4. **Use Safe Access for Maps:**
```
{{ getOr .Metadata "key" "default-value" }}
```
5. **Iterate Map Values:**
```
{{ range .Table.Columns | values }}...{{ end }}
```
## Troubleshooting
**Error: "wrong type for value"**
- Check function parameter order (e.g., `sqlToGo .Type .NotNull` not `.NotNull .Type`)
**Error: "can't evaluate field"**
- Field doesn't exist on the object
- Use `{{ if .Field }}` to check before accessing
**Empty Output:**
- Check your mode matches your template expectations
- Verify data exists (use `{{ .Database | toJSON }}` to inspect)
**Whitespace Issues:**
- Use `{{-` and `-}}` to control whitespace
- Run output through a formatter if needed
## Additional Resources
- [Go Template Documentation](https://pkg.go.dev/text/template)
- [RelSpec Documentation](../README.md)
- [Model Structure Reference](../pkg/models/)
- [Example Templates](../examples/templates/)

go.mod

@@ -3,29 +3,49 @@ module git.warky.dev/wdevs/relspecgo
go 1.24.0
require (
github.com/gdamore/tcell/v2 v2.8.1
github.com/google/uuid v1.6.0
github.com/jackc/pgx/v5 v5.7.6
github.com/microsoft/go-mssqldb v1.9.6
github.com/rivo/tview v0.42.0
github.com/spf13/cobra v1.10.2
github.com/stretchr/testify v1.11.1
github.com/uptrace/bun v1.2.16
golang.org/x/text v0.31.0
gopkg.in/yaml.v3 v3.0.1
modernc.org/sqlite v1.44.3
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gdamore/encoding v1.0.1 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.28.0 // indirect
golang.org/x/term v0.37.0 // indirect
modernc.org/libc v1.67.6 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
)

go.sum

@@ -1,10 +1,39 @@
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZba3YZqeTNJPtvqZoBu1sBN/L4sry+u2U3Y75w=
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
github.com/gdamore/tcell/v2 v2.8.1 h1:KPNxyqclpWpWQlPLx6Xui1pMk8S+7+R37h3g07997NU=
github.com/gdamore/tcell/v2 v2.8.1/go.mod h1:bj8ori1BG3OYMjmb3IklZVWfZUJ1UBQt9JXrOCOhGWw=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
@@ -21,15 +50,39 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/microsoft/go-mssqldb v1.9.6 h1:1MNQg5UiSsokiPz3++K2KPx4moKrwIqly1wv+RyCKTw=
github.com/microsoft/go-mssqldb v1.9.6/go.mod h1:yYMPDufyoF2vVuVCUGtZARr06DKFIhMrluTcgWlXpr4=
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@@ -48,18 +101,119 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc=
modernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM=
modernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=
modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=
modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.67.6 h1:eVOQvpModVLKOdT+LvBPjdQqfrZq+pC39BygcT+E7OI=
modernc.org/libc v1.67.6/go.mod h1:JAhxUVlolfYDErnwiqaLvUqc8nfb2r6S6slAgZOnaiE=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.44.3 h1:+39JvV/HWMcYslAwRxHb8067w+2zowvFOUrOWIy9PjY=
modernc.org/sqlite v1.44.3/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=

View File

@@ -0,0 +1,714 @@
package commontypes
import (
"testing"
)
func TestExtractBaseType(t *testing.T) {
tests := []struct {
name string
sqlType string
want string
}{
{"varchar with length", "varchar(100)", "varchar"},
{"VARCHAR uppercase with length", "VARCHAR(255)", "varchar"},
{"numeric with precision", "numeric(10,2)", "numeric"},
{"NUMERIC uppercase", "NUMERIC(18,4)", "numeric"},
{"decimal with precision", "decimal(15,3)", "decimal"},
{"char with length", "char(50)", "char"},
{"simple integer", "integer", "integer"},
{"simple text", "text", "text"},
{"bigint", "bigint", "bigint"},
{"With spaces", " varchar(100) ", "varchar"},
{"No parentheses", "boolean", "boolean"},
{"Empty string", "", ""},
{"Mixed case", "VarChar(100)", "varchar"},
{"timestamp with time zone", "timestamp(6) with time zone", "timestamp"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := ExtractBaseType(tt.sqlType)
if got != tt.want {
t.Errorf("ExtractBaseType(%q) = %q, want %q", tt.sqlType, got, tt.want)
}
})
}
}
func TestNormalizeType(t *testing.T) {
// NormalizeType is an alias for ExtractBaseType, test that they behave the same
testCases := []string{
"varchar(100)",
"numeric(10,2)",
"integer",
"text",
" VARCHAR(255) ",
}
for _, tc := range testCases {
t.Run(tc, func(t *testing.T) {
extracted := ExtractBaseType(tc)
normalized := NormalizeType(tc)
if extracted != normalized {
t.Errorf("ExtractBaseType(%q) = %q, but NormalizeType(%q) = %q",
tc, extracted, tc, normalized)
}
})
}
}
func TestSQLToGo(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types (nullable)
{"integer nullable", "integer", true, "int32"},
{"bigint nullable", "bigint", true, "int64"},
{"smallint nullable", "smallint", true, "int16"},
{"serial nullable", "serial", true, "int32"},
// Integer types (not nullable)
{"integer not nullable", "integer", false, "*int32"},
{"bigint not nullable", "bigint", false, "*int64"},
{"smallint not nullable", "smallint", false, "*int16"},
// String types (nullable)
{"text nullable", "text", true, "string"},
{"varchar nullable", "varchar", true, "string"},
{"varchar with length nullable", "varchar(100)", true, "string"},
// String types (not nullable)
{"text not nullable", "text", false, "*string"},
{"varchar not nullable", "varchar", false, "*string"},
// Boolean
{"boolean nullable", "boolean", true, "bool"},
{"boolean not nullable", "boolean", false, "*bool"},
// Float types
{"real nullable", "real", true, "float32"},
{"double precision nullable", "double precision", true, "float64"},
{"real not nullable", "real", false, "*float32"},
{"double precision not nullable", "double precision", false, "*float64"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "time.Time"},
{"date nullable", "date", true, "time.Time"},
{"timestamp not nullable", "timestamp", false, "*time.Time"},
// Binary
{"bytea nullable", "bytea", true, "[]byte"},
{"bytea not nullable", "bytea", false, "[]byte"}, // Slices don't get pointer
// UUID
{"uuid nullable", "uuid", true, "string"},
{"uuid not nullable", "uuid", false, "*string"},
// JSON
{"json nullable", "json", true, "string"},
{"jsonb nullable", "jsonb", true, "string"},
// Array
{"array nullable", "array", true, "[]string"},
{"array not nullable", "array", false, "[]string"}, // Slices don't get pointer
// Unknown types
{"unknown type nullable", "unknowntype", true, "interface{}"},
{"unknown type not nullable", "unknowntype", false, "interface{}"}, // Interface doesn't get pointer
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToGo(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToGo(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestSQLToTypeScript(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types
{"integer nullable", "integer", true, "number"},
{"integer not nullable", "integer", false, "number | null"},
{"bigint nullable", "bigint", true, "number"},
{"bigint not nullable", "bigint", false, "number | null"},
// String types
{"text nullable", "text", true, "string"},
{"text not nullable", "text", false, "string | null"},
{"varchar nullable", "varchar", true, "string"},
{"varchar(100) nullable", "varchar(100)", true, "string"},
// Boolean
{"boolean nullable", "boolean", true, "boolean"},
{"boolean not nullable", "boolean", false, "boolean | null"},
// Float types
{"real nullable", "real", true, "number"},
{"double precision nullable", "double precision", true, "number"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "Date"},
{"date nullable", "date", true, "Date"},
{"timestamp not nullable", "timestamp", false, "Date | null"},
// Binary
{"bytea nullable", "bytea", true, "Buffer"},
{"bytea not nullable", "bytea", false, "Buffer | null"},
// JSON
{"json nullable", "json", true, "any"},
{"jsonb nullable", "jsonb", true, "any"},
// UUID
{"uuid nullable", "uuid", true, "string"},
// Unknown types
{"unknown type nullable", "unknowntype", true, "any"},
{"unknown type not nullable", "unknowntype", false, "any | null"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToTypeScript(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToTypeScript(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestSQLToPython(t *testing.T) {
tests := []struct {
name string
sqlType string
want string
}{
// Integer types
{"integer", "integer", "int"},
{"bigint", "bigint", "int"},
{"smallint", "smallint", "int"},
// String types
{"text", "text", "str"},
{"varchar", "varchar", "str"},
{"varchar(100)", "varchar(100)", "str"},
// Boolean
{"boolean", "boolean", "bool"},
// Float types
{"real", "real", "float"},
{"double precision", "double precision", "float"},
{"numeric", "numeric", "Decimal"},
{"decimal", "decimal", "Decimal"},
// Date/Time types
{"timestamp", "timestamp", "datetime"},
{"date", "date", "date"},
{"time", "time", "time"},
// Binary
{"bytea", "bytea", "bytes"},
// JSON
{"json", "json", "dict"},
{"jsonb", "jsonb", "dict"},
// UUID
{"uuid", "uuid", "UUID"},
// Array
{"array", "array", "list"},
// Unknown types
{"unknown type", "unknowntype", "Any"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToPython(tt.sqlType)
if got != tt.want {
t.Errorf("SQLToPython(%q) = %q, want %q", tt.sqlType, got, tt.want)
}
})
}
}
func TestSQLToCSharp(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types (nullable)
{"integer nullable", "integer", true, "int"},
{"bigint nullable", "bigint", true, "long"},
{"smallint nullable", "smallint", true, "short"},
// Integer types (not nullable - value types get ?)
{"integer not nullable", "integer", false, "int?"},
{"bigint not nullable", "bigint", false, "long?"},
{"smallint not nullable", "smallint", false, "short?"},
// String types (reference types, no ? needed)
{"text nullable", "text", true, "string"},
{"text not nullable", "text", false, "string"},
{"varchar nullable", "varchar", true, "string"},
{"varchar(100) nullable", "varchar(100)", true, "string"},
// Boolean
{"boolean nullable", "boolean", true, "bool"},
{"boolean not nullable", "boolean", false, "bool?"},
// Float types
{"real nullable", "real", true, "float"},
{"double precision nullable", "double precision", true, "double"},
{"decimal nullable", "decimal", true, "decimal"},
{"real not nullable", "real", false, "float?"},
{"double precision not nullable", "double precision", false, "double?"},
{"decimal not nullable", "decimal", false, "decimal?"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "DateTime"},
{"date nullable", "date", true, "DateTime"},
{"timestamptz nullable", "timestamptz", true, "DateTimeOffset"},
{"timestamp not nullable", "timestamp", false, "DateTime?"},
{"timestamptz not nullable", "timestamptz", false, "DateTimeOffset?"},
// Binary (array type, no ?)
{"bytea nullable", "bytea", true, "byte[]"},
{"bytea not nullable", "bytea", false, "byte[]"},
// UUID
{"uuid nullable", "uuid", true, "Guid"},
{"uuid not nullable", "uuid", false, "Guid?"},
// JSON
{"json nullable", "json", true, "string"},
// Unknown types (object is reference type)
{"unknown type nullable", "unknowntype", true, "object"},
{"unknown type not nullable", "unknowntype", false, "object"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToCSharp(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToCSharp(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestNeedsTimeImport(t *testing.T) {
tests := []struct {
name string
goType string
want bool
}{
{"time.Time type", "time.Time", true},
{"pointer to time.Time", "*time.Time", true},
{"int32 type", "int32", false},
{"string type", "string", false},
{"bool type", "bool", false},
{"[]byte type", "[]byte", false},
{"interface{}", "interface{}", false},
{"empty string", "", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := NeedsTimeImport(tt.goType)
if got != tt.want {
t.Errorf("NeedsTimeImport(%q) = %v, want %v", tt.goType, got, tt.want)
}
})
}
}
func TestGoTypeMap(t *testing.T) {
// Test that the map contains expected entries
expectedMappings := map[string]string{
"integer": "int32",
"bigint": "int64",
"text": "string",
"boolean": "bool",
"double precision": "float64",
"bytea": "[]byte",
"timestamp": "time.Time",
"uuid": "string",
"json": "string",
}
for sqlType, expectedGoType := range expectedMappings {
if goType, ok := GoTypeMap[sqlType]; !ok {
t.Errorf("GoTypeMap missing entry for %q", sqlType)
} else if goType != expectedGoType {
t.Errorf("GoTypeMap[%q] = %q, want %q", sqlType, goType, expectedGoType)
}
}
if len(GoTypeMap) == 0 {
t.Error("GoTypeMap is empty")
}
}
func TestTypeScriptTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "number",
"bigint": "number",
"text": "string",
"boolean": "boolean",
"double precision": "number",
"bytea": "Buffer",
"timestamp": "Date",
"uuid": "string",
"json": "any",
}
for sqlType, expectedTSType := range expectedMappings {
if tsType, ok := TypeScriptTypeMap[sqlType]; !ok {
t.Errorf("TypeScriptTypeMap missing entry for %q", sqlType)
} else if tsType != expectedTSType {
t.Errorf("TypeScriptTypeMap[%q] = %q, want %q", sqlType, tsType, expectedTSType)
}
}
if len(TypeScriptTypeMap) == 0 {
t.Error("TypeScriptTypeMap is empty")
}
}
func TestPythonTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "int",
"bigint": "int",
"text": "str",
"boolean": "bool",
"real": "float",
"numeric": "Decimal",
"bytea": "bytes",
"date": "date",
"uuid": "UUID",
"json": "dict",
}
for sqlType, expectedPyType := range expectedMappings {
if pyType, ok := PythonTypeMap[sqlType]; !ok {
t.Errorf("PythonTypeMap missing entry for %q", sqlType)
} else if pyType != expectedPyType {
t.Errorf("PythonTypeMap[%q] = %q, want %q", sqlType, pyType, expectedPyType)
}
}
if len(PythonTypeMap) == 0 {
t.Error("PythonTypeMap is empty")
}
}
func TestCSharpTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "int",
"bigint": "long",
"smallint": "short",
"text": "string",
"boolean": "bool",
"double precision": "double",
"decimal": "decimal",
"bytea": "byte[]",
"timestamp": "DateTime",
"uuid": "Guid",
"json": "string",
}
for sqlType, expectedCSType := range expectedMappings {
if csType, ok := CSharpTypeMap[sqlType]; !ok {
t.Errorf("CSharpTypeMap missing entry for %q", sqlType)
} else if csType != expectedCSType {
t.Errorf("CSharpTypeMap[%q] = %q, want %q", sqlType, csType, expectedCSType)
}
}
if len(CSharpTypeMap) == 0 {
t.Error("CSharpTypeMap is empty")
}
}
func TestSQLToJava(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types
{"integer nullable", "integer", true, "Integer"},
{"integer not nullable", "integer", false, "Integer"},
{"bigint nullable", "bigint", true, "Long"},
{"smallint nullable", "smallint", true, "Short"},
// String types
{"text nullable", "text", true, "String"},
{"varchar nullable", "varchar", true, "String"},
{"varchar(100) nullable", "varchar(100)", true, "String"},
// Boolean
{"boolean nullable", "boolean", true, "Boolean"},
// Float types
{"real nullable", "real", true, "Float"},
{"double precision nullable", "double precision", true, "Double"},
{"numeric nullable", "numeric", true, "BigDecimal"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "Timestamp"},
{"date nullable", "date", true, "Date"},
{"time nullable", "time", true, "Time"},
// Binary
{"bytea nullable", "bytea", true, "byte[]"},
// UUID
{"uuid nullable", "uuid", true, "UUID"},
// JSON
{"json nullable", "json", true, "String"},
// Unknown types
{"unknown type nullable", "unknowntype", true, "Object"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToJava(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToJava(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestSQLToPhp(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types (nullable)
{"integer nullable", "integer", true, "int"},
{"bigint nullable", "bigint", true, "int"},
{"smallint nullable", "smallint", true, "int"},
// Integer types (not nullable)
{"integer not nullable", "integer", false, "?int"},
{"bigint not nullable", "bigint", false, "?int"},
// String types
{"text nullable", "text", true, "string"},
{"text not nullable", "text", false, "?string"},
{"varchar nullable", "varchar", true, "string"},
{"varchar(100) nullable", "varchar(100)", true, "string"},
// Boolean
{"boolean nullable", "boolean", true, "bool"},
{"boolean not nullable", "boolean", false, "?bool"},
// Float types
{"real nullable", "real", true, "float"},
{"double precision nullable", "double precision", true, "float"},
{"real not nullable", "real", false, "?float"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "\\DateTime"},
{"date nullable", "date", true, "\\DateTime"},
{"timestamp not nullable", "timestamp", false, "?\\DateTime"},
// Binary
{"bytea nullable", "bytea", true, "string"},
{"bytea not nullable", "bytea", false, "?string"},
// JSON
{"json nullable", "json", true, "array"},
{"json not nullable", "json", false, "?array"},
// UUID
{"uuid nullable", "uuid", true, "string"},
// Unknown types
{"unknown type nullable", "unknowntype", true, "mixed"},
{"unknown type not nullable", "unknowntype", false, "mixed"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToPhp(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToPhp(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestSQLToRust(t *testing.T) {
tests := []struct {
name string
sqlType string
nullable bool
want string
}{
// Integer types (nullable)
{"integer nullable", "integer", true, "i32"},
{"bigint nullable", "bigint", true, "i64"},
{"smallint nullable", "smallint", true, "i16"},
// Integer types (not nullable)
{"integer not nullable", "integer", false, "Option<i32>"},
{"bigint not nullable", "bigint", false, "Option<i64>"},
{"smallint not nullable", "smallint", false, "Option<i16>"},
// String types
{"text nullable", "text", true, "String"},
{"text not nullable", "text", false, "Option<String>"},
{"varchar nullable", "varchar", true, "String"},
{"varchar(100) nullable", "varchar(100)", true, "String"},
// Boolean
{"boolean nullable", "boolean", true, "bool"},
{"boolean not nullable", "boolean", false, "Option<bool>"},
// Float types
{"real nullable", "real", true, "f32"},
{"double precision nullable", "double precision", true, "f64"},
{"real not nullable", "real", false, "Option<f32>"},
{"double precision not nullable", "double precision", false, "Option<f64>"},
// Date/Time types
{"timestamp nullable", "timestamp", true, "NaiveDateTime"},
{"timestamptz nullable", "timestamptz", true, "DateTime<Utc>"},
{"date nullable", "date", true, "NaiveDate"},
{"time nullable", "time", true, "NaiveTime"},
{"timestamp not nullable", "timestamp", false, "Option<NaiveDateTime>"},
// Binary
{"bytea nullable", "bytea", true, "Vec<u8>"},
{"bytea not nullable", "bytea", false, "Option<Vec<u8>>"},
// JSON
{"json nullable", "json", true, "serde_json::Value"},
{"json not nullable", "json", false, "Option<serde_json::Value>"},
// UUID
{"uuid nullable", "uuid", true, "String"},
// Unknown types
{"unknown type nullable", "unknowntype", true, "String"},
{"unknown type not nullable", "unknowntype", false, "Option<String>"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SQLToRust(tt.sqlType, tt.nullable)
if got != tt.want {
t.Errorf("SQLToRust(%q, %v) = %q, want %q", tt.sqlType, tt.nullable, got, tt.want)
}
})
}
}
func TestJavaTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "Integer",
"bigint": "Long",
"smallint": "Short",
"text": "String",
"boolean": "Boolean",
"double precision": "Double",
"numeric": "BigDecimal",
"bytea": "byte[]",
"timestamp": "Timestamp",
"uuid": "UUID",
"date": "Date",
}
for sqlType, expectedJavaType := range expectedMappings {
if javaType, ok := JavaTypeMap[sqlType]; !ok {
t.Errorf("JavaTypeMap missing entry for %q", sqlType)
} else if javaType != expectedJavaType {
t.Errorf("JavaTypeMap[%q] = %q, want %q", sqlType, javaType, expectedJavaType)
}
}
if len(JavaTypeMap) == 0 {
t.Error("JavaTypeMap is empty")
}
}
func TestPHPTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "int",
"bigint": "int",
"text": "string",
"boolean": "bool",
"double precision": "float",
"bytea": "string",
"timestamp": "\\DateTime",
"uuid": "string",
"json": "array",
}
for sqlType, expectedPHPType := range expectedMappings {
if phpType, ok := PHPTypeMap[sqlType]; !ok {
t.Errorf("PHPTypeMap missing entry for %q", sqlType)
} else if phpType != expectedPHPType {
t.Errorf("PHPTypeMap[%q] = %q, want %q", sqlType, phpType, expectedPHPType)
}
}
if len(PHPTypeMap) == 0 {
t.Error("PHPTypeMap is empty")
}
}
func TestRustTypeMap(t *testing.T) {
expectedMappings := map[string]string{
"integer": "i32",
"bigint": "i64",
"smallint": "i16",
"text": "String",
"boolean": "bool",
"double precision": "f64",
"real": "f32",
"bytea": "Vec<u8>",
"timestamp": "NaiveDateTime",
"timestamptz": "DateTime<Utc>",
"date": "NaiveDate",
"json": "serde_json::Value",
}
for sqlType, expectedRustType := range expectedMappings {
if rustType, ok := RustTypeMap[sqlType]; !ok {
t.Errorf("RustTypeMap missing entry for %q", sqlType)
} else if rustType != expectedRustType {
t.Errorf("RustTypeMap[%q] = %q, want %q", sqlType, rustType, expectedRustType)
}
}
if len(RustTypeMap) == 0 {
t.Error("RustTypeMap is empty")
}
}

74
pkg/commontypes/csharp.go Normal file
View File

@@ -0,0 +1,74 @@
package commontypes
import "strings"
// CSharpTypeMap maps PostgreSQL types to C# types
var CSharpTypeMap = map[string]string{
// Integer types
"integer": "int",
"int": "int",
"int4": "int",
"smallint": "short",
"int2": "short",
"bigint": "long",
"int8": "long",
"serial": "int",
"bigserial": "long",
"smallserial": "short",
// String types
"text": "string",
"varchar": "string",
"char": "string",
"character": "string",
"citext": "string",
"bpchar": "string",
"uuid": "Guid",
// Boolean
"boolean": "bool",
"bool": "bool",
// Float types
"real": "float",
"float4": "float",
"double precision": "double",
"float8": "double",
"numeric": "decimal",
"decimal": "decimal",
// Date/Time types
"timestamp": "DateTime",
"timestamp without time zone": "DateTime",
"timestamp with time zone": "DateTimeOffset",
"timestamptz": "DateTimeOffset",
"date": "DateTime",
"time": "TimeSpan",
"time without time zone": "TimeSpan",
"time with time zone": "DateTimeOffset",
"timetz": "DateTimeOffset",
// Binary
"bytea": "byte[]",
// JSON
"json": "string",
"jsonb": "string",
}
// SQLToCSharp converts SQL types to C# types
func SQLToCSharp(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
csType, ok := CSharpTypeMap[baseType]
if !ok {
csType = "object"
}
// Append "?" to value types when the column is not nullable (strings, arrays, and object are reference types and stay as-is)
if !nullable && csType != "string" && !strings.HasSuffix(csType, "[]") && csType != "object" {
return csType + "?"
}
return csType
}
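
A minimal usage sketch of the nullable handling above (a hypothetical snippet, not part of the repo; the import path follows the package's doc.go):

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
    // Value types gain a "?" suffix only when the column is not nullable.
    fmt.Println(commontypes.SQLToCSharp("integer", false)) // int?
    // Reference types such as string stay unchanged either way.
    fmt.Println(commontypes.SQLToCSharp("varchar(100)", true)) // string
}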

28
pkg/commontypes/doc.go Normal file
View File

@@ -0,0 +1,28 @@
// Package commontypes provides shared type definitions used across multiple packages.
//
// # Overview
//
// The commontypes package contains common data structures, constants, and type
// definitions that are shared between different parts of RelSpec but don't belong
// to the core models package.
//
// # Purpose
//
// This package helps avoid circular dependencies by providing a common location
// for types that are used by multiple packages without creating import cycles.
//
// # Contents
//
// Common types may include:
// - Shared enums and constants
// - Utility type aliases
// - Common error types
// - Shared configuration structures
//
// # Usage
//
// import "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
//
// // Use common types
// var formatType commontypes.FormatType
package commontypes

89
pkg/commontypes/golang.go Normal file
View File

@@ -0,0 +1,89 @@
package commontypes
import "strings"
// GoTypeMap maps PostgreSQL types to Go types
var GoTypeMap = map[string]string{
// Integer types
"integer": "int32",
"int": "int32",
"int4": "int32",
"smallint": "int16",
"int2": "int16",
"bigint": "int64",
"int8": "int64",
"serial": "int32",
"bigserial": "int64",
"smallserial": "int16",
// String types
"text": "string",
"varchar": "string",
"char": "string",
"character": "string",
"citext": "string",
"bpchar": "string",
// Boolean
"boolean": "bool",
"bool": "bool",
// Float types
"real": "float32",
"float4": "float32",
"double precision": "float64",
"float8": "float64",
"numeric": "float64",
"decimal": "float64",
// Date/Time types
"timestamp": "time.Time",
"timestamp without time zone": "time.Time",
"timestamp with time zone": "time.Time",
"timestamptz": "time.Time",
"date": "time.Time",
"time": "time.Time",
"time without time zone": "time.Time",
"time with time zone": "time.Time",
"timetz": "time.Time",
// Binary
"bytea": "[]byte",
// UUID
"uuid": "string",
// JSON
"json": "string",
"jsonb": "string",
// Array
"array": "[]string",
}
// SQLToGo converts SQL types to Go types
func SQLToGo(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
goType, ok := GoTypeMap[baseType]
if !ok {
goType = "interface{}"
}
// Handle nullable types
if nullable {
return goType
}
// For non-nullable columns, use pointer types (except for slices and interfaces)
if !strings.HasPrefix(goType, "[]") && goType != "interface{}" {
return "*" + goType
}
return goType
}
// NeedsTimeImport checks if a Go type requires the time package
func NeedsTimeImport(goType string) bool {
return strings.Contains(goType, "time.Time")
}
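
A short sketch of how a generator might chain the two helpers above (hypothetical snippet; the expected values match the test table earlier in this diff):

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
    goType := commontypes.SQLToGo("timestamp", false) // "*time.Time" per GoTypeMap
    if commontypes.NeedsTimeImport(goType) {
        fmt.Println(`import "time"`) // a code generator would emit this import
    }
}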

68
pkg/commontypes/java.go Normal file
View File

@@ -0,0 +1,68 @@
package commontypes
// JavaTypeMap maps PostgreSQL types to Java types
var JavaTypeMap = map[string]string{
// Integer types
"integer": "Integer",
"int": "Integer",
"int4": "Integer",
"smallint": "Short",
"int2": "Short",
"bigint": "Long",
"int8": "Long",
"serial": "Integer",
"bigserial": "Long",
"smallserial": "Short",
// String types
"text": "String",
"varchar": "String",
"char": "String",
"character": "String",
"citext": "String",
"bpchar": "String",
"uuid": "UUID",
// Boolean
"boolean": "Boolean",
"bool": "Boolean",
// Float types
"real": "Float",
"float4": "Float",
"double precision": "Double",
"float8": "Double",
"numeric": "BigDecimal",
"decimal": "BigDecimal",
// Date/Time types
"timestamp": "Timestamp",
"timestamp without time zone": "Timestamp",
"timestamp with time zone": "Timestamp",
"timestamptz": "Timestamp",
"date": "Date",
"time": "Time",
"time without time zone": "Time",
"time with time zone": "Time",
"timetz": "Time",
// Binary
"bytea": "byte[]",
// JSON
"json": "String",
"jsonb": "String",
}
// SQLToJava converts SQL types to Java types
func SQLToJava(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
javaType, ok := JavaTypeMap[baseType]
if !ok {
javaType = "Object"
}
// Java wrapper classes (Integer, Long, etc.) are nullable by default, so the flag needs no special handling
return javaType
}

72
pkg/commontypes/php.go Normal file
View File

@@ -0,0 +1,72 @@
package commontypes
// PHPTypeMap maps PostgreSQL types to PHP types
var PHPTypeMap = map[string]string{
// Integer types
"integer": "int",
"int": "int",
"int4": "int",
"smallint": "int",
"int2": "int",
"bigint": "int",
"int8": "int",
"serial": "int",
"bigserial": "int",
"smallserial": "int",
// String types
"text": "string",
"varchar": "string",
"char": "string",
"character": "string",
"citext": "string",
"bpchar": "string",
"uuid": "string",
// Boolean
"boolean": "bool",
"bool": "bool",
// Float types
"real": "float",
"float4": "float",
"double precision": "float",
"float8": "float",
"numeric": "float",
"decimal": "float",
// Date/Time types
"timestamp": "\\DateTime",
"timestamp without time zone": "\\DateTime",
"timestamp with time zone": "\\DateTime",
"timestamptz": "\\DateTime",
"date": "\\DateTime",
"time": "\\DateTime",
"time without time zone": "\\DateTime",
"time with time zone": "\\DateTime",
"timetz": "\\DateTime",
// Binary
"bytea": "string",
// JSON
"json": "array",
"jsonb": "array",
}
// SQLToPhp converts SQL types to PHP types
func SQLToPhp(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
phpType, ok := PHPTypeMap[baseType]
if !ok {
phpType = "mixed"
}
// PHP 7.1+ supports nullable types with ?Type syntax
if !nullable && phpType != "mixed" {
return "?" + phpType
}
return phpType
}

71
pkg/commontypes/python.go Normal file
View File

@@ -0,0 +1,71 @@
package commontypes
// PythonTypeMap maps PostgreSQL types to Python types
var PythonTypeMap = map[string]string{
// Integer types
"integer": "int",
"int": "int",
"int4": "int",
"smallint": "int",
"int2": "int",
"bigint": "int",
"int8": "int",
"serial": "int",
"bigserial": "int",
"smallserial": "int",
// String types
"text": "str",
"varchar": "str",
"char": "str",
"character": "str",
"citext": "str",
"bpchar": "str",
"uuid": "UUID",
// Boolean
"boolean": "bool",
"bool": "bool",
// Float types
"real": "float",
"float4": "float",
"double precision": "float",
"float8": "float",
"numeric": "Decimal",
"decimal": "Decimal",
// Date/Time types
"timestamp": "datetime",
"timestamp without time zone": "datetime",
"timestamp with time zone": "datetime",
"timestamptz": "datetime",
"date": "date",
"time": "time",
"time without time zone": "time",
"time with time zone": "time",
"timetz": "time",
// Binary
"bytea": "bytes",
// JSON
"json": "dict",
"jsonb": "dict",
// Array
"array": "list",
}
// SQLToPython converts SQL types to Python types
func SQLToPython(sqlType string) string {
baseType := ExtractBaseType(sqlType)
pyType, ok := PythonTypeMap[baseType]
if !ok {
pyType = "Any"
}
// Python uses Optional[Type] for nullable, but we return the base type
return pyType
}

72
pkg/commontypes/rust.go Normal file
View File

@@ -0,0 +1,72 @@
package commontypes
// RustTypeMap maps PostgreSQL types to Rust types
var RustTypeMap = map[string]string{
// Integer types
"integer": "i32",
"int": "i32",
"int4": "i32",
"smallint": "i16",
"int2": "i16",
"bigint": "i64",
"int8": "i64",
"serial": "i32",
"bigserial": "i64",
"smallserial": "i16",
// String types
"text": "String",
"varchar": "String",
"char": "String",
"character": "String",
"citext": "String",
"bpchar": "String",
"uuid": "String",
// Boolean
"boolean": "bool",
"bool": "bool",
// Float types
"real": "f32",
"float4": "f32",
"double precision": "f64",
"float8": "f64",
"numeric": "f64",
"decimal": "f64",
// Date/Time types (using chrono crate)
"timestamp": "NaiveDateTime",
"timestamp without time zone": "NaiveDateTime",
"timestamp with time zone": "DateTime<Utc>",
"timestamptz": "DateTime<Utc>",
"date": "NaiveDate",
"time": "NaiveTime",
"time without time zone": "NaiveTime",
"time with time zone": "DateTime<Utc>",
"timetz": "DateTime<Utc>",
// Binary
"bytea": "Vec<u8>",
// JSON
"json": "serde_json::Value",
"jsonb": "serde_json::Value",
}
// SQLToRust converts SQL types to Rust types
func SQLToRust(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
rustType, ok := RustTypeMap[baseType]
if !ok {
rustType = "String"
}
// Wrap non-nullable columns in Option<T>; nullable columns keep the base type
if nullable {
return rustType
}
return "Option<" + rustType + ">"
}
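
The Option<T> wrapping, exercised the same way (hypothetical snippet):

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
    fmt.Println(commontypes.SQLToRust("bytea", false))       // Option<Vec<u8>>
    fmt.Println(commontypes.SQLToRust("timestamptz", true))  // DateTime<Utc>
}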

22
pkg/commontypes/sql.go Normal file
View File

@@ -0,0 +1,22 @@
package commontypes
import "strings"
// ExtractBaseType extracts the base type from a SQL type string
// Examples: varchar(100) → varchar, numeric(10,2) → numeric
func ExtractBaseType(sqlType string) string {
sqlType = strings.ToLower(strings.TrimSpace(sqlType))
// Remove everything after '('
if idx := strings.Index(sqlType, "("); idx > 0 {
sqlType = sqlType[:idx]
}
return sqlType
}
// NormalizeType normalizes a SQL type to its base form
// Alias for ExtractBaseType for backwards compatibility
func NormalizeType(sqlType string) string {
return ExtractBaseType(sqlType)
}
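
A quick sanity check of the trimming behaviour, mirroring the test table earlier in this diff (hypothetical snippet):

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/commontypes"
)

func main() {
    fmt.Println(commontypes.ExtractBaseType(" VarChar(100) "))              // varchar
    fmt.Println(commontypes.ExtractBaseType("timestamp(6) with time zone")) // timestamp
}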

75
pkg/commontypes/typescript.go Normal file
View File

@@ -0,0 +1,75 @@
package commontypes
// TypeScriptTypeMap maps PostgreSQL types to TypeScript types
var TypeScriptTypeMap = map[string]string{
// Integer types
"integer": "number",
"int": "number",
"int4": "number",
"smallint": "number",
"int2": "number",
"bigint": "number",
"int8": "number",
"serial": "number",
"bigserial": "number",
"smallserial": "number",
// String types
"text": "string",
"varchar": "string",
"char": "string",
"character": "string",
"citext": "string",
"bpchar": "string",
"uuid": "string",
// Boolean
"boolean": "boolean",
"bool": "boolean",
// Float types
"real": "number",
"float4": "number",
"double precision": "number",
"float8": "number",
"numeric": "number",
"decimal": "number",
// Date/Time types
"timestamp": "Date",
"timestamp without time zone": "Date",
"timestamp with time zone": "Date",
"timestamptz": "Date",
"date": "Date",
"time": "Date",
"time without time zone": "Date",
"time with time zone": "Date",
"timetz": "Date",
// Binary
"bytea": "Buffer",
// JSON
"json": "any",
"jsonb": "any",
// Array
"array": "any[]",
}
// SQLToTypeScript converts SQL types to TypeScript types
func SQLToTypeScript(sqlType string, nullable bool) string {
baseType := ExtractBaseType(sqlType)
tsType, ok := TypeScriptTypeMap[baseType]
if !ok {
tsType = "any"
}
// Non-nullable columns get a " | null" union; nullable columns keep the base type
if nullable {
return tsType
}
return tsType + " | null"
}

558
pkg/diff/diff_test.go Normal file
View File

@@ -0,0 +1,558 @@
package diff
import (
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func TestCompareDatabases(t *testing.T) {
tests := []struct {
name string
source *models.Database
target *models.Database
want func(*DiffResult) bool
}{
{
name: "identical databases",
source: &models.Database{
Name: "source",
Schemas: []*models.Schema{},
},
target: &models.Database{
Name: "target",
Schemas: []*models.Schema{},
},
want: func(r *DiffResult) bool {
return r.Source == "source" && r.Target == "target" &&
len(r.Schemas.Missing) == 0 && len(r.Schemas.Extra) == 0
},
},
{
name: "different schemas",
source: &models.Database{
Name: "source",
Schemas: []*models.Schema{
{Name: "public"},
},
},
target: &models.Database{
Name: "target",
Schemas: []*models.Schema{},
},
want: func(r *DiffResult) bool {
return len(r.Schemas.Missing) == 1 && r.Schemas.Missing[0].Name == "public"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := CompareDatabases(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("CompareDatabases() result doesn't match expectations")
}
})
}
}
func TestCompareColumns(t *testing.T) {
tests := []struct {
name string
source map[string]*models.Column
target map[string]*models.Column
want func(*ColumnDiff) bool
}{
{
name: "identical columns",
source: map[string]*models.Column{},
target: map[string]*models.Column{},
want: func(d *ColumnDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing column",
source: map[string]*models.Column{
"id": {Name: "id", Type: "integer"},
},
target: map[string]*models.Column{},
want: func(d *ColumnDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "id"
},
},
{
name: "extra column",
source: map[string]*models.Column{},
target: map[string]*models.Column{
"id": {Name: "id", Type: "integer"},
},
want: func(d *ColumnDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "id"
},
},
{
name: "modified column type",
source: map[string]*models.Column{
"id": {Name: "id", Type: "integer"},
},
target: map[string]*models.Column{
"id": {Name: "id", Type: "bigint"},
},
want: func(d *ColumnDiff) bool {
return len(d.Modified) == 1 && d.Modified[0].Name == "id" &&
d.Modified[0].Changes["type"] != nil
},
},
{
name: "modified column nullable",
source: map[string]*models.Column{
"name": {Name: "name", Type: "text", NotNull: true},
},
target: map[string]*models.Column{
"name": {Name: "name", Type: "text", NotNull: false},
},
want: func(d *ColumnDiff) bool {
return len(d.Modified) == 1 && d.Modified[0].Changes["not_null"] != nil
},
},
{
name: "modified column length",
source: map[string]*models.Column{
"name": {Name: "name", Type: "varchar", Length: 100},
},
target: map[string]*models.Column{
"name": {Name: "name", Type: "varchar", Length: 255},
},
want: func(d *ColumnDiff) bool {
return len(d.Modified) == 1 && d.Modified[0].Changes["length"] != nil
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareColumns(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareColumns() result doesn't match expectations")
}
})
}
}
func TestCompareColumnDetails(t *testing.T) {
tests := []struct {
name string
source *models.Column
target *models.Column
want int // number of changes
}{
{
name: "identical columns",
source: &models.Column{Name: "id", Type: "integer"},
target: &models.Column{Name: "id", Type: "integer"},
want: 0,
},
{
name: "type change",
source: &models.Column{Name: "id", Type: "integer"},
target: &models.Column{Name: "id", Type: "bigint"},
want: 1,
},
{
name: "length change",
source: &models.Column{Name: "name", Type: "varchar", Length: 100},
target: &models.Column{Name: "name", Type: "varchar", Length: 255},
want: 1,
},
{
name: "precision change",
source: &models.Column{Name: "price", Type: "numeric", Precision: 10},
target: &models.Column{Name: "price", Type: "numeric", Precision: 12},
want: 1,
},
{
name: "scale change",
source: &models.Column{Name: "price", Type: "numeric", Scale: 2},
target: &models.Column{Name: "price", Type: "numeric", Scale: 4},
want: 1,
},
{
name: "not null change",
source: &models.Column{Name: "name", Type: "text", NotNull: true},
target: &models.Column{Name: "name", Type: "text", NotNull: false},
want: 1,
},
{
name: "auto increment change",
source: &models.Column{Name: "id", Type: "integer", AutoIncrement: true},
target: &models.Column{Name: "id", Type: "integer", AutoIncrement: false},
want: 1,
},
{
name: "primary key change",
source: &models.Column{Name: "id", Type: "integer", IsPrimaryKey: true},
target: &models.Column{Name: "id", Type: "integer", IsPrimaryKey: false},
want: 1,
},
{
name: "multiple changes",
source: &models.Column{Name: "id", Type: "integer", NotNull: true, AutoIncrement: true},
target: &models.Column{Name: "id", Type: "bigint", NotNull: false, AutoIncrement: false},
want: 3,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareColumnDetails(tt.source, tt.target)
if len(got) != tt.want {
t.Errorf("compareColumnDetails() = %d changes, want %d", len(got), tt.want)
}
})
}
}
func TestCompareIndexes(t *testing.T) {
tests := []struct {
name string
source map[string]*models.Index
target map[string]*models.Index
want func(*IndexDiff) bool
}{
{
name: "identical indexes",
source: map[string]*models.Index{},
target: map[string]*models.Index{},
want: func(d *IndexDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing index",
source: map[string]*models.Index{
"idx_name": {Name: "idx_name", Columns: []string{"name"}},
},
target: map[string]*models.Index{},
want: func(d *IndexDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "idx_name"
},
},
{
name: "extra index",
source: map[string]*models.Index{},
target: map[string]*models.Index{
"idx_name": {Name: "idx_name", Columns: []string{"name"}},
},
want: func(d *IndexDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "idx_name"
},
},
{
name: "modified index uniqueness",
source: map[string]*models.Index{
"idx_name": {Name: "idx_name", Columns: []string{"name"}, Unique: false},
},
target: map[string]*models.Index{
"idx_name": {Name: "idx_name", Columns: []string{"name"}, Unique: true},
},
want: func(d *IndexDiff) bool {
return len(d.Modified) == 1 && d.Modified[0].Name == "idx_name"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareIndexes(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareIndexes() result doesn't match expectations")
}
})
}
}
func TestCompareConstraints(t *testing.T) {
tests := []struct {
name string
source map[string]*models.Constraint
target map[string]*models.Constraint
want func(*ConstraintDiff) bool
}{
{
name: "identical constraints",
source: map[string]*models.Constraint{},
target: map[string]*models.Constraint{},
want: func(d *ConstraintDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing constraint",
source: map[string]*models.Constraint{
"pk_id": {Name: "pk_id", Type: "PRIMARY KEY", Columns: []string{"id"}},
},
target: map[string]*models.Constraint{},
want: func(d *ConstraintDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "pk_id"
},
},
{
name: "extra constraint",
source: map[string]*models.Constraint{},
target: map[string]*models.Constraint{
"pk_id": {Name: "pk_id", Type: "PRIMARY KEY", Columns: []string{"id"}},
},
want: func(d *ConstraintDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "pk_id"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareConstraints(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareConstraints() result doesn't match expectations")
}
})
}
}
func TestCompareRelationships(t *testing.T) {
tests := []struct {
name string
source map[string]*models.Relationship
target map[string]*models.Relationship
want func(*RelationshipDiff) bool
}{
{
name: "identical relationships",
source: map[string]*models.Relationship{},
target: map[string]*models.Relationship{},
want: func(d *RelationshipDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing relationship",
source: map[string]*models.Relationship{
"fk_user": {Name: "fk_user", Type: "FOREIGN KEY"},
},
target: map[string]*models.Relationship{},
want: func(d *RelationshipDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "fk_user"
},
},
{
name: "extra relationship",
source: map[string]*models.Relationship{},
target: map[string]*models.Relationship{
"fk_user": {Name: "fk_user", Type: "FOREIGN KEY"},
},
want: func(d *RelationshipDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "fk_user"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareRelationships(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareRelationships() result doesn't match expectations")
}
})
}
}
func TestCompareTables(t *testing.T) {
tests := []struct {
name string
source []*models.Table
target []*models.Table
want func(*TableDiff) bool
}{
{
name: "identical tables",
source: []*models.Table{},
target: []*models.Table{},
want: func(d *TableDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing table",
source: []*models.Table{
{Name: "users", Schema: "public"},
},
target: []*models.Table{},
want: func(d *TableDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "users"
},
},
{
name: "extra table",
source: []*models.Table{},
target: []*models.Table{
{Name: "users", Schema: "public"},
},
want: func(d *TableDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "users"
},
},
{
name: "modified table",
source: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "integer"},
},
},
},
target: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "bigint"},
},
},
},
want: func(d *TableDiff) bool {
return len(d.Modified) == 1 && d.Modified[0].Name == "users"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareTables(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareTables() result doesn't match expectations")
}
})
}
}
func TestCompareSchemas(t *testing.T) {
tests := []struct {
name string
source []*models.Schema
target []*models.Schema
want func(*SchemaDiff) bool
}{
{
name: "identical schemas",
source: []*models.Schema{},
target: []*models.Schema{},
want: func(d *SchemaDiff) bool {
return len(d.Missing) == 0 && len(d.Extra) == 0 && len(d.Modified) == 0
},
},
{
name: "missing schema",
source: []*models.Schema{
{Name: "public"},
},
target: []*models.Schema{},
want: func(d *SchemaDiff) bool {
return len(d.Missing) == 1 && d.Missing[0].Name == "public"
},
},
{
name: "extra schema",
source: []*models.Schema{},
target: []*models.Schema{
{Name: "public"},
},
want: func(d *SchemaDiff) bool {
return len(d.Extra) == 1 && d.Extra[0].Name == "public"
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := compareSchemas(tt.source, tt.target)
if !tt.want(got) {
t.Errorf("compareSchemas() result doesn't match expectations")
}
})
}
}
func TestIsEmpty(t *testing.T) {
tests := []struct {
name string
v interface{}
want bool
}{
{"empty ColumnDiff", &ColumnDiff{Missing: []*models.Column{}, Extra: []*models.Column{}, Modified: []*ColumnChange{}}, true},
{"ColumnDiff with missing", &ColumnDiff{Missing: []*models.Column{{Name: "id"}}, Extra: []*models.Column{}, Modified: []*ColumnChange{}}, false},
{"ColumnDiff with extra", &ColumnDiff{Missing: []*models.Column{}, Extra: []*models.Column{{Name: "id"}}, Modified: []*ColumnChange{}}, false},
{"empty IndexDiff", &IndexDiff{Missing: []*models.Index{}, Extra: []*models.Index{}, Modified: []*IndexChange{}}, true},
{"IndexDiff with missing", &IndexDiff{Missing: []*models.Index{{Name: "idx"}}, Extra: []*models.Index{}, Modified: []*IndexChange{}}, false},
{"empty TableDiff", &TableDiff{Missing: []*models.Table{}, Extra: []*models.Table{}, Modified: []*TableChange{}}, true},
{"TableDiff with extra", &TableDiff{Missing: []*models.Table{}, Extra: []*models.Table{{Name: "users"}}, Modified: []*TableChange{}}, false},
{"empty ConstraintDiff", &ConstraintDiff{Missing: []*models.Constraint{}, Extra: []*models.Constraint{}, Modified: []*ConstraintChange{}}, true},
{"empty RelationshipDiff", &RelationshipDiff{Missing: []*models.Relationship{}, Extra: []*models.Relationship{}, Modified: []*RelationshipChange{}}, true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := isEmpty(tt.v)
if got != tt.want {
t.Errorf("isEmpty() = %v, want %v", got, tt.want)
}
})
}
}
func TestComputeSummary(t *testing.T) {
tests := []struct {
name string
result *DiffResult
want func(*Summary) bool
}{
{
name: "empty diff",
result: &DiffResult{
Schemas: &SchemaDiff{
Missing: []*models.Schema{},
Extra: []*models.Schema{},
Modified: []*SchemaChange{},
},
},
want: func(s *Summary) bool {
return s.Schemas.Missing == 0 && s.Schemas.Extra == 0 && s.Schemas.Modified == 0
},
},
{
name: "schemas with differences",
result: &DiffResult{
Schemas: &SchemaDiff{
Missing: []*models.Schema{{Name: "schema1"}},
Extra: []*models.Schema{{Name: "schema2"}, {Name: "schema3"}},
Modified: []*SchemaChange{
{Name: "public"},
},
},
},
want: func(s *Summary) bool {
return s.Schemas.Missing == 1 && s.Schemas.Extra == 2 && s.Schemas.Modified == 1
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := ComputeSummary(tt.result)
if !tt.want(got) {
t.Errorf("ComputeSummary() result doesn't match expectations")
}
})
}
}

43
pkg/diff/doc.go Normal file
View File

@@ -0,0 +1,43 @@
// Package diff provides utilities for comparing database schemas and identifying differences.
//
// # Overview
//
// The diff package compares two database models at various granularity levels (database,
// schema, table, column) and produces detailed reports of differences including:
// - Missing items (present in source but not in target)
// - Extra items (present in target but not in source)
// - Modified items (present in both but with different properties)
//
// # Usage
//
// Compare two databases and format the output:
//
// result := diff.CompareDatabases(sourceDB, targetDB)
// err := diff.FormatDiff(result, diff.FormatSummary, os.Stdout)
//
// # Output Formats
//
// The package supports multiple output formats:
// - OutputFormatText: Human-readable text format
// - OutputFormatJSON: Structured JSON output
// - OutputFormatYAML: Structured YAML output
//
// # Comparison Scope
//
// The comparison covers:
// - Schemas: Name, description, and contents
// - Tables: Name, description, and all sub-elements
// - Columns: Type, nullability, defaults, constraints
// - Indexes: Columns, uniqueness, type
// - Constraints: Type, columns, references
// - Relationships: Type, from/to tables and columns
// - Views: Definition and columns
// - Sequences: Start value, increment, min/max values
//
// # Use Cases
//
// - Schema migration planning
// - Database synchronization verification
// - Change tracking and auditing
// - CI/CD pipeline validation
package diff
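
A self-contained sketch of that flow; every exported name here appears in the package's tests below, though the two databases themselves are illustrative:

package main

import (
    "os"

    "git.warky.dev/wdevs/relspecgo/pkg/diff"
    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
    source := &models.Database{Name: "source", Schemas: []*models.Schema{{Name: "public"}}}
    target := &models.Database{Name: "target", Schemas: []*models.Schema{}}

    result := diff.CompareDatabases(source, target)
    // FormatSummary is the human-readable report; FormatJSON and FormatHTML also exist.
    if err := diff.FormatDiff(result, diff.FormatSummary, os.Stdout); err != nil {
        panic(err)
    }
}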

440
pkg/diff/formatters_test.go Normal file
View File

@@ -0,0 +1,440 @@
package diff
import (
"bytes"
"encoding/json"
"strings"
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func TestFormatDiff(t *testing.T) {
result := &DiffResult{
Source: "source_db",
Target: "target_db",
Schemas: &SchemaDiff{
Missing: []*models.Schema{},
Extra: []*models.Schema{},
Modified: []*SchemaChange{},
},
}
tests := []struct {
name string
format OutputFormat
wantErr bool
}{
{"summary format", FormatSummary, false},
{"json format", FormatJSON, false},
{"html format", FormatHTML, false},
{"invalid format", OutputFormat("invalid"), true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var buf bytes.Buffer
err := FormatDiff(result, tt.format, &buf)
if (err != nil) != tt.wantErr {
t.Errorf("FormatDiff() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !tt.wantErr && buf.Len() == 0 {
t.Error("FormatDiff() produced empty output")
}
})
}
}
func TestFormatSummary(t *testing.T) {
tests := []struct {
name string
result *DiffResult
wantStr []string // strings that should appear in output
}{
{
name: "no differences",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{},
Extra: []*models.Schema{},
Modified: []*SchemaChange{},
},
},
wantStr: []string{"source", "target", "No differences found"},
},
{
name: "with schema differences",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{{Name: "schema1"}},
Extra: []*models.Schema{{Name: "schema2"}},
Modified: []*SchemaChange{
{Name: "public"},
},
},
},
wantStr: []string{"Schemas:", "Missing: 1", "Extra: 1", "Modified: 1"},
},
{
name: "with table differences",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Modified: []*SchemaChange{
{
Name: "public",
Tables: &TableDiff{
Missing: []*models.Table{{Name: "users"}},
Extra: []*models.Table{{Name: "posts"}},
Modified: []*TableChange{
{Name: "comments", Schema: "public"},
},
},
},
},
},
},
wantStr: []string{"Tables:", "Missing: 1", "Extra: 1", "Modified: 1"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var buf bytes.Buffer
err := formatSummary(tt.result, &buf)
if err != nil {
t.Errorf("formatSummary() error = %v", err)
return
}
output := buf.String()
for _, want := range tt.wantStr {
if !strings.Contains(output, want) {
t.Errorf("formatSummary() output doesn't contain %q\nGot: %s", want, output)
}
}
})
}
}
func TestFormatJSON(t *testing.T) {
result := &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{{Name: "schema1"}},
Extra: []*models.Schema{},
Modified: []*SchemaChange{},
},
}
var buf bytes.Buffer
err := formatJSON(result, &buf)
if err != nil {
t.Errorf("formatJSON() error = %v", err)
return
}
// Check if output is valid JSON
var decoded DiffResult
if err := json.Unmarshal(buf.Bytes(), &decoded); err != nil {
t.Errorf("formatJSON() produced invalid JSON: %v", err)
}
// Check basic structure
if decoded.Source != "source" {
t.Errorf("formatJSON() source = %v, want %v", decoded.Source, "source")
}
if decoded.Target != "target" {
t.Errorf("formatJSON() target = %v, want %v", decoded.Target, "target")
}
if len(decoded.Schemas.Missing) != 1 {
t.Errorf("formatJSON() missing schemas = %v, want 1", len(decoded.Schemas.Missing))
}
}
func TestFormatHTML(t *testing.T) {
tests := []struct {
name string
result *DiffResult
wantStr []string // HTML elements/content that should appear
}{
{
name: "basic HTML structure",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{},
Extra: []*models.Schema{},
Modified: []*SchemaChange{},
},
},
wantStr: []string{
"<!DOCTYPE html>",
"<title>Database Diff Report</title>",
"source",
"target",
},
},
{
name: "with schema differences",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{{Name: "missing_schema"}},
Extra: []*models.Schema{{Name: "extra_schema"}},
Modified: []*SchemaChange{},
},
},
wantStr: []string{
"<!DOCTYPE html>",
"missing_schema",
"extra_schema",
"MISSING",
"EXTRA",
},
},
{
name: "with table modifications",
result: &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Modified: []*SchemaChange{
{
Name: "public",
Tables: &TableDiff{
Modified: []*TableChange{
{
Name: "users",
Schema: "public",
Columns: &ColumnDiff{
Missing: []*models.Column{{Name: "email", Type: "text"}},
},
},
},
},
},
},
},
},
wantStr: []string{
"public",
"users",
"email",
"text",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var buf bytes.Buffer
err := formatHTML(tt.result, &buf)
if err != nil {
t.Errorf("formatHTML() error = %v", err)
return
}
output := buf.String()
for _, want := range tt.wantStr {
if !strings.Contains(output, want) {
t.Errorf("formatHTML() output doesn't contain %q", want)
}
}
})
}
}
func TestFormatSummaryWithColumns(t *testing.T) {
result := &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Modified: []*SchemaChange{
{
Name: "public",
Tables: &TableDiff{
Modified: []*TableChange{
{
Name: "users",
Schema: "public",
Columns: &ColumnDiff{
Missing: []*models.Column{{Name: "email"}},
Extra: []*models.Column{{Name: "phone"}, {Name: "address"}},
Modified: []*ColumnChange{
{Name: "name"},
},
},
},
},
},
},
},
},
}
var buf bytes.Buffer
err := formatSummary(result, &buf)
if err != nil {
t.Errorf("formatSummary() error = %v", err)
return
}
output := buf.String()
wantStrings := []string{
"Columns:",
"Missing: 1",
"Extra: 2",
"Modified: 1",
}
for _, want := range wantStrings {
if !strings.Contains(output, want) {
t.Errorf("formatSummary() output doesn't contain %q\nGot: %s", want, output)
}
}
}
func TestFormatSummaryWithIndexes(t *testing.T) {
result := &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Modified: []*SchemaChange{
{
Name: "public",
Tables: &TableDiff{
Modified: []*TableChange{
{
Name: "users",
Schema: "public",
Indexes: &IndexDiff{
Missing: []*models.Index{{Name: "idx_email"}},
Extra: []*models.Index{{Name: "idx_phone"}},
Modified: []*IndexChange{{Name: "idx_name"}},
},
},
},
},
},
},
},
}
var buf bytes.Buffer
err := formatSummary(result, &buf)
if err != nil {
t.Errorf("formatSummary() error = %v", err)
return
}
output := buf.String()
if !strings.Contains(output, "Indexes:") {
t.Error("formatSummary() output doesn't contain Indexes section")
}
if !strings.Contains(output, "Missing: 1") {
t.Error("formatSummary() output doesn't contain correct missing count")
}
}
func TestFormatSummaryWithConstraints(t *testing.T) {
result := &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Modified: []*SchemaChange{
{
Name: "public",
Tables: &TableDiff{
Modified: []*TableChange{
{
Name: "users",
Schema: "public",
Constraints: &ConstraintDiff{
Missing: []*models.Constraint{{Name: "pk_users", Type: "PRIMARY KEY"}},
Extra: []*models.Constraint{{Name: "fk_users_roles", Type: "FOREIGN KEY"}},
},
},
},
},
},
},
},
}
var buf bytes.Buffer
err := formatSummary(result, &buf)
if err != nil {
t.Errorf("formatSummary() error = %v", err)
return
}
output := buf.String()
if !strings.Contains(output, "Constraints:") {
t.Error("formatSummary() output doesn't contain Constraints section")
}
}
func TestFormatJSONIndentation(t *testing.T) {
result := &DiffResult{
Source: "source",
Target: "target",
Schemas: &SchemaDiff{
Missing: []*models.Schema{{Name: "test"}},
},
}
var buf bytes.Buffer
err := formatJSON(result, &buf)
if err != nil {
t.Errorf("formatJSON() error = %v", err)
return
}
// Check that JSON is indented (has newlines and spaces)
output := buf.String()
if !strings.Contains(output, "\n") {
t.Error("formatJSON() should produce indented JSON with newlines")
}
if !strings.Contains(output, " ") {
t.Error("formatJSON() should produce indented JSON with spaces")
}
}
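// Plausible sketch (assumption, not the package's verified source): a
// formatJSON that satisfies the indentation checks above by writing through
// an indenting encoder. Assumes "encoding/json" and "io" are imported.
func formatJSON(result *DiffResult, w io.Writer) error {
enc := json.NewEncoder(w)
enc.SetIndent("", "  ")
return enc.Encode(result)
}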
func TestOutputFormatConstants(t *testing.T) {
tests := []struct {
name string
format OutputFormat
want string
}{
{"summary constant", FormatSummary, "summary"},
{"json constant", FormatJSON, "json"},
{"html constant", FormatHTML, "html"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if string(tt.format) != tt.want {
t.Errorf("OutputFormat %v = %v, want %v", tt.name, tt.format, tt.want)
}
})
}
}
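// Plausible sketch of the declarations these cases pin down; the actual
// source file is not part of this diff.
type OutputFormat string

const (
FormatSummary OutputFormat = "summary"
FormatJSON    OutputFormat = "json"
FormatHTML    OutputFormat = "html"
)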

pkg/inspector/doc.go Normal file

@@ -0,0 +1,40 @@
// Package inspector provides database introspection capabilities for live databases.
//
// # Overview
//
// The inspector package contains utilities for connecting to live databases and
// extracting their schema information through system catalog queries and metadata
// inspection.
//
// # Features
//
// - Database connection management
// - Schema metadata extraction
// - Table structure analysis
// - Constraint and index discovery
// - Foreign key relationship mapping
//
// # Supported Databases
//
// - PostgreSQL (via pgx driver)
// - SQLite (via modernc.org/sqlite driver)
//
// # Usage
//
// This package is used internally by database readers (pgsql, sqlite) to perform
// live schema introspection:
//
// insp := inspector.NewPostgreSQLInspector(connString)
// schemas, err := insp.GetSchemas()
// tables, err := insp.GetTables(schemaName)
//
// # Architecture
//
// Each database type has its own inspector implementation that understands the
// specific system catalogs and metadata structures of that database system.
//
// # Security
//
// Inspectors use read-only operations and never modify database structure.
// Connection credentials should be handled securely.
package inspector
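
A minimal end-to-end caller, sketched from the usage block in the doc comment above. The constructor name, the method shapes, and the connection string are assumptions taken from that comment, not verified against the package source:

package main
import (
"fmt"
"log"
"git.warky.dev/wdevs/relspecgo/pkg/inspector"
)
func main() {
insp := inspector.NewPostgreSQLInspector("postgres://user:pass@localhost:5432/app")
schemas, err := insp.GetSchemas()
if err != nil {
log.Fatalf("schema introspection failed: %v", err)
}
for _, schema := range schemas {
tables, err := insp.GetTables(schema)
if err != nil {
log.Fatalf("table introspection for %v failed: %v", schema, err)
}
fmt.Printf("%v: %d tables\n", schema, len(tables))
}
}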


@@ -0,0 +1,238 @@
package inspector
import (
"testing"
)
func TestNewInspector(t *testing.T) {
db := createTestDatabase()
config := GetDefaultConfig()
inspector := NewInspector(db, config)
if inspector == nil {
t.Fatal("NewInspector() returned nil")
}
if inspector.db != db {
t.Error("NewInspector() database not set correctly")
}
if inspector.config != config {
t.Error("NewInspector() config not set correctly")
}
}
func TestInspect(t *testing.T) {
db := createTestDatabase()
config := GetDefaultConfig()
inspector := NewInspector(db, config)
report, err := inspector.Inspect()
if err != nil {
t.Fatalf("Inspect() returned error: %v", err)
}
if report == nil {
t.Fatal("Inspect() returned nil report")
}
if report.Database != db.Name {
t.Errorf("Inspect() report.Database = %q, want %q", report.Database, db.Name)
}
if report.Summary.TotalRules != len(config.Rules) {
t.Errorf("Inspect() TotalRules = %d, want %d", report.Summary.TotalRules, len(config.Rules))
}
if len(report.Violations) == 0 {
t.Error("Inspect() returned no violations, expected some results")
}
}
func TestInspectWithDisabledRules(t *testing.T) {
db := createTestDatabase()
config := GetDefaultConfig()
// Disable all rules
for name := range config.Rules {
rule := config.Rules[name]
rule.Enabled = "off"
config.Rules[name] = rule
}
inspector := NewInspector(db, config)
report, err := inspector.Inspect()
if err != nil {
t.Fatalf("Inspect() with disabled rules returned error: %v", err)
}
if report.Summary.RulesChecked != 0 {
t.Errorf("Inspect() RulesChecked = %d, want 0 (all disabled)", report.Summary.RulesChecked)
}
if report.Summary.RulesSkipped != len(config.Rules) {
t.Errorf("Inspect() RulesSkipped = %d, want %d", report.Summary.RulesSkipped, len(config.Rules))
}
}
func TestInspectWithEnforcedRules(t *testing.T) {
db := createTestDatabase()
config := GetDefaultConfig()
// Enable only one rule and enforce it
for name := range config.Rules {
rule := config.Rules[name]
rule.Enabled = "off"
config.Rules[name] = rule
}
primaryKeyRule := config.Rules["primary_key_naming"]
primaryKeyRule.Enabled = "enforce"
primaryKeyRule.Pattern = "^id$"
config.Rules["primary_key_naming"] = primaryKeyRule
inspector := NewInspector(db, config)
report, err := inspector.Inspect()
if err != nil {
t.Fatalf("Inspect() returned error: %v", err)
}
if report.Summary.RulesChecked != 1 {
t.Errorf("Inspect() RulesChecked = %d, want 1", report.Summary.RulesChecked)
}
// All results should be at error level for enforced rules
for _, violation := range report.Violations {
if violation.Level != "error" {
t.Errorf("Enforced rule violation has Level = %q, want \"error\"", violation.Level)
}
}
}
func TestGenerateSummary(t *testing.T) {
db := createTestDatabase()
config := GetDefaultConfig()
inspector := NewInspector(db, config)
results := []ValidationResult{
{RuleName: "rule1", Passed: true, Level: "error"},
{RuleName: "rule2", Passed: false, Level: "error"},
{RuleName: "rule3", Passed: false, Level: "warning"},
{RuleName: "rule4", Passed: true, Level: "warning"},
}
summary := inspector.generateSummary(results)
if summary.PassedCount != 2 {
t.Errorf("generateSummary() PassedCount = %d, want 2", summary.PassedCount)
}
if summary.ErrorCount != 1 {
t.Errorf("generateSummary() ErrorCount = %d, want 1", summary.ErrorCount)
}
if summary.WarningCount != 1 {
t.Errorf("generateSummary() WarningCount = %d, want 1", summary.WarningCount)
}
}
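// Plausible sketch of a generateSummary consistent with the counts asserted
// above: passed results increment PassedCount, failures bucket by level. The
// package's actual implementation may differ.
func (i *Inspector) generateSummary(results []ValidationResult) ReportSummary {
var s ReportSummary
for _, r := range results {
switch {
case r.Passed:
s.PassedCount++
case r.Level == "error":
s.ErrorCount++
case r.Level == "warning":
s.WarningCount++
}
}
return s
}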
func TestHasErrors(t *testing.T) {
tests := []struct {
name string
report *InspectorReport
want bool
}{
{
name: "with errors",
report: &InspectorReport{
Summary: ReportSummary{
ErrorCount: 5,
},
},
want: true,
},
{
name: "without errors",
report: &InspectorReport{
Summary: ReportSummary{
ErrorCount: 0,
WarningCount: 3,
},
},
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.report.HasErrors(); got != tt.want {
t.Errorf("HasErrors() = %v, want %v", got, tt.want)
}
})
}
}
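// Plausible sketch of the accessor these cases imply: warnings alone do not
// count as errors.
func (r *InspectorReport) HasErrors() bool { return r.Summary.ErrorCount > 0 }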
func TestGetValidator(t *testing.T) {
tests := []struct {
name string
functionName string
wantExists bool
}{
{"primary_key_naming", "primary_key_naming", true},
{"primary_key_datatype", "primary_key_datatype", true},
{"foreign_key_column_naming", "foreign_key_column_naming", true},
{"table_regexpr", "table_regexpr", true},
{"column_regexpr", "column_regexpr", true},
{"reserved_words", "reserved_words", true},
{"have_primary_key", "have_primary_key", true},
{"orphaned_foreign_key", "orphaned_foreign_key", true},
{"circular_dependency", "circular_dependency", true},
{"unknown_function", "unknown_function", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, exists := getValidator(tt.functionName)
if exists != tt.wantExists {
t.Errorf("getValidator(%q) exists = %v, want %v", tt.functionName, exists, tt.wantExists)
}
})
}
}
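// Plausible sketch of getValidator as a registry lookup keyed by the function
// names enumerated above. The validatorFunc signature is inferred from the
// validate* helpers exercised elsewhere in this package's tests; the real
// registry may differ.
type validatorFunc func(*models.Database, Rule, string) []ValidationResult

var validators = map[string]validatorFunc{
"primary_key_naming":        validatePrimaryKeyNaming,
"foreign_key_column_naming": validateForeignKeyColumnNaming,
// ...remaining entries elided...
}

func getValidator(name string) (validatorFunc, bool) {
v, ok := validators[name]
return v, ok
}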
func TestCreateResult(t *testing.T) {
result := createResult(
"test_rule",
true,
"Test message",
"schema.table.column",
map[string]interface{}{
"key1": "value1",
"key2": 42,
},
)
if result.RuleName != "test_rule" {
t.Errorf("createResult() RuleName = %q, want \"test_rule\"", result.RuleName)
}
if !result.Passed {
t.Error("createResult() Passed = false, want true")
}
if result.Message != "Test message" {
t.Errorf("createResult() Message = %q, want \"Test message\"", result.Message)
}
if result.Location != "schema.table.column" {
t.Errorf("createResult() Location = %q, want \"schema.table.column\"", result.Location)
}
if len(result.Context) != 2 {
t.Errorf("createResult() Context length = %d, want 2", len(result.Context))
}
}


@@ -0,0 +1,366 @@
package inspector
import (
"bytes"
"encoding/json"
"strings"
"testing"
"time"
)
func createTestReport() *InspectorReport {
return &InspectorReport{
Summary: ReportSummary{
TotalRules: 10,
RulesChecked: 8,
RulesSkipped: 2,
ErrorCount: 3,
WarningCount: 5,
PassedCount: 12,
},
Violations: []ValidationResult{
{
RuleName: "primary_key_naming",
Level: "error",
Message: "Primary key should start with 'id_'",
Location: "public.users.user_id",
Passed: false,
Context: map[string]interface{}{
"schema": "public",
"table": "users",
"column": "user_id",
"pattern": "^id_",
},
},
{
RuleName: "table_name_length",
Level: "warning",
Message: "Table name too long",
Location: "public.very_long_table_name_that_exceeds_limits",
Passed: false,
Context: map[string]interface{}{
"schema": "public",
"table": "very_long_table_name_that_exceeds_limits",
"length": 44,
"max_length": 32,
},
},
},
GeneratedAt: time.Now(),
Database: "testdb",
SourceFormat: "postgresql",
}
}
func TestNewMarkdownFormatter(t *testing.T) {
var buf bytes.Buffer
formatter := NewMarkdownFormatter(&buf)
if formatter == nil {
t.Fatal("NewMarkdownFormatter() returned nil")
}
// Buffer is not a terminal, so colors should be disabled
if formatter.UseColors {
t.Error("NewMarkdownFormatter() UseColors should be false for non-terminal")
}
}
func TestNewJSONFormatter(t *testing.T) {
formatter := NewJSONFormatter()
if formatter == nil {
t.Fatal("NewJSONFormatter() returned nil")
}
}
func TestMarkdownFormatter_Format(t *testing.T) {
report := createTestReport()
var buf bytes.Buffer
formatter := NewMarkdownFormatter(&buf)
output, err := formatter.Format(report)
if err != nil {
t.Fatalf("MarkdownFormatter.Format() returned error: %v", err)
}
// Check that output contains expected sections
if !strings.Contains(output, "# RelSpec Inspector Report") {
t.Error("Markdown output missing header")
}
if !strings.Contains(output, "Database:") {
t.Error("Markdown output missing database field")
}
if !strings.Contains(output, "testdb") {
t.Error("Markdown output missing database name")
}
if !strings.Contains(output, "Summary") {
t.Error("Markdown output missing summary section")
}
if !strings.Contains(output, "Rules Checked: 8") {
t.Error("Markdown output missing rules checked count")
}
if !strings.Contains(output, "Errors: 3") {
t.Error("Markdown output missing error count")
}
if !strings.Contains(output, "Warnings: 5") {
t.Error("Markdown output missing warning count")
}
if !strings.Contains(output, "Violations") {
t.Error("Markdown output missing violations section")
}
if !strings.Contains(output, "primary_key_naming") {
t.Error("Markdown output missing rule name")
}
if !strings.Contains(output, "public.users.user_id") {
t.Error("Markdown output missing location")
}
}
func TestMarkdownFormatter_FormatNoViolations(t *testing.T) {
report := &InspectorReport{
Summary: ReportSummary{
TotalRules: 10,
RulesChecked: 10,
RulesSkipped: 0,
ErrorCount: 0,
WarningCount: 0,
PassedCount: 50,
},
Violations: []ValidationResult{},
GeneratedAt: time.Now(),
Database: "testdb",
SourceFormat: "postgresql",
}
var buf bytes.Buffer
formatter := NewMarkdownFormatter(&buf)
output, err := formatter.Format(report)
if err != nil {
t.Fatalf("MarkdownFormatter.Format() returned error: %v", err)
}
if !strings.Contains(output, "No violations found") {
t.Error("Markdown output should indicate no violations")
}
}
func TestJSONFormatter_Format(t *testing.T) {
report := createTestReport()
formatter := NewJSONFormatter()
output, err := formatter.Format(report)
if err != nil {
t.Fatalf("JSONFormatter.Format() returned error: %v", err)
}
// Verify it's valid JSON
var decoded InspectorReport
if err := json.Unmarshal([]byte(output), &decoded); err != nil {
t.Fatalf("JSONFormatter.Format() produced invalid JSON: %v", err)
}
// Check key fields
if decoded.Database != "testdb" {
t.Errorf("JSON decoded Database = %q, want \"testdb\"", decoded.Database)
}
if decoded.Summary.ErrorCount != 3 {
t.Errorf("JSON decoded ErrorCount = %d, want 3", decoded.Summary.ErrorCount)
}
if len(decoded.Violations) != 2 {
t.Errorf("JSON decoded Violations length = %d, want 2", len(decoded.Violations))
}
}
func TestMarkdownFormatter_FormatHeader(t *testing.T) {
var buf bytes.Buffer
formatter := NewMarkdownFormatter(&buf)
header := formatter.formatHeader("Test Header")
if !strings.Contains(header, "# Test Header") {
t.Errorf("formatHeader() = %q, want to contain \"# Test Header\"", header)
}
}
func TestMarkdownFormatter_FormatBold(t *testing.T) {
tests := []struct {
name string
useColors bool
text string
wantContains string
}{
{
name: "without colors",
useColors: false,
text: "Bold Text",
wantContains: "**Bold Text**",
},
{
name: "with colors",
useColors: true,
text: "Bold Text",
wantContains: "Bold Text",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
formatter := &MarkdownFormatter{UseColors: tt.useColors}
result := formatter.formatBold(tt.text)
if !strings.Contains(result, tt.wantContains) {
t.Errorf("formatBold() = %q, want to contain %q", result, tt.wantContains)
}
})
}
}
func TestMarkdownFormatter_Colorize(t *testing.T) {
tests := []struct {
name string
useColors bool
text string
color string
wantColor bool
}{
{
name: "without colors",
useColors: false,
text: "Test",
color: colorRed,
wantColor: false,
},
{
name: "with colors",
useColors: true,
text: "Test",
color: colorRed,
wantColor: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
formatter := &MarkdownFormatter{UseColors: tt.useColors}
result := formatter.colorize(tt.text, tt.color)
hasColor := strings.Contains(result, tt.color)
if hasColor != tt.wantColor {
t.Errorf("colorize() has color codes = %v, want %v", hasColor, tt.wantColor)
}
if !strings.Contains(result, tt.text) {
t.Errorf("colorize() doesn't contain original text %q", tt.text)
}
})
}
}
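// Plausible sketch of a colorize consistent with both cases above: wrap the
// text in the ANSI code and a reset only when colors are enabled.
func (f *MarkdownFormatter) colorize(text, color string) string {
if !f.UseColors {
return text
}
return color + text + colorReset
}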
func TestMarkdownFormatter_FormatContext(t *testing.T) {
formatter := &MarkdownFormatter{UseColors: false}
context := map[string]interface{}{
"schema": "public",
"table": "users",
"column": "id",
"pattern": "^id_",
"max_length": 64,
}
result := formatter.formatContext(context)
// Should not include schema, table, column (they're in location)
if strings.Contains(result, "schema") {
t.Error("formatContext() should skip schema field")
}
if strings.Contains(result, "table=") {
t.Error("formatContext() should skip table field")
}
if strings.Contains(result, "column=") {
t.Error("formatContext() should skip column field")
}
// Should include other fields
if !strings.Contains(result, "pattern") {
t.Error("formatContext() should include pattern field")
}
if !strings.Contains(result, "max_length") {
t.Error("formatContext() should include max_length field")
}
}
func TestMarkdownFormatter_FormatViolation(t *testing.T) {
formatter := &MarkdownFormatter{UseColors: false}
violation := ValidationResult{
RuleName: "test_rule",
Level: "error",
Message: "Test violation message",
Location: "public.users.id",
Passed: false,
Context: map[string]interface{}{
"pattern": "^id_",
},
}
result := formatter.formatViolation(violation, colorRed)
if !strings.Contains(result, "test_rule") {
t.Error("formatViolation() should include rule name")
}
if !strings.Contains(result, "Test violation message") {
t.Error("formatViolation() should include message")
}
if !strings.Contains(result, "public.users.id") {
t.Error("formatViolation() should include location")
}
if !strings.Contains(result, "Location:") {
t.Error("formatViolation() should include Location label")
}
if !strings.Contains(result, "Message:") {
t.Error("formatViolation() should include Message label")
}
}
func TestReportFormatConstants(t *testing.T) {
// Test that color constants are defined
if colorReset == "" {
t.Error("colorReset is not defined")
}
if colorRed == "" {
t.Error("colorRed is not defined")
}
if colorYellow == "" {
t.Error("colorYellow is not defined")
}
if colorGreen == "" {
t.Error("colorGreen is not defined")
}
if colorBold == "" {
t.Error("colorBold is not defined")
}
}

pkg/inspector/rules_test.go Normal file

@@ -0,0 +1,249 @@
package inspector
import (
"os"
"path/filepath"
"testing"
)
func TestGetDefaultConfig(t *testing.T) {
config := GetDefaultConfig()
if config == nil {
t.Fatal("GetDefaultConfig() returned nil")
}
if config.Version != "1.0" {
t.Errorf("GetDefaultConfig() Version = %q, want \"1.0\"", config.Version)
}
if len(config.Rules) == 0 {
t.Error("GetDefaultConfig() returned no rules")
}
// Check that all expected rules are present
expectedRules := []string{
"primary_key_naming",
"primary_key_datatype",
"primary_key_auto_increment",
"foreign_key_column_naming",
"foreign_key_constraint_naming",
"foreign_key_index",
"table_naming_case",
"column_naming_case",
"table_name_length",
"column_name_length",
"reserved_keywords",
"missing_primary_key",
"orphaned_foreign_key",
"circular_dependency",
}
for _, ruleName := range expectedRules {
if _, exists := config.Rules[ruleName]; !exists {
t.Errorf("GetDefaultConfig() missing rule: %q", ruleName)
}
}
}
func TestLoadConfig_NonExistentFile(t *testing.T) {
// Try to load a non-existent file
config, err := LoadConfig("/path/to/nonexistent/file.yaml")
if err != nil {
t.Fatalf("LoadConfig() with non-existent file returned error: %v", err)
}
// Should return default config
if config == nil {
t.Fatal("LoadConfig() returned nil config for non-existent file")
}
if len(config.Rules) == 0 {
t.Error("LoadConfig() returned config with no rules")
}
}
func TestLoadConfig_ValidFile(t *testing.T) {
// Create a temporary config file
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "test-config.yaml")
configContent := `version: "1.0"
rules:
  primary_key_naming:
    enabled: "enforce"
    function: "primary_key_naming"
    pattern: "^pk_"
    message: "Primary keys must start with pk_"
  table_name_length:
    enabled: "warn"
    function: "table_name_length"
    max_length: 50
    message: "Table name too long"
`
err := os.WriteFile(configPath, []byte(configContent), 0644)
if err != nil {
t.Fatalf("Failed to create test config file: %v", err)
}
config, err := LoadConfig(configPath)
if err != nil {
t.Fatalf("LoadConfig() returned error: %v", err)
}
if config.Version != "1.0" {
t.Errorf("LoadConfig() Version = %q, want \"1.0\"", config.Version)
}
if len(config.Rules) != 2 {
t.Errorf("LoadConfig() loaded %d rules, want 2", len(config.Rules))
}
// Check primary_key_naming rule
pkRule, exists := config.Rules["primary_key_naming"]
if !exists {
t.Fatal("LoadConfig() missing primary_key_naming rule")
}
if pkRule.Enabled != "enforce" {
t.Errorf("primary_key_naming.Enabled = %q, want \"enforce\"", pkRule.Enabled)
}
if pkRule.Pattern != "^pk_" {
t.Errorf("primary_key_naming.Pattern = %q, want \"^pk_\"", pkRule.Pattern)
}
// Check table_name_length rule
lengthRule, exists := config.Rules["table_name_length"]
if !exists {
t.Fatal("LoadConfig() missing table_name_length rule")
}
if lengthRule.MaxLength != 50 {
t.Errorf("table_name_length.MaxLength = %d, want 50", lengthRule.MaxLength)
}
}
func TestLoadConfig_InvalidYAML(t *testing.T) {
// Create a temporary invalid config file
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "invalid-config.yaml")
invalidContent := `invalid: yaml: content: {[}]`
err := os.WriteFile(configPath, []byte(invalidContent), 0644)
if err != nil {
t.Fatalf("Failed to create test config file: %v", err)
}
_, err = LoadConfig(configPath)
if err == nil {
t.Error("LoadConfig() with invalid YAML did not return error")
}
}
func TestRuleIsEnabled(t *testing.T) {
tests := []struct {
name string
rule Rule
want bool
}{
{
name: "enforce is enabled",
rule: Rule{Enabled: "enforce"},
want: true,
},
{
name: "warn is enabled",
rule: Rule{Enabled: "warn"},
want: true,
},
{
name: "off is not enabled",
rule: Rule{Enabled: "off"},
want: false,
},
{
name: "empty is not enabled",
rule: Rule{Enabled: ""},
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.rule.IsEnabled(); got != tt.want {
t.Errorf("Rule.IsEnabled() = %v, want %v", got, tt.want)
}
})
}
}
func TestRuleIsEnforced(t *testing.T) {
tests := []struct {
name string
rule Rule
want bool
}{
{
name: "enforce is enforced",
rule: Rule{Enabled: "enforce"},
want: true,
},
{
name: "warn is not enforced",
rule: Rule{Enabled: "warn"},
want: false,
},
{
name: "off is not enforced",
rule: Rule{Enabled: "off"},
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.rule.IsEnforced(); got != tt.want {
t.Errorf("Rule.IsEnforced() = %v, want %v", got, tt.want)
}
})
}
}
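// Plausible sketch of the two predicates these tables pin down: "enforce" and
// "warn" are enabled, and only "enforce" is enforced.
func (r Rule) IsEnabled() bool { return r.Enabled == "enforce" || r.Enabled == "warn" }

func (r Rule) IsEnforced() bool { return r.Enabled == "enforce" }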
func TestDefaultConfigRuleSettings(t *testing.T) {
config := GetDefaultConfig()
// Test specific rule settings
pkNamingRule := config.Rules["primary_key_naming"]
if pkNamingRule.Function != "primary_key_naming" {
t.Errorf("primary_key_naming.Function = %q, want \"primary_key_naming\"", pkNamingRule.Function)
}
if pkNamingRule.Pattern != "^id_" {
t.Errorf("primary_key_naming.Pattern = %q, want \"^id_\"", pkNamingRule.Pattern)
}
// Test datatype rule
pkDatatypeRule := config.Rules["primary_key_datatype"]
if len(pkDatatypeRule.AllowedTypes) == 0 {
t.Error("primary_key_datatype has no allowed types")
}
// Test length rule
tableLengthRule := config.Rules["table_name_length"]
if tableLengthRule.MaxLength != 64 {
t.Errorf("table_name_length.MaxLength = %d, want 64", tableLengthRule.MaxLength)
}
// Test reserved keywords rule
reservedRule := config.Rules["reserved_keywords"]
if !reservedRule.CheckTables {
t.Error("reserved_keywords.CheckTables should be true")
}
if !reservedRule.CheckColumns {
t.Error("reserved_keywords.CheckColumns should be true")
}
}


@@ -0,0 +1,837 @@
package inspector
import (
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// Helper function to create test database
func createTestDatabase() *models.Database {
return &models.Database{
Name: "testdb",
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Columns: map[string]*models.Column{
"id": {
Name: "id",
Type: "bigserial",
IsPrimaryKey: true,
AutoIncrement: true,
},
"username": {
Name: "username",
Type: "varchar(50)",
NotNull: true,
IsPrimaryKey: false,
},
"rid_organization": {
Name: "rid_organization",
Type: "bigint",
NotNull: true,
IsPrimaryKey: false,
},
},
Constraints: map[string]*models.Constraint{
"fk_users_organization": {
Name: "fk_users_organization",
Type: models.ForeignKeyConstraint,
Columns: []string{"rid_organization"},
ReferencedTable: "organizations",
ReferencedSchema: "public",
ReferencedColumns: []string{"id"},
},
},
Indexes: map[string]*models.Index{
"idx_rid_organization": {
Name: "idx_rid_organization",
Columns: []string{"rid_organization"},
},
},
},
{
Name: "organizations",
Columns: map[string]*models.Column{
"id": {
Name: "id",
Type: "bigserial",
IsPrimaryKey: true,
AutoIncrement: true,
},
"name": {
Name: "name",
Type: "varchar(100)",
NotNull: true,
IsPrimaryKey: false,
},
},
},
},
},
},
}
}
func TestValidatePrimaryKeyNaming(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "matching pattern id",
rule: Rule{
Pattern: "^id$",
Message: "Primary key should be 'id'",
},
wantLen: 2,
wantPass: true,
},
{
name: "non-matching pattern id_",
rule: Rule{
Pattern: "^id_",
Message: "Primary key should start with 'id_'",
},
wantLen: 2,
wantPass: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validatePrimaryKeyNaming(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validatePrimaryKeyNaming() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validatePrimaryKeyNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidatePrimaryKeyDatatype(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "allowed type bigserial",
rule: Rule{
AllowedTypes: []string{"bigserial", "bigint", "int"},
Message: "Primary key should use integer types",
},
wantLen: 2,
wantPass: true,
},
{
name: "disallowed type",
rule: Rule{
AllowedTypes: []string{"uuid"},
Message: "Primary key should use UUID",
},
wantLen: 2,
wantPass: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validatePrimaryKeyDatatype(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validatePrimaryKeyDatatype() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validatePrimaryKeyDatatype() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidatePrimaryKeyAutoIncrement(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
}{
{
name: "require auto increment",
rule: Rule{
RequireAutoIncrement: true,
Message: "Primary key should have auto-increment",
},
wantLen: 0, // No violations - all PKs have auto-increment
},
{
name: "disallow auto increment",
rule: Rule{
RequireAutoIncrement: false,
Message: "Primary key should not have auto-increment",
},
wantLen: 2, // 2 violations - both PKs have auto-increment
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validatePrimaryKeyAutoIncrement(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validatePrimaryKeyAutoIncrement() returned %d results, want %d", len(results), tt.wantLen)
}
})
}
}
func TestValidateForeignKeyColumnNaming(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "matching pattern rid_",
rule: Rule{
Pattern: "^rid_",
Message: "Foreign key columns should start with 'rid_'",
},
wantLen: 1,
wantPass: true,
},
{
name: "non-matching pattern fk_",
rule: Rule{
Pattern: "^fk_",
Message: "Foreign key columns should start with 'fk_'",
},
wantLen: 1,
wantPass: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateForeignKeyColumnNaming(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateForeignKeyColumnNaming() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validateForeignKeyColumnNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidateForeignKeyConstraintNaming(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "matching pattern fk_",
rule: Rule{
Pattern: "^fk_",
Message: "Foreign key constraints should start with 'fk_'",
},
wantLen: 1,
wantPass: true,
},
{
name: "non-matching pattern FK_",
rule: Rule{
Pattern: "^FK_",
Message: "Foreign key constraints should start with 'FK_'",
},
wantLen: 1,
wantPass: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateForeignKeyConstraintNaming(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateForeignKeyConstraintNaming() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validateForeignKeyConstraintNaming() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidateForeignKeyIndex(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "require index with index present",
rule: Rule{
RequireIndex: true,
Message: "Foreign key columns should have indexes",
},
wantLen: 1,
wantPass: true,
},
{
name: "no requirement",
rule: Rule{
RequireIndex: false,
Message: "Foreign key index check disabled",
},
wantLen: 0,
wantPass: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateForeignKeyIndex(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateForeignKeyIndex() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validateForeignKeyIndex() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidateTableNamingCase(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "lowercase snake_case pattern",
rule: Rule{
Pattern: "^[a-z][a-z0-9_]*$",
Case: "lowercase",
Message: "Table names should be lowercase snake_case",
},
wantLen: 2,
wantPass: true,
},
{
name: "uppercase pattern",
rule: Rule{
Pattern: "^[A-Z][A-Z0-9_]*$",
Case: "uppercase",
Message: "Table names should be uppercase",
},
wantLen: 2,
wantPass: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateTableNamingCase(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateTableNamingCase() returned %d results, want %d", len(results), tt.wantLen)
}
if len(results) > 0 && results[0].Passed != tt.wantPass {
t.Errorf("validateTableNamingCase() passed=%v, want %v", results[0].Passed, tt.wantPass)
}
})
}
}
func TestValidateColumnNamingCase(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "lowercase snake_case pattern",
rule: Rule{
Pattern: "^[a-z][a-z0-9_]*$",
Case: "lowercase",
Message: "Column names should be lowercase snake_case",
},
wantLen: 5, // 5 total columns across both tables
wantPass: true,
},
{
name: "camelCase pattern",
rule: Rule{
Pattern: "^[a-z][a-zA-Z0-9]*$",
Case: "camelCase",
Message: "Column names should be camelCase",
},
wantLen: 5,
wantPass: false, // rid_organization has underscore
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateColumnNamingCase(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateColumnNamingCase() returned %d results, want %d", len(results), tt.wantLen)
}
})
}
}
func TestValidateTableNameLength(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "max length 64",
rule: Rule{
MaxLength: 64,
Message: "Table name too long",
},
wantLen: 2,
wantPass: true,
},
{
name: "max length 5",
rule: Rule{
MaxLength: 5,
Message: "Table name too long",
},
wantLen: 2,
wantPass: false, // "users" is 5 chars (passes), "organizations" is 13 (fails)
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateTableNameLength(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateTableNameLength() returned %d results, want %d", len(results), tt.wantLen)
}
})
}
}
func TestValidateColumnNameLength(t *testing.T) {
db := createTestDatabase()
tests := []struct {
name string
rule Rule
wantLen int
wantPass bool
}{
{
name: "max length 64",
rule: Rule{
MaxLength: 64,
Message: "Column name too long",
},
wantLen: 5,
wantPass: true,
},
{
name: "max length 5",
rule: Rule{
MaxLength: 5,
Message: "Column name too long",
},
wantLen: 5,
wantPass: false, // Some columns exceed 5 chars
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateColumnNameLength(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateColumnNameLength() returned %d results, want %d", len(results), tt.wantLen)
}
})
}
}
func TestValidateReservedKeywords(t *testing.T) {
// Create a database with reserved keywords
db := &models.Database{
Name: "testdb",
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "user", // "user" is a reserved keyword
Columns: map[string]*models.Column{
"id": {
Name: "id",
Type: "bigint",
IsPrimaryKey: true,
},
"select": { // "select" is a reserved keyword
Name: "select",
Type: "varchar(50)",
},
},
},
},
},
},
}
tests := []struct {
name string
rule Rule
wantLen int
checkPasses bool
}{
{
name: "check tables only",
rule: Rule{
CheckTables: true,
CheckColumns: false,
Message: "Reserved keyword used",
},
wantLen: 1, // "user" table
checkPasses: false,
},
{
name: "check columns only",
rule: Rule{
CheckTables: false,
CheckColumns: true,
Message: "Reserved keyword used",
},
wantLen: 2, // "id", "select" columns (id passes, select fails)
checkPasses: false,
},
{
name: "check both",
rule: Rule{
CheckTables: true,
CheckColumns: true,
Message: "Reserved keyword used",
},
wantLen: 3, // "user" table + "id", "select" columns
checkPasses: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results := validateReservedKeywords(db, tt.rule, "test_rule")
if len(results) != tt.wantLen {
t.Errorf("validateReservedKeywords() returned %d results, want %d", len(results), tt.wantLen)
}
})
}
}
func TestValidateMissingPrimaryKey(t *testing.T) {
// Create database with and without primary keys
db := &models.Database{
Name: "testdb",
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "with_pk",
Columns: map[string]*models.Column{
"id": {
Name: "id",
Type: "bigint",
IsPrimaryKey: true,
},
},
},
{
Name: "without_pk",
Columns: map[string]*models.Column{
"name": {
Name: "name",
Type: "varchar(50)",
},
},
},
},
},
},
}
rule := Rule{
Message: "Table missing primary key",
}
results := validateMissingPrimaryKey(db, rule, "test_rule")
if len(results) != 2 {
t.Errorf("validateMissingPrimaryKey() returned %d results, want 2", len(results))
}
// First result should pass (with_pk has PK)
if !results[0].Passed {
t.Errorf("validateMissingPrimaryKey() result[0].Passed=%v, want true", results[0].Passed)
}
// Second result should fail (without_pk missing PK)
if results[1].Passed {
t.Errorf("validateMissingPrimaryKey() result[1].Passed=%v, want false", results[1].Passed)
}
}
func TestValidateOrphanedForeignKey(t *testing.T) {
// Create database with orphaned FK
db := &models.Database{
Name: "testdb",
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Columns: map[string]*models.Column{
"id": {
Name: "id",
Type: "bigint",
IsPrimaryKey: true,
},
},
Constraints: map[string]*models.Constraint{
"fk_nonexistent": {
Name: "fk_nonexistent",
Type: models.ForeignKeyConstraint,
Columns: []string{"rid_organization"},
ReferencedTable: "nonexistent_table",
ReferencedSchema: "public",
},
},
},
},
},
},
}
rule := Rule{
Message: "Foreign key references non-existent table",
}
results := validateOrphanedForeignKey(db, rule, "test_rule")
if len(results) != 1 {
t.Errorf("validateOrphanedForeignKey() returned %d results, want 1", len(results))
}
if results[0].Passed {
t.Errorf("validateOrphanedForeignKey() passed=%v, want false", results[0].Passed)
}
}
func TestValidateCircularDependency(t *testing.T) {
// Create database with circular dependency
db := &models.Database{
Name: "testdb",
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "table_a",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "bigint", IsPrimaryKey: true},
},
Constraints: map[string]*models.Constraint{
"fk_to_b": {
Name: "fk_to_b",
Type: models.ForeignKeyConstraint,
ReferencedTable: "table_b",
ReferencedSchema: "public",
},
},
},
{
Name: "table_b",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "bigint", IsPrimaryKey: true},
},
Constraints: map[string]*models.Constraint{
"fk_to_a": {
Name: "fk_to_a",
Type: models.ForeignKeyConstraint,
ReferencedTable: "table_a",
ReferencedSchema: "public",
},
},
},
},
},
},
}
rule := Rule{
Message: "Circular dependency detected",
}
results := validateCircularDependency(db, rule, "test_rule")
// Should detect circular dependency in both tables
if len(results) == 0 {
t.Error("validateCircularDependency() returned 0 results, expected circular dependency detection")
}
for _, result := range results {
if result.Passed {
t.Error("validateCircularDependency() passed=true, want false for circular dependency")
}
}
}
func TestNormalizeDataType(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"varchar(50)", "varchar"},
{"decimal(10,2)", "decimal"},
{"int", "int"},
{"BIGINT", "bigint"},
{"VARCHAR(255)", "varchar"},
}
for _, tt := range tests {
t.Run(tt.input, func(t *testing.T) {
result := normalizeDataType(tt.input)
if result != tt.expected {
t.Errorf("normalizeDataType(%q) = %q, want %q", tt.input, result, tt.expected)
}
})
}
}
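// Plausible sketch of a normalizeDataType consistent with the cases above:
// lowercase the name and strip any parenthesized length/precision suffix.
// Assumes "strings" is imported.
func normalizeDataType(t string) string {
t = strings.ToLower(t)
if i := strings.Index(t, "("); i >= 0 {
t = t[:i]
}
return t
}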
func TestContains(t *testing.T) {
tests := []struct {
name string
slice []string
value string
expected bool
}{
{"found exact", []string{"foo", "bar", "baz"}, "bar", true},
{"not found", []string{"foo", "bar", "baz"}, "qux", false},
{"case insensitive match", []string{"foo", "Bar", "baz"}, "bar", true},
{"empty slice", []string{}, "foo", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := contains(tt.slice, tt.value)
if result != tt.expected {
t.Errorf("contains(%v, %q) = %v, want %v", tt.slice, tt.value, result, tt.expected)
}
})
}
}
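// Plausible sketch of the case-insensitive membership check the cases above
// require, via strings.EqualFold.
func contains(slice []string, value string) bool {
for _, s := range slice {
if strings.EqualFold(s, value) {
return true
}
}
return false
}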
func TestHasCycle(t *testing.T) {
tests := []struct {
name string
graph map[string][]string
node string
expected bool
}{
{
name: "simple cycle",
graph: map[string][]string{
"A": {"B"},
"B": {"C"},
"C": {"A"},
},
node: "A",
expected: true,
},
{
name: "no cycle",
graph: map[string][]string{
"A": {"B"},
"B": {"C"},
"C": {},
},
node: "A",
expected: false,
},
{
name: "self cycle",
graph: map[string][]string{
"A": {"A"},
},
node: "A",
expected: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
visited := make(map[string]bool)
recStack := make(map[string]bool)
result := hasCycle(tt.node, tt.graph, visited, recStack)
if result != tt.expected {
t.Errorf("hasCycle() = %v, want %v", result, tt.expected)
}
})
}
}
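// Plausible sketch of a hasCycle matching the call shape used above: classic
// depth-first search with a recursion stack, reporting a cycle on a back edge.
func hasCycle(node string, graph map[string][]string, visited, recStack map[string]bool) bool {
visited[node] = true
recStack[node] = true
for _, next := range graph[node] {
if recStack[next] {
return true // back edge: next is already on the current DFS path
}
if !visited[next] && hasCycle(next, graph, visited, recStack) {
return true
}
}
recStack[node] = false
return false
}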
func TestFormatLocation(t *testing.T) {
tests := []struct {
schema string
table string
column string
expected string
}{
{"public", "users", "id", "public.users.id"},
{"public", "users", "", "public.users"},
{"public", "", "", "public"},
}
for _, tt := range tests {
t.Run(tt.expected, func(t *testing.T) {
result := formatLocation(tt.schema, tt.table, tt.column)
if result != tt.expected {
t.Errorf("formatLocation(%q, %q, %q) = %q, want %q",
tt.schema, tt.table, tt.column, result, tt.expected)
}
})
}
}
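// Plausible sketch: join the non-empty parts with dots, as the cases above
// require. Assumes "strings" is imported.
func formatLocation(schema, table, column string) string {
parts := make([]string, 0, 3)
for _, p := range []string{schema, table, column} {
if p != "" {
parts = append(parts, p)
}
}
return strings.Join(parts, ".")
}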

pkg/merge/merge.go Normal file

@@ -0,0 +1,627 @@
// Package merge provides utilities for merging database schemas.
// It allows combining schemas from multiple sources while avoiding duplicates,
// supporting only additive operations (no deletion or modification of existing items).
package merge
import (
"fmt"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// MergeResult represents the result of a merge operation
type MergeResult struct {
SchemasAdded int
TablesAdded int
ColumnsAdded int
ConstraintsAdded int
IndexesAdded int
RelationsAdded int
DomainsAdded int
EnumsAdded int
ViewsAdded int
SequencesAdded int
}
// MergeOptions contains options for merge operations
type MergeOptions struct {
SkipDomains bool
SkipRelations bool
SkipEnums bool
SkipViews bool
SkipSequences bool
SkipTableNames map[string]bool // Tables to skip during merge, keyed by lowercase table name (matched case-insensitively)
}
// MergeDatabases merges the source database into the target database.
// Only adds missing items; existing items are not modified.
func MergeDatabases(target, source *models.Database, opts *MergeOptions) *MergeResult {
if opts == nil {
opts = &MergeOptions{}
}
result := &MergeResult{}
if target == nil || source == nil {
return result
}
// Merge schemas and their contents
result.merge(target, source, opts)
return result
}
func (r *MergeResult) merge(target, source *models.Database, opts *MergeOptions) {
// Create maps of existing schemas for quick lookup
existingSchemas := make(map[string]*models.Schema)
for _, schema := range target.Schemas {
existingSchemas[schema.SQLName()] = schema
}
// Merge schemas
for _, srcSchema := range source.Schemas {
schemaName := srcSchema.SQLName()
if tgtSchema, exists := existingSchemas[schemaName]; exists {
// Schema exists, merge its contents
r.mergeSchemaContents(tgtSchema, srcSchema, opts)
} else {
// Schema doesn't exist, add it
newSchema := cloneSchema(srcSchema)
target.Schemas = append(target.Schemas, newSchema)
r.SchemasAdded++
}
}
// Merge domains if not skipped
if !opts.SkipDomains {
r.mergeDomains(target, source)
}
}
func (r *MergeResult) mergeSchemaContents(target, source *models.Schema, opts *MergeOptions) {
// Merge tables
r.mergeTables(target, source, opts)
// Merge views if not skipped
if !opts.SkipViews {
r.mergeViews(target, source)
}
// Merge sequences if not skipped
if !opts.SkipSequences {
r.mergeSequences(target, source)
}
// Merge enums if not skipped
if !opts.SkipEnums {
r.mergeEnums(target, source)
}
// Merge relations if not skipped
if !opts.SkipRelations {
r.mergeRelations(target, source)
}
}
func (r *MergeResult) mergeTables(schema *models.Schema, source *models.Schema, opts *MergeOptions) {
// Create map of existing tables
existingTables := make(map[string]*models.Table)
for _, table := range schema.Tables {
existingTables[table.SQLName()] = table
}
// Merge tables
for _, srcTable := range source.Tables {
tableName := srcTable.SQLName()
// Skip if table is in the skip list (case-insensitive)
if opts != nil && opts.SkipTableNames != nil && opts.SkipTableNames[strings.ToLower(tableName)] {
continue
}
if tgtTable, exists := existingTables[tableName]; exists {
// Table exists, merge its columns, constraints, and indexes
r.mergeColumns(tgtTable, srcTable)
r.mergeConstraints(tgtTable, srcTable)
r.mergeIndexes(tgtTable, srcTable)
} else {
// Table doesn't exist, add it
newTable := cloneTable(srcTable)
schema.Tables = append(schema.Tables, newTable)
r.TablesAdded++
// Count columns in the newly added table
r.ColumnsAdded += len(newTable.Columns)
}
}
}
func (r *MergeResult) mergeColumns(table *models.Table, srcTable *models.Table) {
// Create map of existing columns
existingColumns := make(map[string]*models.Column)
for colName := range table.Columns {
existingColumns[colName] = table.Columns[colName]
}
// Merge columns
for colName, srcCol := range srcTable.Columns {
if _, exists := existingColumns[colName]; !exists {
// Column doesn't exist, add it
newCol := cloneColumn(srcCol)
table.Columns[colName] = newCol
r.ColumnsAdded++
}
}
}
func (r *MergeResult) mergeConstraints(table *models.Table, srcTable *models.Table) {
// Initialize constraints map if nil
if table.Constraints == nil {
table.Constraints = make(map[string]*models.Constraint)
}
// Create map of existing constraints
existingConstraints := make(map[string]*models.Constraint)
for constName := range table.Constraints {
existingConstraints[constName] = table.Constraints[constName]
}
// Merge constraints
for constName, srcConst := range srcTable.Constraints {
if _, exists := existingConstraints[constName]; !exists {
// Constraint doesn't exist, add it
newConst := cloneConstraint(srcConst)
table.Constraints[constName] = newConst
r.ConstraintsAdded++
}
}
}
func (r *MergeResult) mergeIndexes(table *models.Table, srcTable *models.Table) {
// Initialize indexes map if nil
if table.Indexes == nil {
table.Indexes = make(map[string]*models.Index)
}
// Create map of existing indexes
existingIndexes := make(map[string]*models.Index)
for idxName := range table.Indexes {
existingIndexes[idxName] = table.Indexes[idxName]
}
// Merge indexes
for idxName, srcIdx := range srcTable.Indexes {
if _, exists := existingIndexes[idxName]; !exists {
// Index doesn't exist, add it
newIdx := cloneIndex(srcIdx)
table.Indexes[idxName] = newIdx
r.IndexesAdded++
}
}
}
func (r *MergeResult) mergeViews(schema *models.Schema, source *models.Schema) {
// Create map of existing views
existingViews := make(map[string]*models.View)
for _, view := range schema.Views {
existingViews[view.SQLName()] = view
}
// Merge views
for _, srcView := range source.Views {
viewName := srcView.SQLName()
if _, exists := existingViews[viewName]; !exists {
// View doesn't exist, add it
newView := cloneView(srcView)
schema.Views = append(schema.Views, newView)
r.ViewsAdded++
}
}
}
func (r *MergeResult) mergeSequences(schema *models.Schema, source *models.Schema) {
// Create map of existing sequences
existingSequences := make(map[string]*models.Sequence)
for _, seq := range schema.Sequences {
existingSequences[seq.SQLName()] = seq
}
// Merge sequences
for _, srcSeq := range source.Sequences {
seqName := srcSeq.SQLName()
if _, exists := existingSequences[seqName]; !exists {
// Sequence doesn't exist, add it
newSeq := cloneSequence(srcSeq)
schema.Sequences = append(schema.Sequences, newSeq)
r.SequencesAdded++
}
}
}
func (r *MergeResult) mergeEnums(schema *models.Schema, source *models.Schema) {
// Create map of existing enums
existingEnums := make(map[string]*models.Enum)
for _, enum := range schema.Enums {
existingEnums[enum.SQLName()] = enum
}
// Merge enums
for _, srcEnum := range source.Enums {
enumName := srcEnum.SQLName()
if _, exists := existingEnums[enumName]; !exists {
// Enum doesn't exist, add it
newEnum := cloneEnum(srcEnum)
schema.Enums = append(schema.Enums, newEnum)
r.EnumsAdded++
}
}
}
func (r *MergeResult) mergeRelations(schema *models.Schema, source *models.Schema) {
// Create map of existing relations
existingRelations := make(map[string]*models.Relationship)
for _, rel := range schema.Relations {
existingRelations[rel.SQLName()] = rel
}
// Merge relations
for _, srcRel := range source.Relations {
if _, exists := existingRelations[srcRel.SQLName()]; !exists {
// Relation doesn't exist, add it
newRel := cloneRelation(srcRel)
schema.Relations = append(schema.Relations, newRel)
r.RelationsAdded++
}
}
}
func (r *MergeResult) mergeDomains(target *models.Database, source *models.Database) {
// Create map of existing domains
existingDomains := make(map[string]*models.Domain)
for _, domain := range target.Domains {
existingDomains[domain.SQLName()] = domain
}
// Merge domains
for _, srcDomain := range source.Domains {
domainName := srcDomain.SQLName()
if _, exists := existingDomains[domainName]; !exists {
// Domain doesn't exist, add it
newDomain := cloneDomain(srcDomain)
target.Domains = append(target.Domains, newDomain)
r.DomainsAdded++
}
}
}
// Clone functions create copies of the models. Nested structures are cloned
// deeply, except Scripts and DomainTable entries, which are copied as pointers.
func cloneSchema(schema *models.Schema) *models.Schema {
if schema == nil {
return nil
}
newSchema := &models.Schema{
Name: schema.Name,
Description: schema.Description,
Owner: schema.Owner,
Comment: schema.Comment,
Sequence: schema.Sequence,
UpdatedAt: schema.UpdatedAt,
Tables: make([]*models.Table, 0),
Views: make([]*models.View, 0),
Sequences: make([]*models.Sequence, 0),
Enums: make([]*models.Enum, 0),
Relations: make([]*models.Relationship, 0),
}
if schema.Permissions != nil {
newSchema.Permissions = make(map[string]string)
for k, v := range schema.Permissions {
newSchema.Permissions[k] = v
}
}
if schema.Metadata != nil {
newSchema.Metadata = make(map[string]interface{})
for k, v := range schema.Metadata {
newSchema.Metadata[k] = v
}
}
if schema.Scripts != nil {
newSchema.Scripts = make([]*models.Script, len(schema.Scripts))
copy(newSchema.Scripts, schema.Scripts)
}
// Clone tables
for _, table := range schema.Tables {
newSchema.Tables = append(newSchema.Tables, cloneTable(table))
}
// Clone views
for _, view := range schema.Views {
newSchema.Views = append(newSchema.Views, cloneView(view))
}
// Clone sequences
for _, seq := range schema.Sequences {
newSchema.Sequences = append(newSchema.Sequences, cloneSequence(seq))
}
// Clone enums
for _, enum := range schema.Enums {
newSchema.Enums = append(newSchema.Enums, cloneEnum(enum))
}
// Clone relations
for _, rel := range schema.Relations {
newSchema.Relations = append(newSchema.Relations, cloneRelation(rel))
}
return newSchema
}
func cloneTable(table *models.Table) *models.Table {
if table == nil {
return nil
}
newTable := &models.Table{
Name: table.Name,
Description: table.Description,
Schema: table.Schema,
Comment: table.Comment,
Sequence: table.Sequence,
UpdatedAt: table.UpdatedAt,
Columns: make(map[string]*models.Column),
Constraints: make(map[string]*models.Constraint),
Indexes: make(map[string]*models.Index),
}
if table.Metadata != nil {
newTable.Metadata = make(map[string]interface{})
for k, v := range table.Metadata {
newTable.Metadata[k] = v
}
}
// Clone columns
for colName, col := range table.Columns {
newTable.Columns[colName] = cloneColumn(col)
}
// Clone constraints
for constName, constraint := range table.Constraints {
newTable.Constraints[constName] = cloneConstraint(constraint)
}
// Clone indexes
for idxName, index := range table.Indexes {
newTable.Indexes[idxName] = cloneIndex(index)
}
return newTable
}
func cloneColumn(col *models.Column) *models.Column {
if col == nil {
return nil
}
newCol := &models.Column{
Name: col.Name,
Type: col.Type,
Description: col.Description,
Comment: col.Comment,
IsPrimaryKey: col.IsPrimaryKey,
NotNull: col.NotNull,
Default: col.Default,
Precision: col.Precision,
Scale: col.Scale,
Length: col.Length,
Sequence: col.Sequence,
AutoIncrement: col.AutoIncrement,
Collation: col.Collation,
}
return newCol
}
func cloneConstraint(constraint *models.Constraint) *models.Constraint {
if constraint == nil {
return nil
}
newConstraint := &models.Constraint{
Type: constraint.Type,
Columns: make([]string, len(constraint.Columns)),
ReferencedTable: constraint.ReferencedTable,
ReferencedSchema: constraint.ReferencedSchema,
ReferencedColumns: make([]string, len(constraint.ReferencedColumns)),
OnUpdate: constraint.OnUpdate,
OnDelete: constraint.OnDelete,
Expression: constraint.Expression,
Name: constraint.Name,
Deferrable: constraint.Deferrable,
InitiallyDeferred: constraint.InitiallyDeferred,
Sequence: constraint.Sequence,
}
copy(newConstraint.Columns, constraint.Columns)
copy(newConstraint.ReferencedColumns, constraint.ReferencedColumns)
return newConstraint
}
func cloneIndex(index *models.Index) *models.Index {
if index == nil {
return nil
}
newIndex := &models.Index{
Name: index.Name,
Description: index.Description,
Table: index.Table,
Schema: index.Schema,
Columns: make([]string, len(index.Columns)),
Unique: index.Unique,
Type: index.Type,
Where: index.Where,
Concurrent: index.Concurrent,
Include: make([]string, len(index.Include)),
Comment: index.Comment,
Sequence: index.Sequence,
}
copy(newIndex.Columns, index.Columns)
copy(newIndex.Include, index.Include)
return newIndex
}
func cloneView(view *models.View) *models.View {
if view == nil {
return nil
}
newView := &models.View{
Name: view.Name,
Description: view.Description,
Schema: view.Schema,
Definition: view.Definition,
Comment: view.Comment,
Sequence: view.Sequence,
Columns: make(map[string]*models.Column),
}
if view.Metadata != nil {
newView.Metadata = make(map[string]interface{})
for k, v := range view.Metadata {
newView.Metadata[k] = v
}
}
// Clone columns
for colName, col := range view.Columns {
newView.Columns[colName] = cloneColumn(col)
}
return newView
}
func cloneSequence(seq *models.Sequence) *models.Sequence {
if seq == nil {
return nil
}
newSeq := &models.Sequence{
Name: seq.Name,
Description: seq.Description,
Schema: seq.Schema,
StartValue: seq.StartValue,
MinValue: seq.MinValue,
MaxValue: seq.MaxValue,
IncrementBy: seq.IncrementBy,
CacheSize: seq.CacheSize,
Cycle: seq.Cycle,
OwnedByTable: seq.OwnedByTable,
OwnedByColumn: seq.OwnedByColumn,
Comment: seq.Comment,
Sequence: seq.Sequence,
}
return newSeq
}
func cloneEnum(enum *models.Enum) *models.Enum {
if enum == nil {
return nil
}
newEnum := &models.Enum{
Name: enum.Name,
Values: make([]string, len(enum.Values)),
Schema: enum.Schema,
}
copy(newEnum.Values, enum.Values)
return newEnum
}
func cloneRelation(rel *models.Relationship) *models.Relationship {
if rel == nil {
return nil
}
newRel := &models.Relationship{
Name: rel.Name,
Type: rel.Type,
FromTable: rel.FromTable,
FromSchema: rel.FromSchema,
FromColumns: make([]string, len(rel.FromColumns)),
ToTable: rel.ToTable,
ToSchema: rel.ToSchema,
ToColumns: make([]string, len(rel.ToColumns)),
ForeignKey: rel.ForeignKey,
ThroughTable: rel.ThroughTable,
ThroughSchema: rel.ThroughSchema,
Description: rel.Description,
Sequence: rel.Sequence,
}
if rel.Properties != nil {
newRel.Properties = make(map[string]string)
for k, v := range rel.Properties {
newRel.Properties[k] = v
}
}
copy(newRel.FromColumns, rel.FromColumns)
copy(newRel.ToColumns, rel.ToColumns)
return newRel
}
func cloneDomain(domain *models.Domain) *models.Domain {
if domain == nil {
return nil
}
newDomain := &models.Domain{
Name: domain.Name,
Description: domain.Description,
Comment: domain.Comment,
Sequence: domain.Sequence,
Tables: make([]*models.DomainTable, len(domain.Tables)),
}
if domain.Metadata != nil {
newDomain.Metadata = make(map[string]interface{})
for k, v := range domain.Metadata {
newDomain.Metadata[k] = v
}
}
copy(newDomain.Tables, domain.Tables)
return newDomain
}
// GetMergeSummary returns a human-readable summary of the merge result
func GetMergeSummary(result *MergeResult) string {
if result == nil {
return "No merge result available"
}
lines := []string{
"=== Merge Summary ===",
fmt.Sprintf("Schemas added: %d", result.SchemasAdded),
fmt.Sprintf("Tables added: %d", result.TablesAdded),
fmt.Sprintf("Columns added: %d", result.ColumnsAdded),
fmt.Sprintf("Constraints added: %d", result.ConstraintsAdded),
fmt.Sprintf("Indexes added: %d", result.IndexesAdded),
fmt.Sprintf("Views added: %d", result.ViewsAdded),
fmt.Sprintf("Sequences added: %d", result.SequencesAdded),
fmt.Sprintf("Enums added: %d", result.EnumsAdded),
fmt.Sprintf("Relations added: %d", result.RelationsAdded),
fmt.Sprintf("Domains added: %d", result.DomainsAdded),
}
totalAdded := result.SchemasAdded + result.TablesAdded + result.ColumnsAdded +
result.ConstraintsAdded + result.IndexesAdded +
result.ViewsAdded + result.SequencesAdded + result.EnumsAdded +
result.RelationsAdded + result.DomainsAdded
lines = append(lines, fmt.Sprintf("Total items added: %d", totalAdded))
summary := ""
for _, line := range lines {
summary += line + "\n"
}
return summary
}
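
A minimal usage sketch of the additive merge, built only from the types and functions defined in this file; the import paths mirror the module path used elsewhere in this diff:

package main
import (
"fmt"
"git.warky.dev/wdevs/relspecgo/pkg/merge"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func main() {
target := &models.Database{Schemas: []*models.Schema{{Name: "public"}}}
source := &models.Database{Schemas: []*models.Schema{{Name: "public"}, {Name: "auth"}}}
// "auth" exists only in the source, so the merge adds exactly one schema.
// The skip list is keyed by lowercase table name, matching mergeTables.
result := merge.MergeDatabases(target, source, &merge.MergeOptions{
SkipTableNames: map[string]bool{"audit_log": true},
})
fmt.Print(merge.GetMergeSummary(result))
}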

pkg/merge/merge_test.go Normal file

@@ -0,0 +1,617 @@
package merge
import (
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func TestMergeDatabases_NilInputs(t *testing.T) {
result := MergeDatabases(nil, nil, nil)
if result == nil {
t.Fatal("Expected non-nil result")
}
if result.SchemasAdded != 0 {
t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
}
}
func TestMergeDatabases_NewSchema(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{Name: "auth"},
},
}
result := MergeDatabases(target, source, nil)
if result.SchemasAdded != 1 {
t.Errorf("Expected 1 schema added, got %d", result.SchemasAdded)
}
if len(target.Schemas) != 2 {
t.Errorf("Expected 2 schemas in target, got %d", len(target.Schemas))
}
}
func TestMergeDatabases_ExistingSchema(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
result := MergeDatabases(target, source, nil)
if result.SchemasAdded != 0 {
t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
}
if len(target.Schemas) != 1 {
t.Errorf("Expected 1 schema in target, got %d", len(target.Schemas))
}
}
func TestMergeTables_NewTable(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "posts",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.TablesAdded != 1 {
t.Errorf("Expected 1 table added, got %d", result.TablesAdded)
}
if len(target.Schemas[0].Tables) != 2 {
t.Errorf("Expected 2 tables in target schema, got %d", len(target.Schemas[0].Tables))
}
}
func TestMergeColumns_NewColumn(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "int"},
},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"email": {Name: "email", Type: "varchar"},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ColumnsAdded != 1 {
t.Errorf("Expected 1 column added, got %d", result.ColumnsAdded)
}
if len(target.Schemas[0].Tables[0].Columns) != 2 {
t.Errorf("Expected 2 columns in target table, got %d", len(target.Schemas[0].Tables[0].Columns))
}
}
func TestMergeConstraints_NewConstraint(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ConstraintsAdded != 1 {
t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
}
if len(target.Schemas[0].Tables[0].Constraints) != 1 {
t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
}
}
func TestMergeConstraints_NilConstraintsMap(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: nil, // Nil map
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ConstraintsAdded != 1 {
t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
}
if target.Schemas[0].Tables[0].Constraints == nil {
t.Error("Expected constraints map to be initialized")
}
if len(target.Schemas[0].Tables[0].Constraints) != 1 {
t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
}
}
func TestMergeIndexes_NewIndex(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
if len(target.Schemas[0].Tables[0].Indexes) != 1 {
t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
}
}
func TestMergeIndexes_NilIndexesMap(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: nil, // Nil map
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
if target.Schemas[0].Tables[0].Indexes == nil {
t.Error("Expected indexes map to be initialized")
}
if len(target.Schemas[0].Tables[0].Indexes) != 1 {
t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
}
}
func TestMergeOptions_SkipTableNames(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "migrations",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
opts := &MergeOptions{
SkipTableNames: map[string]bool{
"migrations": true,
},
}
result := MergeDatabases(target, source, opts)
if result.TablesAdded != 0 {
t.Errorf("Expected 0 tables added (skipped), got %d", result.TablesAdded)
}
if len(target.Schemas[0].Tables) != 1 {
t.Errorf("Expected 1 table in target schema, got %d", len(target.Schemas[0].Tables))
}
}
func TestMergeViews_NewView(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Views: []*models.View{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Views: []*models.View{
{
Name: "user_summary",
Schema: "public",
Definition: "SELECT * FROM users",
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ViewsAdded != 1 {
t.Errorf("Expected 1 view added, got %d", result.ViewsAdded)
}
if len(target.Schemas[0].Views) != 1 {
t.Errorf("Expected 1 view in target schema, got %d", len(target.Schemas[0].Views))
}
}
func TestMergeEnums_NewEnum(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Enums: []*models.Enum{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Enums: []*models.Enum{
{
Name: "user_role",
Schema: "public",
Values: []string{"admin", "user"},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.EnumsAdded != 1 {
t.Errorf("Expected 1 enum added, got %d", result.EnumsAdded)
}
if len(target.Schemas[0].Enums) != 1 {
t.Errorf("Expected 1 enum in target schema, got %d", len(target.Schemas[0].Enums))
}
}
func TestMergeDomains_NewDomain(t *testing.T) {
target := &models.Database{
Domains: []*models.Domain{},
}
source := &models.Database{
Domains: []*models.Domain{
{
Name: "auth",
Description: "Authentication domain",
},
},
}
result := MergeDatabases(target, source, nil)
if result.DomainsAdded != 1 {
t.Errorf("Expected 1 domain added, got %d", result.DomainsAdded)
}
if len(target.Domains) != 1 {
t.Errorf("Expected 1 domain in target, got %d", len(target.Domains))
}
}
func TestMergeRelations_NewRelation(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Relations: []*models.Relationship{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Relations: []*models.Relationship{
{
Name: "fk_posts_user",
Type: models.OneToMany,
FromTable: "posts",
FromColumns: []string{"user_id"},
ToTable: "users",
ToColumns: []string{"id"},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.RelationsAdded != 1 {
t.Errorf("Expected 1 relation added, got %d", result.RelationsAdded)
}
if len(target.Schemas[0].Relations) != 1 {
t.Errorf("Expected 1 relation in target schema, got %d", len(target.Schemas[0].Relations))
}
}
func TestGetMergeSummary(t *testing.T) {
result := &MergeResult{
SchemasAdded: 1,
TablesAdded: 2,
ColumnsAdded: 5,
ConstraintsAdded: 3,
IndexesAdded: 2,
ViewsAdded: 1,
}
summary := GetMergeSummary(result)
if summary == "" {
t.Error("Expected non-empty summary")
}
if len(summary) < 50 {
t.Errorf("Summary seems too short: %s", summary)
}
}
func TestGetMergeSummary_Nil(t *testing.T) {
summary := GetMergeSummary(nil)
if summary == "" {
t.Error("Expected non-empty summary for nil result")
}
}
func TestComplexMerge(t *testing.T) {
// Target with existing structure
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "int"},
},
Constraints: map[string]*models.Constraint{},
Indexes: map[string]*models.Index{},
},
},
},
},
}
// Source with new columns, constraints, and indexes
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"email": {Name: "email", Type: "varchar"},
"guid": {Name: "guid", Type: "uuid"},
},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
"ukey_users_guid": {
Type: models.UniqueConstraint,
Columns: []string{"guid"},
Name: "ukey_users_guid",
},
},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
// Verify counts
if result.ColumnsAdded != 2 {
t.Errorf("Expected 2 columns added, got %d", result.ColumnsAdded)
}
if result.ConstraintsAdded != 2 {
t.Errorf("Expected 2 constraints added, got %d", result.ConstraintsAdded)
}
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
// Verify target has merged data
table := target.Schemas[0].Tables[0]
if len(table.Columns) != 3 {
t.Errorf("Expected 3 columns in merged table, got %d", len(table.Columns))
}
if len(table.Constraints) != 2 {
t.Errorf("Expected 2 constraints in merged table, got %d", len(table.Constraints))
}
if len(table.Indexes) != 1 {
t.Errorf("Expected 1 index in merged table, got %d", len(table.Indexes))
}
// Verify specific constraint
if _, exists := table.Constraints["ukey_users_guid"]; !exists {
t.Error("Expected ukey_users_guid constraint to exist")
}
}

(modified file: models package)

@@ -4,7 +4,12 @@
 // intermediate representation for converting between various database schema formats.
 package models
-import "strings"
+import (
+"strings"
+"time"
+"github.com/google/uuid"
+)
 // DatabaseType represents the type of database system.
 type DatabaseType string
@@ -21,10 +26,13 @@ type Database struct {
 Name string `json:"name" yaml:"name"`
 Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
 Schemas []*Schema `json:"schemas" yaml:"schemas" xml:"schemas"`
+Domains []*Domain `json:"domains,omitempty" yaml:"domains,omitempty" xml:"domains,omitempty"`
 Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
 DatabaseType DatabaseType `json:"database_type,omitempty" yaml:"database_type,omitempty" xml:"database_type,omitempty"`
 DatabaseVersion string `json:"database_version,omitempty" yaml:"database_version,omitempty" xml:"database_version,omitempty"`
 SourceFormat string `json:"source_format,omitempty" yaml:"source_format,omitempty" xml:"source_format,omitempty"` // Source Format of the database.
+UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the database name in lowercase for SQL compatibility.
@@ -32,6 +40,39 @@ func (d *Database) SQLName() string {
 return strings.ToLower(d.Name)
 }
+// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
+func (d *Database) UpdateDate() {
+d.UpdatedAt = time.Now().Format(time.RFC3339)
+}
+// Domain represents a logical business domain grouping multiple tables from potentially different schemas.
+// Domains allow for organizing database tables by functional areas (e.g., authentication, user data, financial).
+type Domain struct {
+Name string `json:"name" yaml:"name" xml:"name"`
+Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
+Tables []*DomainTable `json:"tables" yaml:"tables" xml:"tables"`
+Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
+Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
+Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
+}
+// SQLName returns the domain name in lowercase for SQL compatibility.
+func (d *Domain) SQLName() string {
+return strings.ToLower(d.Name)
+}
+// DomainTable represents a reference to a specific table within a domain.
+// It identifies the table by name and schema, allowing a single domain to include
+// tables from multiple schemas.
+type DomainTable struct {
+TableName string `json:"table_name" yaml:"table_name" xml:"table_name"`
+SchemaName string `json:"schema_name" yaml:"schema_name" xml:"schema_name"`
+Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+RefTable *Table `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
+}
 // Schema represents a database schema, which is a logical grouping of database objects
 // such as tables, views, sequences, and relationships within a database.
 type Schema struct {
@@ -49,6 +90,16 @@ type Schema struct {
 RefDatabase *Database `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
 Relations []*Relationship `json:"relations,omitempty" yaml:"relations,omitempty" xml:"-"`
 Enums []*Enum `json:"enums,omitempty" yaml:"enums,omitempty" xml:"enums"`
+UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
+}
+// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
+func (d *Schema) UpdateDate() {
+d.UpdatedAt = time.Now().Format(time.RFC3339)
+if d.RefDatabase != nil {
+d.RefDatabase.UpdateDate()
+}
 }
 // SQLName returns the schema name in lowercase for SQL compatibility.
@@ -71,6 +122,16 @@ type Table struct {
 Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
+UpdatedAt string `json:"updatedat,omitempty" yaml:"updatedat,omitempty" xml:"updatedat,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
+}
+// UpdateDate sets the UpdatedAt field to the current time in RFC3339 format.
+func (d *Table) UpdateDate() {
+d.UpdatedAt = time.Now().Format(time.RFC3339)
+if d.RefSchema != nil {
+d.RefSchema.UpdateDate()
+}
 }
 // SQLName returns the table name in lowercase for SQL compatibility.
@@ -111,6 +172,7 @@ type View struct {
 Metadata map[string]any `json:"metadata,omitempty" yaml:"metadata,omitempty" xml:"-"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the view name in lowercase for SQL compatibility.
@@ -134,6 +196,7 @@ type Sequence struct {
 Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
 RefSchema *Schema `json:"-" yaml:"-" xml:"-"` // Excluded to prevent circular references
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the sequence name in lowercase for SQL compatibility.
@@ -158,6 +221,7 @@ type Column struct {
 Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
 Collation string `json:"collation,omitempty" yaml:"collation,omitempty" xml:"collation,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the column name in lowercase for SQL compatibility.
@@ -180,6 +244,7 @@ type Index struct {
 Include []string `json:"include,omitempty" yaml:"include,omitempty" xml:"include,omitempty"` // INCLUDE columns
 Comment string `json:"comment,omitempty" yaml:"comment,omitempty" xml:"comment,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the index name in lowercase for SQL compatibility.
@@ -214,6 +279,7 @@ type Relationship struct {
 ThroughSchema string `json:"through_schema,omitempty" yaml:"through_schema,omitempty" xml:"through_schema,omitempty"`
 Description string `json:"description,omitempty" yaml:"description,omitempty" xml:"description,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the relationship name in lowercase for SQL compatibility.
@@ -238,6 +304,7 @@ type Constraint struct {
 Deferrable bool `json:"deferrable,omitempty" yaml:"deferrable,omitempty" xml:"deferrable,omitempty"`
 InitiallyDeferred bool `json:"initially_deferred,omitempty" yaml:"initially_deferred,omitempty" xml:"initially_deferred,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the constraint name in lowercase for SQL compatibility.
@@ -253,6 +320,7 @@ type Enum struct {
 Name string `json:"name" yaml:"name" xml:"name"`
 Values []string `json:"values" yaml:"values" xml:"values"`
 Schema string `json:"schema,omitempty" yaml:"schema,omitempty" xml:"schema,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the enum name in lowercase for SQL compatibility.
@@ -260,6 +328,16 @@ func (d *Enum) SQLName() string {
 return strings.ToLower(d.Name)
 }
+// InitEnum initializes a new Enum with empty values slice
+func InitEnum(name, schema string) *Enum {
+return &Enum{
+Name: name,
+Schema: schema,
+Values: make([]string, 0),
+GUID: uuid.New().String(),
+}
+}
 // Supported constraint types.
 const (
 PrimaryKeyConstraint ConstraintType = "primary_key" // Primary key uniquely identifies each record
@@ -281,6 +359,7 @@ type Script struct {
 Version string `json:"version,omitempty" yaml:"version,omitempty" xml:"version,omitempty"`
 Priority int `json:"priority,omitempty" yaml:"priority,omitempty" xml:"priority,omitempty"`
 Sequence uint `json:"sequence,omitempty" yaml:"sequence,omitempty" xml:"sequence,omitempty"`
+GUID string `json:"guid" yaml:"guid" xml:"guid"`
 }
 // SQLName returns the script name in lowercase for SQL compatibility.
@@ -295,6 +374,8 @@ func InitDatabase(name string) *Database {
 return &Database{
 Name: name,
 Schemas: make([]*Schema, 0),
+Domains: make([]*Domain, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -308,6 +389,7 @@ func InitSchema(name string) *Schema {
 Permissions: make(map[string]string),
 Metadata: make(map[string]any),
 Scripts: make([]*Script, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -321,6 +403,7 @@ func InitTable(name, schema string) *Table {
 Indexes: make(map[string]*Index),
 Relationships: make(map[string]*Relationship),
 Metadata: make(map[string]any),
+GUID: uuid.New().String(),
 }
 }
@@ -330,6 +413,7 @@ func InitColumn(name, table, schema string) *Column {
 Name: name,
 Table: table,
 Schema: schema,
+GUID: uuid.New().String(),
 }
 }
@@ -341,6 +425,7 @@ func InitIndex(name, table, schema string) *Index {
 Schema: schema,
 Columns: make([]string, 0),
 Include: make([]string, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -353,6 +438,7 @@ func InitRelation(name, schema string) *Relationship {
 Properties: make(map[string]string),
 FromColumns: make([]string, 0),
 ToColumns: make([]string, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -362,6 +448,7 @@ func InitRelationship(name string, relType RelationType) *Relationship {
 Name: name,
 Type: relType,
 Properties: make(map[string]string),
+GUID: uuid.New().String(),
 }
 }
@@ -372,6 +459,7 @@ func InitConstraint(name string, constraintType ConstraintType) *Constraint {
 Type: constraintType,
 Columns: make([]string, 0),
 ReferencedColumns: make([]string, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -380,6 +468,7 @@ func InitScript(name string) *Script {
 return &Script{
 Name: name,
 RunAfter: make([]string, 0),
+GUID: uuid.New().String(),
 }
 }
@@ -390,6 +479,7 @@ func InitView(name, schema string) *View {
 Schema: schema,
 Columns: make(map[string]*Column),
 Metadata: make(map[string]any),
+GUID: uuid.New().String(),
 }
 }
@@ -400,5 +490,25 @@ func InitSequence(name, schema string) *Sequence {
 Schema: schema,
 IncrementBy: 1,
 StartValue: 1,
+GUID: uuid.New().String(),
+}
+}
+// InitDomain initializes a new Domain with empty slices and maps
+func InitDomain(name string) *Domain {
+return &Domain{
+Name: name,
+Tables: make([]*DomainTable, 0),
+Metadata: make(map[string]any),
+GUID: uuid.New().String(),
+}
+}
+// InitDomainTable initializes a new DomainTable reference
+func InitDomainTable(tableName, schemaName string) *DomainTable {
+return &DomainTable{
+TableName: tableName,
+SchemaName: schemaName,
+GUID: uuid.New().String(),
 }
 }

pkg/models/sorting.go (new file, 282 lines)

@@ -0,0 +1,282 @@
package models
import (
"sort"
"strings"
)
// SortOrder represents the sort direction
type SortOrder bool
const (
// Ascending sort order
Ascending SortOrder = false
// Descending sort order
Descending SortOrder = true
)
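// Note: the Sort* helpers below take a plain bool for descending order; a SortOrder value can be passed with an explicit conversion, e.g. bool(Descending).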
// Schema Sorting
// SortSchemasByName sorts schemas by name
func SortSchemasByName(schemas []*Schema, desc bool) error {
sort.SliceStable(schemas, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(schemas[i].Name), strings.ToLower(schemas[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortSchemasBySequence sorts schemas by sequence number
func SortSchemasBySequence(schemas []*Schema, desc bool) error {
sort.SliceStable(schemas, func(i, j int) bool {
if desc {
return schemas[i].Sequence > schemas[j].Sequence
}
return schemas[i].Sequence < schemas[j].Sequence
})
return nil
}
// Table Sorting
// SortTablesByName sorts tables by name
func SortTablesByName(tables []*Table, desc bool) error {
sort.SliceStable(tables, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(tables[i].Name), strings.ToLower(tables[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortTablesBySequence sorts tables by sequence number
func SortTablesBySequence(tables []*Table, desc bool) error {
sort.SliceStable(tables, func(i, j int) bool {
if desc {
return tables[i].Sequence > tables[j].Sequence
}
return tables[i].Sequence < tables[j].Sequence
})
return nil
}
// Column Sorting
// SortColumnsMapByName converts column map to sorted slice by name
func SortColumnsMapByName(columns map[string]*Column, desc bool) []*Column {
result := make([]*Column, 0, len(columns))
for _, col := range columns {
result = append(result, col)
}
_ = SortColumnsByName(result, desc)
return result
}
// SortColumnsMapBySequence converts column map to sorted slice by sequence
func SortColumnsMapBySequence(columns map[string]*Column, desc bool) []*Column {
result := make([]*Column, 0, len(columns))
for _, col := range columns {
result = append(result, col)
}
_ = SortColumnsBySequence(result, desc)
return result
}
// SortColumnsByName sorts columns by name
func SortColumnsByName(columns []*Column, desc bool) error {
sort.SliceStable(columns, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(columns[i].Name), strings.ToLower(columns[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortColumnsBySequence sorts columns by sequence number
func SortColumnsBySequence(columns []*Column, desc bool) error {
sort.SliceStable(columns, func(i, j int) bool {
if desc {
return columns[i].Sequence > columns[j].Sequence
}
return columns[i].Sequence < columns[j].Sequence
})
return nil
}
// View Sorting
// SortViewsByName sorts views by name
func SortViewsByName(views []*View, desc bool) error {
sort.SliceStable(views, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(views[i].Name), strings.ToLower(views[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortViewsBySequence sorts views by sequence number
func SortViewsBySequence(views []*View, desc bool) error {
sort.SliceStable(views, func(i, j int) bool {
if desc {
return views[i].Sequence > views[j].Sequence
}
return views[i].Sequence < views[j].Sequence
})
return nil
}
// Sequence Sorting
// SortSequencesByName sorts sequences by name
func SortSequencesByName(sequences []*Sequence, desc bool) error {
sort.SliceStable(sequences, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(sequences[i].Name), strings.ToLower(sequences[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortSequencesBySequence sorts sequences by sequence number
func SortSequencesBySequence(sequences []*Sequence, desc bool) error {
sort.SliceStable(sequences, func(i, j int) bool {
if desc {
return sequences[i].Sequence > sequences[j].Sequence
}
return sequences[i].Sequence < sequences[j].Sequence
})
return nil
}
// Index Sorting
// SortIndexesMapByName converts index map to sorted slice by name
func SortIndexesMapByName(indexes map[string]*Index, desc bool) []*Index {
result := make([]*Index, 0, len(indexes))
for _, idx := range indexes {
result = append(result, idx)
}
_ = SortIndexesByName(result, desc)
return result
}
// SortIndexesMapBySequence converts index map to sorted slice by sequence
func SortIndexesMapBySequence(indexes map[string]*Index, desc bool) []*Index {
result := make([]*Index, 0, len(indexes))
for _, idx := range indexes {
result = append(result, idx)
}
_ = SortIndexesBySequence(result, desc)
return result
}
// SortIndexesByName sorts indexes by name
func SortIndexesByName(indexes []*Index, desc bool) error {
sort.SliceStable(indexes, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(indexes[i].Name), strings.ToLower(indexes[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// SortIndexesBySequence sorts indexes by sequence number
func SortIndexesBySequence(indexes []*Index, desc bool) error {
sort.SliceStable(indexes, func(i, j int) bool {
if desc {
return indexes[i].Sequence > indexes[j].Sequence
}
return indexes[i].Sequence < indexes[j].Sequence
})
return nil
}
// Constraint Sorting
// SortConstraintsMapByName converts constraint map to sorted slice by name
func SortConstraintsMapByName(constraints map[string]*Constraint, desc bool) []*Constraint {
result := make([]*Constraint, 0, len(constraints))
for _, c := range constraints {
result = append(result, c)
}
_ = SortConstraintsByName(result, desc)
return result
}
// SortConstraintsByName sorts constraints by name
func SortConstraintsByName(constraints []*Constraint, desc bool) error {
sort.SliceStable(constraints, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(constraints[i].Name), strings.ToLower(constraints[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// Relationship Sorting
// SortRelationshipsMapByName converts relationship map to sorted slice by name
func SortRelationshipsMapByName(relationships map[string]*Relationship, desc bool) []*Relationship {
result := make([]*Relationship, 0, len(relationships))
for _, r := range relationships {
result = append(result, r)
}
_ = SortRelationshipsByName(result, desc)
return result
}
// SortRelationshipsByName sorts relationships by name
func SortRelationshipsByName(relationships []*Relationship, desc bool) error {
sort.SliceStable(relationships, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(relationships[i].Name), strings.ToLower(relationships[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// Script Sorting
// SortScriptsByName sorts scripts by name
func SortScriptsByName(scripts []*Script, desc bool) error {
sort.SliceStable(scripts, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(scripts[i].Name), strings.ToLower(scripts[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}
// Enum Sorting
// SortEnumsByName sorts enums by name
func SortEnumsByName(enums []*Enum, desc bool) error {
sort.SliceStable(enums, func(i, j int) bool {
cmp := strings.Compare(strings.ToLower(enums[i].Name), strings.ToLower(enums[j].Name))
if desc {
return cmp > 0
}
return cmp < 0
})
return nil
}

pkg/mssql/README.md (new file, 99 lines)

@@ -0,0 +1,99 @@
# MSSQL Package
Provides utilities for working with Microsoft SQL Server data types and conversions.
## Components
### Type Mapping
Provides bidirectional conversion between canonical types and MSSQL types:
- **CanonicalToMSSQL**: Convert abstract types to MSSQL-specific types
- **MSSQLToCanonical**: Convert MSSQL types to abstract representation
## Type Conversion Tables
### Canonical → MSSQL
| Canonical | MSSQL | Notes |
|-----------|-------|-------|
| int | INT | 32-bit signed integer |
| int64 | BIGINT | 64-bit signed integer |
| int32 | INT | 32-bit signed integer |
| int16 | SMALLINT | 16-bit signed integer |
| int8 | TINYINT | 8-bit integer (TINYINT is unsigned in MSSQL, 0 to 255) |
| bool | BIT | 0 (false) or 1 (true) |
| float32 | REAL | Single precision floating point |
| float64 | FLOAT | Double precision floating point |
| decimal | NUMERIC | Fixed-point decimal number |
| string | NVARCHAR(255) | Unicode variable-length string |
| text | NVARCHAR(MAX) | Unicode large text |
| timestamp | DATETIME2 | Date and time without timezone |
| timestamptz | DATETIMEOFFSET | Date and time with timezone offset |
| uuid | UNIQUEIDENTIFIER | GUID/UUID type |
| bytea | VARBINARY(MAX) | Variable-length binary data |
| date | DATE | Date only |
| time | TIME | Time only |
| json | NVARCHAR(MAX) | Stored as text (SQL Server 2016+) |
| jsonb | NVARCHAR(MAX) | Stored as text (SQL Server 2016+) |
### MSSQL → Canonical
| MSSQL | Canonical | Notes |
|-------|-----------|-------|
| INT, INTEGER | int | Standard integer |
| BIGINT | int64 | Large integer |
| SMALLINT | int16 | Small integer |
| TINYINT | int8 | Tiny integer |
| BIT | bool | Boolean/bit flag |
| REAL | float32 | Single precision |
| FLOAT | float64 | Double precision |
| NUMERIC, DECIMAL | decimal | Exact decimal |
| NVARCHAR, VARCHAR | string | Variable-length string |
| NCHAR, CHAR | string | Fixed-length string |
| DATETIME2 | timestamp | Default timestamp |
| DATETIMEOFFSET | timestamptz | Timestamp with timezone |
| DATE | date | Date only |
| TIME | time | Time only |
| UNIQUEIDENTIFIER | uuid | UUID/GUID |
| VARBINARY, BINARY | bytea | Binary data |
| XML | string | Stored as text |
## Usage
```go
package main
import (
"fmt"
"git.warky.dev/wdevs/relspecgo/pkg/mssql"
)
func main() {
// Convert canonical to MSSQL
mssqlType := mssql.ConvertCanonicalToMSSQL("int")
fmt.Println(mssqlType) // Output: INT
// Convert MSSQL to canonical
canonicalType := mssql.ConvertMSSQLToCanonical("BIGINT")
fmt.Println(canonicalType) // Output: int64
// Handle parameterized types
canonicalType = mssql.ConvertMSSQLToCanonical("NVARCHAR(255)")
fmt.Println(canonicalType) // Output: string
}
```
## Testing
Run tests with:
```bash
go test ./pkg/mssql/...
```
## Notes
- Type conversions are case-insensitive
- Parameterized types (e.g., `NVARCHAR(255)`) have their base type extracted
- Unmapped MSSQL types default to `string`, and unmapped canonical types default to `NVARCHAR(255)`, for safety (see the sketch below)
- The package supports SQL Server 2016 and later versions
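
The stripping and fallback behaviour can be checked directly; a minimal sketch against the conversion functions shown in `datatypes.go` below (the input types here are arbitrary examples):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
)

func main() {
	// Parameterized types are reduced to their base type before lookup.
	fmt.Println(mssql.ConvertMSSQLToCanonical("DECIMAL(10,2)")) // Output: decimal

	// An MSSQL type with no mapping falls back to the canonical "string".
	fmt.Println(mssql.ConvertMSSQLToCanonical("rowversion")) // Output: string

	// A canonical type with no mapping falls back to NVARCHAR(255).
	fmt.Println(mssql.ConvertCanonicalToMSSQL("point")) // Output: NVARCHAR(255)
}
```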

pkg/mssql/datatypes.go (new file, 114 lines)

@@ -0,0 +1,114 @@
package mssql
import "strings"
// CanonicalToMSSQLTypes maps canonical types to MSSQL types
var CanonicalToMSSQLTypes = map[string]string{
"bool": "BIT",
"int8": "TINYINT",
"int16": "SMALLINT",
"int": "INT",
"int32": "INT",
"int64": "BIGINT",
"uint": "BIGINT",
"uint8": "SMALLINT",
"uint16": "INT",
"uint32": "BIGINT",
"uint64": "BIGINT",
"float32": "REAL",
"float64": "FLOAT",
"decimal": "NUMERIC",
"string": "NVARCHAR(255)",
"text": "NVARCHAR(MAX)",
"date": "DATE",
"time": "TIME",
"timestamp": "DATETIME2",
"timestamptz": "DATETIMEOFFSET",
"uuid": "UNIQUEIDENTIFIER",
"json": "NVARCHAR(MAX)",
"jsonb": "NVARCHAR(MAX)",
"bytea": "VARBINARY(MAX)",
}
// MSSQLToCanonicalTypes maps MSSQL types to canonical types
var MSSQLToCanonicalTypes = map[string]string{
"bit": "bool",
"tinyint": "int8",
"smallint": "int16",
"int": "int",
"integer": "int",
"bigint": "int64",
"real": "float32",
"float": "float64",
"numeric": "decimal",
"decimal": "decimal",
"money": "decimal",
"smallmoney": "decimal",
"nvarchar": "string",
"nchar": "string",
"varchar": "string",
"char": "string",
"text": "string",
"ntext": "string",
"date": "date",
"time": "time",
"datetime": "timestamp",
"datetime2": "timestamp",
"smalldatetime": "timestamp",
"datetimeoffset": "timestamptz",
"uniqueidentifier": "uuid",
"varbinary": "bytea",
"binary": "bytea",
"image": "bytea",
"xml": "string",
"json": "json",
"sql_variant": "string",
"hierarchyid": "string",
"geography": "string",
"geometry": "string",
}
// ConvertCanonicalToMSSQL converts a canonical type to MSSQL type
func ConvertCanonicalToMSSQL(canonicalType string) string {
// Check direct mapping
if mssqlType, exists := CanonicalToMSSQLTypes[strings.ToLower(canonicalType)]; exists {
return mssqlType
}
// Try to find by prefix
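// Note: Go randomizes map iteration order, so if more than one canonical key is a prefix match the chosen mapping may vary between runs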
lowerType := strings.ToLower(canonicalType)
for canonical, mssql := range CanonicalToMSSQLTypes {
if strings.HasPrefix(lowerType, canonical) {
return mssql
}
}
// Default to NVARCHAR
return "NVARCHAR(255)"
}
// ConvertMSSQLToCanonical converts an MSSQL type to canonical type
func ConvertMSSQLToCanonical(mssqlType string) string {
// Extract base type (remove parentheses and parameters)
baseType := mssqlType
if idx := strings.Index(baseType, "("); idx != -1 {
baseType = baseType[:idx]
}
baseType = strings.TrimSpace(baseType)
// Check direct mapping
if canonicalType, exists := MSSQLToCanonicalTypes[strings.ToLower(baseType)]; exists {
return canonicalType
}
// Try to find by prefix
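// Note: as above, overlapping prefix matches are resolved in random map-iteration order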
lowerType := strings.ToLower(baseType)
for mssql, canonical := range MSSQLToCanonicalTypes {
if strings.HasPrefix(lowerType, mssql) {
return canonical
}
}
// Default to string
return "string"
}

(modified file: pgsql type mappings)

@@ -4,31 +4,31 @@ import "strings"
 var GoToStdTypes = map[string]string{
 "bool": "boolean",
-"int64": "integer",
+"int64": "bigint",
 "int": "integer",
-"int8": "integer",
+"int8": "smallint",
-"int16": "integer",
+"int16": "smallint",
 "int32": "integer",
 "uint": "integer",
-"uint8": "integer",
+"uint8": "smallint",
-"uint16": "integer",
+"uint16": "smallint",
 "uint32": "integer",
-"uint64": "integer",
+"uint64": "bigint",
-"uintptr": "integer",
+"uintptr": "bigint",
-"znullint64": "integer",
+"znullint64": "bigint",
 "znullint32": "integer",
-"znullbyte": "integer",
+"znullbyte": "smallint",
 "float64": "double",
 "float32": "double",
 "complex64": "double",
 "complex128": "double",
 "customfloat64": "double",
-"string": "string",
+"string": "text",
-"Pointer": "integer",
+"Pointer": "bigint",
 "[]byte": "blob",
-"customdate": "string",
+"customdate": "date",
-"customtime": "string",
+"customtime": "time",
-"customtimestamp": "string",
+"customtimestamp": "timestamp",
 "sqlfloat64": "double",
 "sqlfloat16": "double",
 "sqluuid": "uuid",
@@ -36,9 +36,9 @@ var GoToStdTypes = map[string]string{
 "sqljson": "json",
 "sqlint64": "bigint",
 "sqlint32": "integer",
-"sqlint16": "integer",
+"sqlint16": "smallint",
 "sqlbool": "boolean",
-"sqlstring": "string",
+"sqlstring": "text",
 "nullablejsonb": "jsonb",
 "nullablejson": "json",
 "nullableuuid": "uuid",
@@ -67,7 +67,7 @@ var GoToPGSQLTypes = map[string]string{
 "float32": "real",
 "complex64": "double precision",
 "complex128": "double precision",
-"customfloat64": "double precisio",
+"customfloat64": "double precision",
 "string": "text",
 "Pointer": "bigint",
 "[]byte": "bytea",
@@ -81,9 +81,9 @@ var GoToPGSQLTypes = map[string]string{
 "sqljson": "json",
 "sqlint64": "bigint",
 "sqlint32": "integer",
-"sqlint16": "integer",
+"sqlint16": "smallint",
 "sqlbool": "boolean",
-"sqlstring": "string",
+"sqlstring": "text",
 "nullablejsonb": "jsonb",
 "nullablejson": "json",
 "nullableuuid": "uuid",

pkg/pgsql/datatypes_test.go (new file, 339 lines)

@@ -0,0 +1,339 @@
package pgsql
import (
"testing"
)
func TestValidSQLType(t *testing.T) {
tests := []struct {
name string
sqltype string
want bool
}{
// PostgreSQL types
{"Valid PGSQL bigint", "bigint", true},
{"Valid PGSQL integer", "integer", true},
{"Valid PGSQL text", "text", true},
{"Valid PGSQL boolean", "boolean", true},
{"Valid PGSQL double precision", "double precision", true},
{"Valid PGSQL bytea", "bytea", true},
{"Valid PGSQL uuid", "uuid", true},
{"Valid PGSQL jsonb", "jsonb", true},
{"Valid PGSQL json", "json", true},
{"Valid PGSQL timestamp", "timestamp", true},
{"Valid PGSQL date", "date", true},
{"Valid PGSQL time", "time", true},
{"Valid PGSQL citext", "citext", true},
// Standard types
{"Valid std double", "double", true},
{"Valid std blob", "blob", true},
// Case insensitive
{"Case insensitive BIGINT", "BIGINT", true},
{"Case insensitive TeXt", "TeXt", true},
{"Case insensitive BoOlEaN", "BoOlEaN", true},
// Invalid types
{"Invalid type", "invalidtype", false},
{"Invalid type varchar", "varchar", false},
{"Empty string", "", false},
{"Random string", "foobar", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := ValidSQLType(tt.sqltype)
if got != tt.want {
t.Errorf("ValidSQLType(%q) = %v, want %v", tt.sqltype, got, tt.want)
}
})
}
}
func TestGetSQLType(t *testing.T) {
tests := []struct {
name string
anytype string
want string
}{
// Go types to PostgreSQL types
{"Go bool to boolean", "bool", "boolean"},
{"Go int64 to bigint", "int64", "bigint"},
{"Go int to integer", "int", "integer"},
{"Go string to text", "string", "text"},
{"Go float64 to double precision", "float64", "double precision"},
{"Go float32 to real", "float32", "real"},
{"Go []byte to bytea", "[]byte", "bytea"},
// SQL types remain SQL types
{"SQL bigint", "bigint", "bigint"},
{"SQL integer", "integer", "integer"},
{"SQL text", "text", "text"},
{"SQL boolean", "boolean", "boolean"},
{"SQL uuid", "uuid", "uuid"},
{"SQL jsonb", "jsonb", "jsonb"},
// Case insensitive Go types
{"Case insensitive BOOL", "BOOL", "boolean"},
{"Case insensitive InT64", "InT64", "bigint"},
{"Case insensitive STRING", "STRING", "text"},
// Case insensitive SQL types
{"Case insensitive BIGINT", "BIGINT", "bigint"},
{"Case insensitive TEXT", "TEXT", "text"},
// Custom types
{"Custom sqluuid", "sqluuid", "uuid"},
{"Custom sqljsonb", "sqljsonb", "jsonb"},
{"Custom sqlint64", "sqlint64", "bigint"},
// Unknown types default to text
{"Unknown type varchar", "varchar", "text"},
{"Unknown type foobar", "foobar", "text"},
{"Empty string", "", "text"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := GetSQLType(tt.anytype)
if got != tt.want {
t.Errorf("GetSQLType(%q) = %q, want %q", tt.anytype, got, tt.want)
}
})
}
}
func TestConvertSQLType(t *testing.T) {
tests := []struct {
name string
anytype string
want string
}{
// Go types to PostgreSQL types
{"Go bool to boolean", "bool", "boolean"},
{"Go int64 to bigint", "int64", "bigint"},
{"Go int to integer", "int", "integer"},
{"Go string to text", "string", "text"},
{"Go float64 to double precision", "float64", "double precision"},
{"Go float32 to real", "float32", "real"},
{"Go []byte to bytea", "[]byte", "bytea"},
// SQL types remain SQL types
{"SQL bigint", "bigint", "bigint"},
{"SQL integer", "integer", "integer"},
{"SQL text", "text", "text"},
{"SQL boolean", "boolean", "boolean"},
// Case insensitive
{"Case insensitive BOOL", "BOOL", "boolean"},
{"Case insensitive InT64", "InT64", "bigint"},
// Unknown types remain unchanged (difference from GetSQLType)
{"Unknown type varchar", "varchar", "varchar"},
{"Unknown type foobar", "foobar", "foobar"},
{"Empty string", "", ""},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := ConvertSQLType(tt.anytype)
if got != tt.want {
t.Errorf("ConvertSQLType(%q) = %q, want %q", tt.anytype, got, tt.want)
}
})
}
}
func TestIsGoType(t *testing.T) {
tests := []struct {
name string
typeName string
want bool
}{
// Go basic types
{"Go bool", "bool", true},
{"Go int64", "int64", true},
{"Go int", "int", true},
{"Go int32", "int32", true},
{"Go int16", "int16", true},
{"Go int8", "int8", true},
{"Go uint", "uint", true},
{"Go uint64", "uint64", true},
{"Go uint32", "uint32", true},
{"Go uint16", "uint16", true},
{"Go uint8", "uint8", true},
{"Go float64", "float64", true},
{"Go float32", "float32", true},
{"Go string", "string", true},
{"Go []byte", "[]byte", true},
// Go custom types
{"Go complex64", "complex64", true},
{"Go complex128", "complex128", true},
{"Go uintptr", "uintptr", true},
{"Go Pointer", "Pointer", true},
// Custom SQL types
{"Custom sqluuid", "sqluuid", true},
{"Custom sqljsonb", "sqljsonb", true},
{"Custom sqlint64", "sqlint64", true},
{"Custom customdate", "customdate", true},
{"Custom customtime", "customtime", true},
// Case insensitive
{"Case insensitive BOOL", "BOOL", true},
{"Case insensitive InT64", "InT64", true},
{"Case insensitive STRING", "STRING", true},
// SQL types (not Go types)
{"SQL bigint", "bigint", false},
{"SQL integer", "integer", false},
{"SQL text", "text", false},
{"SQL boolean", "boolean", false},
// Invalid types
{"Invalid type", "invalidtype", false},
{"Empty string", "", false},
{"Random string", "foobar", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := IsGoType(tt.typeName)
if got != tt.want {
t.Errorf("IsGoType(%q) = %v, want %v", tt.typeName, got, tt.want)
}
})
}
}
func TestGetStdTypeFromGo(t *testing.T) {
tests := []struct {
name string
typeName string
want string
}{
// Go types to standard SQL types
{"Go bool to boolean", "bool", "boolean"},
{"Go int64 to bigint", "int64", "bigint"},
{"Go int to integer", "int", "integer"},
{"Go string to text", "string", "text"},
{"Go float64 to double", "float64", "double"},
{"Go float32 to double", "float32", "double"},
{"Go []byte to blob", "[]byte", "blob"},
{"Go int32 to integer", "int32", "integer"},
{"Go int16 to smallint", "int16", "smallint"},
// Custom types
{"Custom sqluuid to uuid", "sqluuid", "uuid"},
{"Custom sqljsonb to jsonb", "sqljsonb", "jsonb"},
{"Custom sqlint64 to bigint", "sqlint64", "bigint"},
{"Custom customdate to date", "customdate", "date"},
// Case insensitive
{"Case insensitive BOOL", "BOOL", "boolean"},
{"Case insensitive InT64", "InT64", "bigint"},
{"Case insensitive STRING", "STRING", "text"},
// Non-Go types remain unchanged
{"SQL bigint unchanged", "bigint", "bigint"},
{"SQL integer unchanged", "integer", "integer"},
{"Invalid type unchanged", "invalidtype", "invalidtype"},
{"Empty string unchanged", "", ""},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := GetStdTypeFromGo(tt.typeName)
if got != tt.want {
t.Errorf("GetStdTypeFromGo(%q) = %q, want %q", tt.typeName, got, tt.want)
}
})
}
}
func TestGoToStdTypesMap(t *testing.T) {
// Test that the map contains expected entries
expectedMappings := map[string]string{
"bool": "boolean",
"int64": "bigint",
"int": "integer",
"string": "text",
"float64": "double",
"[]byte": "blob",
}
for goType, expectedStd := range expectedMappings {
if stdType, ok := GoToStdTypes[goType]; !ok {
t.Errorf("GoToStdTypes missing entry for %q", goType)
} else if stdType != expectedStd {
t.Errorf("GoToStdTypes[%q] = %q, want %q", goType, stdType, expectedStd)
}
}
// Test that the map is not empty
if len(GoToStdTypes) == 0 {
t.Error("GoToStdTypes map is empty")
}
}
func TestGoToPGSQLTypesMap(t *testing.T) {
// Test that the map contains expected entries
expectedMappings := map[string]string{
"bool": "boolean",
"int64": "bigint",
"int": "integer",
"string": "text",
"float64": "double precision",
"float32": "real",
"[]byte": "bytea",
}
for goType, expectedPG := range expectedMappings {
if pgType, ok := GoToPGSQLTypes[goType]; !ok {
t.Errorf("GoToPGSQLTypes missing entry for %q", goType)
} else if pgType != expectedPG {
t.Errorf("GoToPGSQLTypes[%q] = %q, want %q", goType, pgType, expectedPG)
}
}
// Test that the map is not empty
if len(GoToPGSQLTypes) == 0 {
t.Error("GoToPGSQLTypes map is empty")
}
}
func TestTypeConversionConsistency(t *testing.T) {
// Test that GetSQLType and ConvertSQLType are consistent for known types
knownGoTypes := []string{"bool", "int64", "int", "string", "float64", "[]byte"}
for _, goType := range knownGoTypes {
getSQLResult := GetSQLType(goType)
convertResult := ConvertSQLType(goType)
if getSQLResult != convertResult {
t.Errorf("Inconsistent results for %q: GetSQLType=%q, ConvertSQLType=%q",
goType, getSQLResult, convertResult)
}
}
}
func TestGetSQLTypeVsConvertSQLTypeDifference(t *testing.T) {
// Test that GetSQLType returns "text" for unknown types
// while ConvertSQLType returns the input unchanged
unknownTypes := []string{"varchar", "char", "customtype", "unknowntype"}
for _, unknown := range unknownTypes {
getSQLResult := GetSQLType(unknown)
convertResult := ConvertSQLType(unknown)
if getSQLResult != "text" {
t.Errorf("GetSQLType(%q) = %q, want %q", unknown, getSQLResult, "text")
}
if convertResult != unknown {
t.Errorf("ConvertSQLType(%q) = %q, want %q", unknown, convertResult, unknown)
}
}
}

pkg/pgsql/doc.go (new file, 36 lines)

@@ -0,0 +1,36 @@
// Package pgsql provides PostgreSQL-specific utilities and helpers.
//
// # Overview
//
// The pgsql package contains PostgreSQL-specific functionality including:
// - SQL reserved keyword validation
// - Data type mappings and conversions
// - PostgreSQL-specific schema introspection helpers
//
// # Components
//
// keywords.go - SQL reserved keywords validation
//
// Provides functions to check if identifiers conflict with SQL reserved words
// and need quoting for safe usage in PostgreSQL queries.
//
// datatypes.go - PostgreSQL data type utilities
//
// Contains mappings between PostgreSQL data types and their equivalents in other
// systems, as well as type conversion and normalization functions.
//
// # Usage
//
// // Check if identifier needs quoting
// if pgsql.IsReservedKeyword("user") {
// // Quote the identifier
// }
//
// // Normalize data type
// normalizedType := pgsql.NormalizeDataType("varchar(255)")
//
// # Purpose
//
// This package supports the PostgreSQL reader and writer implementations by providing
// shared utilities for handling PostgreSQL-specific schema elements and constraints.
package pgsql

pkg/pgsql/keywords_test.go (new file, 136 lines)

@@ -0,0 +1,136 @@
package pgsql
import (
"testing"
)
func TestGetPostgresKeywords(t *testing.T) {
keywords := GetPostgresKeywords()
// Test that keywords are returned
if len(keywords) == 0 {
t.Fatal("Expected non-empty list of keywords")
}
// Test that we get all keywords from the map
expectedCount := len(postgresKeywords)
if len(keywords) != expectedCount {
t.Errorf("Expected %d keywords, got %d", expectedCount, len(keywords))
}
// Test that all returned keywords exist in the map
for _, keyword := range keywords {
if !postgresKeywords[keyword] {
t.Errorf("Keyword %q not found in postgresKeywords map", keyword)
}
}
// Test that no duplicate keywords are returned
seen := make(map[string]bool)
for _, keyword := range keywords {
if seen[keyword] {
t.Errorf("Duplicate keyword found: %q", keyword)
}
seen[keyword] = true
}
}
func TestPostgresKeywordsMap(t *testing.T) {
tests := []struct {
name string
keyword string
want bool
}{
{"SELECT keyword", "select", true},
{"FROM keyword", "from", true},
{"WHERE keyword", "where", true},
{"TABLE keyword", "table", true},
{"PRIMARY keyword", "primary", true},
{"FOREIGN keyword", "foreign", true},
{"CREATE keyword", "create", true},
{"DROP keyword", "drop", true},
{"ALTER keyword", "alter", true},
{"INDEX keyword", "index", true},
{"NOT keyword", "not", true},
{"NULL keyword", "null", true},
{"TRUE keyword", "true", true},
{"FALSE keyword", "false", true},
{"Non-keyword lowercase", "notakeyword", false},
{"Non-keyword uppercase", "NOTAKEYWORD", false},
{"Empty string", "", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := postgresKeywords[tt.keyword]
if got != tt.want {
t.Errorf("postgresKeywords[%q] = %v, want %v", tt.keyword, got, tt.want)
}
})
}
}
func TestPostgresKeywordsMapContent(t *testing.T) {
// Test that the map contains expected common keywords
commonKeywords := []string{
"select", "insert", "update", "delete", "create", "drop", "alter",
"table", "index", "view", "schema", "function", "procedure",
"primary", "foreign", "key", "constraint", "unique", "check",
"null", "not", "and", "or", "like", "in", "between",
"join", "inner", "left", "right", "cross", "full", "outer",
"where", "having", "group", "order", "limit", "offset",
"union", "intersect", "except",
"begin", "commit", "rollback", "transaction",
}
for _, keyword := range commonKeywords {
if !postgresKeywords[keyword] {
t.Errorf("Expected common keyword %q to be in postgresKeywords map", keyword)
}
}
}
func TestPostgresKeywordsMapSize(t *testing.T) {
// PostgreSQL has a substantial list of reserved keywords
// This test ensures the map has a reasonable number of entries
minExpectedKeywords := 200 // PostgreSQL 13+ has 400+ reserved words
if len(postgresKeywords) < minExpectedKeywords {
t.Errorf("Expected at least %d keywords, got %d. The map may be incomplete.",
minExpectedKeywords, len(postgresKeywords))
}
}
func TestGetPostgresKeywordsConsistency(t *testing.T) {
// Test that calling GetPostgresKeywords multiple times returns consistent results
keywords1 := GetPostgresKeywords()
keywords2 := GetPostgresKeywords()
if len(keywords1) != len(keywords2) {
t.Errorf("Inconsistent results: first call returned %d keywords, second call returned %d",
len(keywords1), len(keywords2))
}
// Create a map from both results to compare
map1 := make(map[string]bool)
map2 := make(map[string]bool)
for _, k := range keywords1 {
map1[k] = true
}
for _, k := range keywords2 {
map2[k] = true
}
// Check that both contain the same keywords
for k := range map1 {
if !map2[k] {
t.Errorf("Keyword %q present in first call but not in second", k)
}
}
for k := range map2 {
if !map1[k] {
t.Errorf("Keyword %q present in second call but not in first", k)
}
}
}

View File

@@ -632,6 +632,9 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
column.Name = parts[0]
}
+// Track if we found explicit nullability markers
+hasExplicitNullableMarker := false
// Parse tag attributes
for _, part := range parts[1:] {
kv := strings.SplitN(part, ":", 2)
@@ -649,6 +652,10 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
column.IsPrimaryKey = true
case "notnull":
column.NotNull = true
+hasExplicitNullableMarker = true
+case "nullzero":
+column.NotNull = false
+hasExplicitNullableMarker = true
case "autoincrement":
column.AutoIncrement = true
case "default":
@@ -664,17 +671,15 @@ func (r *Reader) parseColumn(fieldName string, fieldType ast.Expr, tag string, s
// Determine if nullable based on Go type and bun tags
// In Bun:
-// - nullzero tag means the field is nullable (can be NULL in DB)
-// - absence of nullzero means the field is NOT NULL
-// - primitive types (int64, bool, string) are NOT NULL by default
-column.NotNull = true
-// Primary keys are always NOT NULL
-if strings.Contains(bunTag, "nullzero") {
-column.NotNull = false
-} else {
+// - explicit "notnull" tag means NOT NULL
+// - explicit "nullzero" tag means nullable
+// - absence of explicit markers: infer from Go type
+if !hasExplicitNullableMarker {
+// Infer from Go type if no explicit marker found
column.NotNull = !r.isNullableGoType(fieldType)
}
+// Primary keys are always NOT NULL
if column.IsPrimaryKey {
column.NotNull = true
}
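To make the new precedence concrete, here is a hypothetical Bun model and the nullability the reader would infer under these rules (field names are illustrative, not from the repository):

```go
package models

// Hypothetical Bun model; comments show which nullability rule fires.
type User struct {
	ID    int64   `bun:"id,pk"`         // primary key: always forced NOT NULL
	Email string  `bun:"email,notnull"` // explicit notnull marker wins
	Age   int64   `bun:"age,nullzero"`  // explicit nullzero marker: nullable
	Bio   *string `bun:"bio"`           // no marker: pointer type infers nullable
	Name  string  `bun:"name"`          // no marker: value type infers NOT NULL
}
```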

View File

@@ -4,7 +4,9 @@ import (
"bufio"
"fmt"
"os"
+"path/filepath"
"regexp"
+"sort"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
@@ -24,11 +26,23 @@ func NewReader(options *readers.ReaderOptions) *Reader {
}
// ReadDatabase reads and parses DBML input, returning a Database model
+// If FilePath points to a directory, all .dbml files are loaded and merged
func (r *Reader) ReadDatabase() (*models.Database, error) {
if r.options.FilePath == "" {
return nil, fmt.Errorf("file path is required for DBML reader")
}
+// Check if path is a directory
+info, err := os.Stat(r.options.FilePath)
+if err != nil {
+return nil, fmt.Errorf("failed to stat path: %w", err)
+}
+if info.IsDir() {
+return r.readDirectoryDBML(r.options.FilePath)
+}
+// Single file - existing logic
content, err := os.ReadFile(r.options.FilePath)
if err != nil {
return nil, fmt.Errorf("failed to read file: %w", err)
@@ -67,15 +81,341 @@ func (r *Reader) ReadTable() (*models.Table, error) {
return schema.Tables[0], nil
}
-// stripQuotes removes surrounding quotes from an identifier
+// readDirectoryDBML processes all .dbml files in directory
+// Returns merged Database model
func (r *Reader) readDirectoryDBML(dirPath string) (*models.Database, error) {
// Discover and sort DBML files
files, err := r.discoverDBMLFiles(dirPath)
if err != nil {
return nil, fmt.Errorf("failed to discover DBML files: %w", err)
}
// If no files found, return empty database
if len(files) == 0 {
db := models.InitDatabase("database")
if r.options.Metadata != nil {
if name, ok := r.options.Metadata["name"].(string); ok {
db.Name = name
}
}
return db, nil
}
// Initialize database (will be merged with files)
var db *models.Database
// Process each file in sorted order
for _, filePath := range files {
content, err := os.ReadFile(filePath)
if err != nil {
return nil, fmt.Errorf("failed to read file %s: %w", filePath, err)
}
fileDB, err := r.parseDBML(string(content))
if err != nil {
return nil, fmt.Errorf("failed to parse file %s: %w", filePath, err)
}
// First file initializes the database
if db == nil {
db = fileDB
} else {
// Subsequent files are merged
mergeDatabase(db, fileDB)
}
}
return db, nil
}
// splitIdentifier splits a dotted identifier while respecting quotes
// Handles cases like: "schema.with.dots"."table"."column"
func splitIdentifier(s string) []string {
var parts []string
var current strings.Builder
inQuote := false
quoteChar := byte(0)
for i := 0; i < len(s); i++ {
ch := s[i]
if !inQuote {
switch ch {
case '"', '\'':
inQuote = true
quoteChar = ch
current.WriteByte(ch)
case '.':
if current.Len() > 0 {
parts = append(parts, current.String())
current.Reset()
}
default:
current.WriteByte(ch)
}
} else {
current.WriteByte(ch)
if ch == quoteChar {
inQuote = false
}
}
}
if current.Len() > 0 {
parts = append(parts, current.String())
}
return parts
}
+// stripQuotes removes surrounding quotes and comments from an identifier
func stripQuotes(s string) string {
s = strings.TrimSpace(s)
+// Remove DBML comments in brackets (e.g., [note: 'description'])
+// This handles inline comments like: "table_name" [note: 'comment']
+commentRegex := regexp.MustCompile(`\s*\[.*?\]\s*`)
+s = commentRegex.ReplaceAllString(s, "")
+// Trim again after removing comments
+s = strings.TrimSpace(s)
+// Remove surrounding quotes (double or single)
if len(s) >= 2 && ((s[0] == '"' && s[len(s)-1] == '"') || (s[0] == '\'' && s[len(s)-1] == '\'')) {
return s[1 : len(s)-1]
}
return s
}
// parseFilePrefix extracts numeric prefix from filename
// Examples: "1_schema.dbml" -> (1, true), "tables.dbml" -> (0, false)
func parseFilePrefix(filename string) (int, bool) {
base := filepath.Base(filename)
re := regexp.MustCompile(`^(\d+)[_-]`)
matches := re.FindStringSubmatch(base)
if len(matches) > 1 {
var prefix int
_, err := fmt.Sscanf(matches[1], "%d", &prefix)
if err == nil {
return prefix, true
}
}
return 0, false
}
// hasCommentedRefs scans file content for commented-out Ref statements
// Returns true if file contains lines like: // Ref: table.col > other.col
func hasCommentedRefs(filePath string) (bool, error) {
content, err := os.ReadFile(filePath)
if err != nil {
return false, err
}
scanner := bufio.NewScanner(strings.NewReader(string(content)))
commentedRefRegex := regexp.MustCompile(`^\s*//.*Ref:\s+`)
for scanner.Scan() {
line := scanner.Text()
if commentedRefRegex.MatchString(line) {
return true, nil
}
}
return false, nil
}
// discoverDBMLFiles finds all .dbml files in directory and returns them sorted
func (r *Reader) discoverDBMLFiles(dirPath string) ([]string, error) {
pattern := filepath.Join(dirPath, "*.dbml")
files, err := filepath.Glob(pattern)
if err != nil {
return nil, fmt.Errorf("failed to glob .dbml files: %w", err)
}
return sortDBMLFiles(files), nil
}
// sortDBMLFiles sorts files by:
// 1. Files without commented refs (by numeric prefix, then alphabetically)
// 2. Files with commented refs (by numeric prefix, then alphabetically)
func sortDBMLFiles(files []string) []string {
// Create a slice to hold file info for sorting
type fileInfo struct {
path string
hasCommented bool
prefix int
hasPrefix bool
basename string
}
fileInfos := make([]fileInfo, 0, len(files))
for _, file := range files {
hasCommented, err := hasCommentedRefs(file)
if err != nil {
// If we can't read the file, treat it as not having commented refs
hasCommented = false
}
prefix, hasPrefix := parseFilePrefix(file)
basename := filepath.Base(file)
fileInfos = append(fileInfos, fileInfo{
path: file,
hasCommented: hasCommented,
prefix: prefix,
hasPrefix: hasPrefix,
basename: basename,
})
}
// Sort by: hasCommented (false first), hasPrefix (true first), prefix, basename
sort.Slice(fileInfos, func(i, j int) bool {
// First, sort by commented refs (files without commented refs come first)
if fileInfos[i].hasCommented != fileInfos[j].hasCommented {
return !fileInfos[i].hasCommented
}
// Then by presence of prefix (files with prefix come first)
if fileInfos[i].hasPrefix != fileInfos[j].hasPrefix {
return fileInfos[i].hasPrefix
}
// If both have prefix, sort by prefix value
if fileInfos[i].hasPrefix && fileInfos[j].hasPrefix {
if fileInfos[i].prefix != fileInfos[j].prefix {
return fileInfos[i].prefix < fileInfos[j].prefix
}
}
// Finally, sort alphabetically by basename
return fileInfos[i].basename < fileInfos[j].basename
})
// Extract sorted paths
sortedFiles := make([]string, len(fileInfos))
for i, info := range fileInfos {
sortedFiles[i] = info.path
}
return sortedFiles
}
// mergeTable combines two table definitions
// Merges: Columns (map), Constraints (map), Indexes (map), Relationships (map)
// Uses first non-empty Description
func mergeTable(baseTable, fileTable *models.Table) {
// Merge columns (map naturally merges - later keys overwrite)
for key, col := range fileTable.Columns {
baseTable.Columns[key] = col
}
// Merge constraints
for key, constraint := range fileTable.Constraints {
baseTable.Constraints[key] = constraint
}
// Merge indexes
for key, index := range fileTable.Indexes {
baseTable.Indexes[key] = index
}
// Merge relationships
for key, rel := range fileTable.Relationships {
baseTable.Relationships[key] = rel
}
// Use first non-empty description
if baseTable.Description == "" && fileTable.Description != "" {
baseTable.Description = fileTable.Description
}
// Merge metadata maps
if baseTable.Metadata == nil {
baseTable.Metadata = make(map[string]any)
}
for key, val := range fileTable.Metadata {
baseTable.Metadata[key] = val
}
}
// mergeSchema finds or creates schema and merges tables
func mergeSchema(baseDB *models.Database, fileSchema *models.Schema) {
// Find existing schema by name (normalize names by stripping quotes)
var existingSchema *models.Schema
fileSchemaName := stripQuotes(fileSchema.Name)
for _, schema := range baseDB.Schemas {
if stripQuotes(schema.Name) == fileSchemaName {
existingSchema = schema
break
}
}
// If schema doesn't exist, add it and return
if existingSchema == nil {
baseDB.Schemas = append(baseDB.Schemas, fileSchema)
return
}
// Merge tables from fileSchema into existingSchema
for _, fileTable := range fileSchema.Tables {
// Find existing table by name (normalize names by stripping quotes)
var existingTable *models.Table
fileTableName := stripQuotes(fileTable.Name)
for _, table := range existingSchema.Tables {
if stripQuotes(table.Name) == fileTableName {
existingTable = table
break
}
}
// If table doesn't exist, add it
if existingTable == nil {
existingSchema.Tables = append(existingSchema.Tables, fileTable)
} else {
// Table exists in both files - merge its properties
mergeTable(existingTable, fileTable)
}
}
// Merge other schema properties
existingSchema.Views = append(existingSchema.Views, fileSchema.Views...)
existingSchema.Sequences = append(existingSchema.Sequences, fileSchema.Sequences...)
existingSchema.Scripts = append(existingSchema.Scripts, fileSchema.Scripts...)
// Merge permissions
if existingSchema.Permissions == nil {
existingSchema.Permissions = make(map[string]string)
}
for key, val := range fileSchema.Permissions {
existingSchema.Permissions[key] = val
}
// Merge metadata
if existingSchema.Metadata == nil {
existingSchema.Metadata = make(map[string]any)
}
for key, val := range fileSchema.Metadata {
existingSchema.Metadata[key] = val
}
}
// mergeDatabase merges schemas from fileDB into baseDB
func mergeDatabase(baseDB, fileDB *models.Database) {
// Merge each schema from fileDB
for _, fileSchema := range fileDB.Schemas {
mergeSchema(baseDB, fileSchema)
}
// Merge domains
baseDB.Domains = append(baseDB.Domains, fileDB.Domains...)
// Use first non-empty description
if baseDB.Description == "" && fileDB.Description != "" {
baseDB.Description = fileDB.Description
}
}
// parseDBML parses DBML content and returns a Database model
func (r *Reader) parseDBML(content string) (*models.Database, error) {
db := models.InitDatabase("database")
@@ -109,7 +449,9 @@ func (r *Reader) parseDBML(content string) (*models.Database, error) {
// Parse Table definition
if matches := tableRegex.FindStringSubmatch(line); matches != nil {
tableName := matches[1]
-parts := strings.Split(tableName, ".")
+// Strip comments/notes before parsing to avoid dots in notes
+tableName = strings.TrimSpace(regexp.MustCompile(`\s*\[.*?\]\s*`).ReplaceAllString(tableName, ""))
+parts := splitIdentifier(tableName)
if len(parts) == 2 {
currentSchema = stripQuotes(parts[0])
@@ -261,8 +603,10 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
column.Default = strings.Trim(defaultVal, "'\"")
} else if attr == "unique" {
// Create a unique constraint
+// Clean table name by removing leading underscores to avoid double underscores
+cleanTableName := strings.TrimLeft(tableName, "_")
uniqueConstraint := models.InitConstraint(
-fmt.Sprintf("uq_%s", columnName),
+fmt.Sprintf("ukey_%s_%s", cleanTableName, columnName),
models.UniqueConstraint,
)
uniqueConstraint.Schema = schemaName
@@ -287,10 +631,10 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
refOp := strings.TrimSpace(refStr)
var isReverse bool
if strings.HasPrefix(refOp, "<") {
-isReverse = column.IsPrimaryKey // < on PK means "is referenced by" (reverse)
-} else if strings.HasPrefix(refOp, ">") {
-isReverse = !column.IsPrimaryKey // > on FK means reverse
+// < means "is referenced by" - only makes sense on PK columns
+isReverse = column.IsPrimaryKey
}
+// > means "references" - always a forward FK, never reverse
constraint = r.parseRef(refStr)
if constraint != nil {
@@ -310,8 +654,8 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
constraint.Table = tableName
constraint.Columns = []string{columnName}
}
-// Generate short constraint name based on the column
-constraint.Name = fmt.Sprintf("fk_%s", constraint.Columns[0])
+// Generate constraint name based on table and columns
+constraint.Name = fmt.Sprintf("fk_%s_%s", constraint.Table, strings.Join(constraint.Columns, "_"))
}
}
}
@@ -332,29 +676,33 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
// Format: (columns) [attributes] OR columnname [attributes]
var columns []string
-if strings.Contains(line, "(") && strings.Contains(line, ")") {
+// Find the attributes section to avoid parsing parentheses in notes/attributes
+attrStart := strings.Index(line, "[")
+columnPart := line
+if attrStart > 0 {
+columnPart = line[:attrStart]
+}
+if strings.Contains(columnPart, "(") && strings.Contains(columnPart, ")") {
// Multi-column format: (col1, col2) [attributes]
-colStart := strings.Index(line, "(")
-colEnd := strings.Index(line, ")")
+colStart := strings.Index(columnPart, "(")
+colEnd := strings.Index(columnPart, ")")
if colStart >= colEnd {
return nil
}
-columnsStr := line[colStart+1 : colEnd]
+columnsStr := columnPart[colStart+1 : colEnd]
for _, col := range strings.Split(columnsStr, ",") {
columns = append(columns, stripQuotes(strings.TrimSpace(col)))
}
-} else if strings.Contains(line, "[") {
+} else if attrStart > 0 {
// Single column format: columnname [attributes]
// Extract column name before the bracket
-idx := strings.Index(line, "[")
-if idx > 0 {
-colName := strings.TrimSpace(line[:idx])
+colName := strings.TrimSpace(columnPart)
if colName != "" {
columns = []string{stripQuotes(colName)}
}
}
-}
if len(columns) == 0 {
return nil
@@ -391,7 +739,11 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {
// Generate name if not provided
if index.Name == "" {
-index.Name = fmt.Sprintf("idx_%s_%s", tableName, strings.Join(columns, "_"))
+prefix := "idx"
+if index.Unique {
+prefix = "uidx"
+}
+index.Name = fmt.Sprintf("%s_%s_%s", prefix, tableName, strings.Join(columns, "_"))
}
return index
@@ -451,10 +803,10 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
return nil
}
-// Generate short constraint name based on the source column
-constraintName := fmt.Sprintf("fk_%s_%s", fromTable, toTable)
-if len(fromColumns) > 0 {
-constraintName = fmt.Sprintf("fk_%s", fromColumns[0])
+// Generate constraint name based on table and columns
+constraintName := fmt.Sprintf("fk_%s_%s", fromTable, strings.Join(fromColumns, "_"))
+if len(fromColumns) == 0 {
+constraintName = fmt.Sprintf("fk_%s_%s", fromTable, toTable)
}
constraint := models.InitConstraint(
@@ -510,7 +862,7 @@ func (r *Reader) parseTableRef(ref string) (schema, table string, columns []stri
}
// Parse schema, table, and optionally column
-parts := strings.Split(strings.TrimSpace(ref), ".")
+parts := splitIdentifier(strings.TrimSpace(ref))
if len(parts) == 3 {
// Format: "schema"."table"."column"
schema = stripQuotes(parts[0])
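Taken together, a directory of DBML files can now be consumed exactly like a single file; a minimal usage sketch (the directory path is illustrative):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
)

func main() {
	// A FilePath pointing at a directory loads every *.dbml file in it,
	// ordered by numeric prefix, with commented-Ref files merged last.
	reader := dbml.NewReader(&readers.ReaderOptions{FilePath: "schemas/"})
	db, err := reader.ReadDatabase()
	if err != nil {
		panic(err)
	}
	for _, schema := range db.Schemas {
		fmt.Printf("schema %s: %d tables\n", schema.Name, len(schema.Tables))
	}
}
```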

View File

@@ -1,6 +1,7 @@
package dbml
import (
+"os"
"path/filepath"
"testing"
@@ -517,3 +518,356 @@ func TestGetForeignKeys(t *testing.T) {
t.Error("Expected foreign key constraint type")
}
}
// Tests for multi-file directory loading
func TestReadDirectory_MultipleFiles(t *testing.T) {
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile"),
}
reader := NewReader(opts)
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase() error = %v", err)
}
if db == nil {
t.Fatal("ReadDatabase() returned nil database")
}
// Should have public schema
if len(db.Schemas) == 0 {
t.Fatal("Expected at least one schema")
}
var publicSchema *models.Schema
for _, schema := range db.Schemas {
if schema.Name == "public" {
publicSchema = schema
break
}
}
if publicSchema == nil {
t.Fatal("Public schema not found")
}
// Should have 3 tables: users, posts, comments
if len(publicSchema.Tables) != 3 {
t.Fatalf("Expected 3 tables, got %d", len(publicSchema.Tables))
}
// Find tables
var usersTable, postsTable, commentsTable *models.Table
for _, table := range publicSchema.Tables {
switch table.Name {
case "users":
usersTable = table
case "posts":
postsTable = table
case "comments":
commentsTable = table
}
}
if usersTable == nil {
t.Fatal("Users table not found")
}
if postsTable == nil {
t.Fatal("Posts table not found")
}
if commentsTable == nil {
t.Fatal("Comments table not found")
}
// Verify users table has merged columns from 1_users.dbml and 3_add_columns.dbml
expectedUserColumns := []string{"id", "email", "name", "created_at"}
if len(usersTable.Columns) != len(expectedUserColumns) {
t.Errorf("Expected %d columns in users table, got %d", len(expectedUserColumns), len(usersTable.Columns))
}
for _, colName := range expectedUserColumns {
if _, exists := usersTable.Columns[colName]; !exists {
t.Errorf("Expected column '%s' in users table", colName)
}
}
// Verify posts table columns
expectedPostColumns := []string{"id", "user_id", "title", "content", "created_at"}
for _, colName := range expectedPostColumns {
if _, exists := postsTable.Columns[colName]; !exists {
t.Errorf("Expected column '%s' in posts table", colName)
}
}
}
func TestReadDirectory_TableMerging(t *testing.T) {
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile"),
}
reader := NewReader(opts)
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase() error = %v", err)
}
// Find users table
var usersTable *models.Table
for _, schema := range db.Schemas {
for _, table := range schema.Tables {
if table.Name == "users" && schema.Name == "public" {
usersTable = table
break
}
}
}
if usersTable == nil {
t.Fatal("Users table not found")
}
// Verify columns from file 1 (id, email)
if _, exists := usersTable.Columns["id"]; !exists {
t.Error("Column 'id' from 1_users.dbml not found")
}
if _, exists := usersTable.Columns["email"]; !exists {
t.Error("Column 'email' from 1_users.dbml not found")
}
// Verify columns from file 3 (name, created_at)
if _, exists := usersTable.Columns["name"]; !exists {
t.Error("Column 'name' from 3_add_columns.dbml not found")
}
if _, exists := usersTable.Columns["created_at"]; !exists {
t.Error("Column 'created_at' from 3_add_columns.dbml not found")
}
// Verify column properties from file 1
emailCol := usersTable.Columns["email"]
if !emailCol.NotNull {
t.Error("Email column should be not null (from 1_users.dbml)")
}
if emailCol.Type != "varchar(255)" {
t.Errorf("Expected email type 'varchar(255)', got '%s'", emailCol.Type)
}
}
func TestReadDirectory_CommentedRefsLast(t *testing.T) {
// This test verifies that files with commented refs are processed last
// by checking that the file discovery returns them in the correct order
dirPath := filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile")
opts := &readers.ReaderOptions{
FilePath: dirPath,
}
reader := NewReader(opts)
files, err := reader.discoverDBMLFiles(dirPath)
if err != nil {
t.Fatalf("discoverDBMLFiles() error = %v", err)
}
if len(files) < 2 {
t.Skip("Not enough files to test ordering")
}
// Check that 9_refs.dbml (which has commented refs) comes last
lastFile := filepath.Base(files[len(files)-1])
if lastFile != "9_refs.dbml" {
t.Errorf("Expected last file to be '9_refs.dbml' (has commented refs), got '%s'", lastFile)
}
// Check that numbered files without commented refs come first
firstFile := filepath.Base(files[0])
if firstFile != "1_users.dbml" {
t.Errorf("Expected first file to be '1_users.dbml', got '%s'", firstFile)
}
}
func TestReadDirectory_EmptyDirectory(t *testing.T) {
// Create a temporary empty directory
tmpDir := filepath.Join("..", "..", "..", "tests", "assets", "dbml", "empty_test_dir")
err := os.MkdirAll(tmpDir, 0755)
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tmpDir)
opts := &readers.ReaderOptions{
FilePath: tmpDir,
}
reader := NewReader(opts)
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase() should not error on empty directory, got: %v", err)
}
if db == nil {
t.Fatal("ReadDatabase() returned nil database")
}
// Empty directory should return empty database
if len(db.Schemas) != 0 {
t.Errorf("Expected 0 schemas for empty directory, got %d", len(db.Schemas))
}
}
func TestReadDatabase_BackwardCompat(t *testing.T) {
// Test that single file loading still works
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "simple.dbml"),
}
reader := NewReader(opts)
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase() error = %v", err)
}
if db == nil {
t.Fatal("ReadDatabase() returned nil database")
}
if len(db.Schemas) == 0 {
t.Fatal("Expected at least one schema")
}
schema := db.Schemas[0]
if len(schema.Tables) != 1 {
t.Fatalf("Expected 1 table, got %d", len(schema.Tables))
}
table := schema.Tables[0]
if table.Name != "users" {
t.Errorf("Expected table name 'users', got '%s'", table.Name)
}
}
func TestParseFilePrefix(t *testing.T) {
tests := []struct {
filename string
wantPrefix int
wantHas bool
}{
{"1_schema.dbml", 1, true},
{"2_tables.dbml", 2, true},
{"10_relationships.dbml", 10, true},
{"99_data.dbml", 99, true},
{"schema.dbml", 0, false},
{"tables_no_prefix.dbml", 0, false},
{"/path/to/1_file.dbml", 1, true},
{"/path/to/file.dbml", 0, false},
{"1-file.dbml", 1, true},
{"2-another.dbml", 2, true},
}
for _, tt := range tests {
t.Run(tt.filename, func(t *testing.T) {
gotPrefix, gotHas := parseFilePrefix(tt.filename)
if gotPrefix != tt.wantPrefix {
t.Errorf("parseFilePrefix(%s) prefix = %d, want %d", tt.filename, gotPrefix, tt.wantPrefix)
}
if gotHas != tt.wantHas {
t.Errorf("parseFilePrefix(%s) hasPrefix = %v, want %v", tt.filename, gotHas, tt.wantHas)
}
})
}
}
func TestConstraintNaming(t *testing.T) {
// Test that constraints are named with proper prefixes
opts := &readers.ReaderOptions{
FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "complex.dbml"),
}
reader := NewReader(opts)
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase() error = %v", err)
}
// Find users table
var usersTable *models.Table
var postsTable *models.Table
for _, schema := range db.Schemas {
for _, table := range schema.Tables {
if table.Name == "users" {
usersTable = table
} else if table.Name == "posts" {
postsTable = table
}
}
}
if usersTable == nil {
t.Fatal("Users table not found")
}
if postsTable == nil {
t.Fatal("Posts table not found")
}
// Test unique constraint naming: ukey_table_column
if _, exists := usersTable.Constraints["ukey_users_email"]; !exists {
t.Error("Expected unique constraint 'ukey_users_email' not found")
t.Logf("Available constraints: %v", getKeys(usersTable.Constraints))
}
if _, exists := postsTable.Constraints["ukey_posts_slug"]; !exists {
t.Error("Expected unique constraint 'ukey_posts_slug' not found")
t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
}
// Test foreign key naming: fk_table_column
if _, exists := postsTable.Constraints["fk_posts_user_id"]; !exists {
t.Error("Expected foreign key 'fk_posts_user_id' not found")
t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
}
// Test unique index naming: uidx_table_columns
if _, exists := postsTable.Indexes["uidx_posts_slug"]; !exists {
t.Error("Expected unique index 'uidx_posts_slug' not found")
t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
}
// Test regular index naming: idx_table_columns
if _, exists := postsTable.Indexes["idx_posts_user_id_published"]; !exists {
t.Error("Expected index 'idx_posts_user_id_published' not found")
t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
}
}
func getKeys[V any](m map[string]V) []string {
keys := make([]string, 0, len(m))
for k := range m {
keys = append(keys, k)
}
return keys
}
func TestHasCommentedRefs(t *testing.T) {
// Test with the actual multifile test fixtures
tests := []struct {
filename string
wantHas bool
}{
{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "1_users.dbml"), false},
{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "2_posts.dbml"), false},
{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "3_add_columns.dbml"), false},
{filepath.Join("..", "..", "..", "tests", "assets", "dbml", "multifile", "9_refs.dbml"), true},
}
for _, tt := range tests {
t.Run(filepath.Base(tt.filename), func(t *testing.T) {
gotHas, err := hasCommentedRefs(tt.filename)
if err != nil {
t.Fatalf("hasCommentedRefs() error = %v", err)
}
if gotHas != tt.wantHas {
t.Errorf("hasCommentedRefs(%s) = %v, want %v", filepath.Base(tt.filename), gotHas, tt.wantHas)
}
})
}
}

View File

@@ -79,6 +79,8 @@ func (r *Reader) convertToDatabase(dctx *models.DCTXDictionary) (*models.Databas
db := models.InitDatabase(dbName)
schema := models.InitSchema("public")
+// Note: DCTX doesn't have database GUID, but schema can use dictionary name if available
// Create GUID mappings for tables and keys
tableGuidMap := make(map[string]string) // GUID -> table name
keyGuidMap := make(map[string]*models.DCTXKey) // GUID -> key definition
@@ -162,6 +164,10 @@ func (r *Reader) convertTable(dctxTable *models.DCTXTable) (*models.Table, map[s
tableName := r.sanitizeName(dctxTable.Name)
table := models.InitTable(tableName, "public")
table.Description = dctxTable.Description
+// Assign GUID from DCTX table
+if dctxTable.Guid != "" {
+table.GUID = dctxTable.Guid
+}
fieldGuidMap := make(map[string]string)
@@ -202,6 +208,10 @@ func (r *Reader) convertField(dctxField *models.DCTXField, tableName string) ([]
// Convert single field
column := models.InitColumn(r.sanitizeName(dctxField.Name), tableName, "public")
+// Assign GUID from DCTX field
+if dctxField.Guid != "" {
+column.GUID = dctxField.Guid
+}
// Map Clarion data types
dataType, length := r.mapDataType(dctxField.DataType, dctxField.Size)
@@ -346,6 +356,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
constraint.Table = table.Name
constraint.Schema = table.Schema
constraint.Columns = columns
+// Assign GUID from DCTX key
+if dctxKey.Guid != "" {
+constraint.GUID = dctxKey.Guid
+}
table.Constraints[constraint.Name] = constraint
@@ -366,6 +380,10 @@ func (r *Reader) convertKey(dctxKey *models.DCTXKey, table *models.Table, fieldG
index.Columns = columns
index.Unique = dctxKey.Unique
index.Type = "btree"
+// Assign GUID from DCTX key
+if dctxKey.Guid != "" {
+index.GUID = dctxKey.Guid
+}
table.Indexes[index.Name] = index
return nil
@@ -460,6 +478,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
constraint.ReferencedColumns = pkColumns
constraint.OnDelete = r.mapReferentialAction(relation.Delete)
constraint.OnUpdate = r.mapReferentialAction(relation.Update)
+// Assign GUID from DCTX relation
+if relation.Guid != "" {
+constraint.GUID = relation.Guid
+}
foreignTable.Constraints[fkName] = constraint
@@ -473,6 +495,10 @@ func (r *Reader) processRelations(dctx *models.DCTXDictionary, schema *models.Sc
relationship.ForeignKey = fkName
relationship.Properties["on_delete"] = constraint.OnDelete
relationship.Properties["on_update"] = constraint.OnUpdate
+// Assign GUID from DCTX relation
+if relation.Guid != "" {
+relationship.GUID = relation.Guid
+}
foreignTable.Relationships[relationshipName] = relationship
}

pkg/readers/doc.go Normal file
View File

@@ -0,0 +1,53 @@
// Package readers provides interfaces and implementations for reading database schemas
// from various input formats and data sources.
//
// # Overview
//
// The readers package defines a common Reader interface that all format-specific readers
// implement. This allows RelSpec to read database schemas from multiple sources including:
// - Live databases (PostgreSQL, SQLite)
// - Schema definition files (DBML, DCTX, DrawDB, GraphQL)
// - ORM model files (GORM, Bun, Drizzle, Prisma, TypeORM)
// - Data interchange formats (JSON, YAML)
//
// # Architecture
//
// Each reader implementation is located in its own subpackage (e.g., pkg/readers/dbml,
// pkg/readers/pgsql) and implements the Reader interface, supporting three levels of
// granularity:
// - ReadDatabase() - Read complete database with all schemas
// - ReadSchema() - Read single schema with all tables
// - ReadTable() - Read single table with all columns and metadata
//
// # Usage
//
// Readers are instantiated with ReaderOptions containing source-specific configuration:
//
// // Read from file
// reader := dbml.NewReader(&readers.ReaderOptions{
// FilePath: "schema.dbml",
// })
// db, err := reader.ReadDatabase()
//
// // Read from database
// reader := pgsql.NewReader(&readers.ReaderOptions{
// ConnectionString: "postgres://user:pass@localhost/mydb",
// })
// db, err := reader.ReadDatabase()
//
// # Supported Formats
//
// - dbml: Database Markup Language files
// - dctx: DCTX schema files
// - drawdb: DrawDB JSON format
// - graphql: GraphQL schema definition language
// - json: JSON database schema
// - yaml: YAML database schema
// - gorm: Go GORM model structs
// - bun: Go Bun model structs
// - drizzle: TypeScript Drizzle ORM schemas
// - prisma: Prisma schema language
// - typeorm: TypeScript TypeORM entities
// - pgsql: PostgreSQL live database introspection
// - sqlite: SQLite database files
package readers

View File

@@ -140,6 +140,32 @@ func (r *Reader) convertToDatabase(drawSchema *drawdb.DrawDBSchema) (*models.Dat
db.Schemas = append(db.Schemas, schema)
}
// Convert DrawDB subject areas to domains
for _, area := range drawSchema.SubjectAreas {
domain := models.InitDomain(area.Name)
// Find all tables that visually belong to this area
// A table belongs to an area if its position is within the area bounds
for _, drawTable := range drawSchema.Tables {
if drawTable.X >= area.X && drawTable.X <= (area.X+area.Width) &&
drawTable.Y >= area.Y && drawTable.Y <= (area.Y+area.Height) {
schemaName := drawTable.Schema
if schemaName == "" {
schemaName = "public"
}
domainTable := models.InitDomainTable(drawTable.Name, schemaName)
domain.Tables = append(domain.Tables, domainTable)
}
}
// Only add domain if it has tables
if len(domain.Tables) > 0 {
db.Domains = append(db.Domains, domain)
}
}
return db, nil
}

View File

@@ -241,11 +241,9 @@ func (r *Reader) parsePgEnum(line string, matches []string) *models.Enum {
}
}
-return &models.Enum{
-Name: enumName,
-Values: values,
-Schema: "public",
-}
+enum := models.InitEnum(enumName, "public")
+enum.Values = values
+return enum
}
// parseTableBlock parses a complete pgTable definition block

View File

@@ -260,11 +260,7 @@ func (r *Reader) parseType(typeName string, lines []string, schema *models.Schem
}
func (r *Reader) parseEnum(enumName string, lines []string, schema *models.Schema) {
-enum := &models.Enum{
-Name: enumName,
-Schema: schema.Name,
-Values: make([]string, 0),
-}
+enum := models.InitEnum(enumName, schema.Name)
for _, line := range lines {
trimmed := strings.TrimSpace(line)

View File

@@ -0,0 +1,91 @@
# MSSQL Reader
Reads database schema from Microsoft SQL Server databases using a live connection.
## Features
- **Live Connection**: Connects to MSSQL databases using the Microsoft go-mssqldb driver
- **Multi-Schema Support**: Reads multiple schemas with full support for user-defined schemas
- **Comprehensive Metadata**: Reads tables, columns, constraints, indexes, and extended properties
- **Type Mapping**: Converts MSSQL types to canonical types for cross-database compatibility
- **Extended Properties**: Extracts table and column descriptions from MS_Description
- **Identity Columns**: Maps IDENTITY columns to AutoIncrement
- **Relationships**: Derives relationships from foreign key constraints
## Connection String Format
```
sqlserver://[user[:password]@][host][:port][/database][?query]
```
Examples:
```
sqlserver://sa:password@localhost/dbname
sqlserver://user:pass@192.168.1.100:1433/production
sqlserver://localhost/testdb?encrypt=disable
```
## Supported Constraints
- Primary Keys
- Foreign Keys (with ON DELETE and ON UPDATE actions)
- Unique Constraints
- Check Constraints
## Type Mappings
| MSSQL Type | Canonical Type |
|------------|----------------|
| INT | int |
| BIGINT | int64 |
| SMALLINT | int16 |
| TINYINT | int8 |
| BIT | bool |
| REAL | float32 |
| FLOAT | float64 |
| NUMERIC, DECIMAL | decimal |
| NVARCHAR, VARCHAR | string |
| DATETIME2 | timestamp |
| DATETIMEOFFSET | timestamptz |
| UNIQUEIDENTIFIER | uuid |
| VARBINARY | bytea |
| DATE | date |
| TIME | time |
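These mappings are implemented by the shared `pkg/mssql` converters, which the reader calls internally; a quick round-trip sketch (the expected outputs in the comments follow the table above):

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/mssql"
)

func main() {
	// MSSQL -> canonical; length/precision arguments are ignored for mapping
	fmt.Println(mssql.ConvertMSSQLToCanonical("NVARCHAR(255)"))  // string
	fmt.Println(mssql.ConvertMSSQLToCanonical("DATETIMEOFFSET")) // timestamptz

	// canonical -> MSSQL, the reverse direction
	fmt.Println(mssql.ConvertCanonicalToMSSQL("uuid")) // UNIQUEIDENTIFIER
	fmt.Println(mssql.ConvertCanonicalToMSSQL("text")) // NVARCHAR(MAX)
}
```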
## Usage
```go
import "git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
import "git.warky.dev/wdevs/relspecgo/pkg/readers"
reader := mssql.NewReader(&readers.ReaderOptions{
ConnectionString: "sqlserver://sa:password@localhost/mydb",
})
db, err := reader.ReadDatabase()
if err != nil {
panic(err)
}
// Process schema...
for _, schema := range db.Schemas {
fmt.Printf("Schema: %s\n", schema.Name)
for _, table := range schema.Tables {
fmt.Printf(" Table: %s\n", table.Name)
}
}
```
## Testing
Run tests with:
```bash
go test ./pkg/readers/mssql/...
```
For integration testing with a live MSSQL database:
```bash
docker-compose up -d mssql
go test -tags=integration ./pkg/readers/mssql/...
docker-compose down
```

View File

@@ -0,0 +1,416 @@
package mssql
import (
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// querySchemas retrieves all user-defined schemas from the database
func (r *Reader) querySchemas() ([]*models.Schema, error) {
query := `
SELECT s.name, ISNULL(ep.value, '') as description
FROM sys.schemas s
LEFT JOIN sys.extended_properties ep
ON ep.major_id = s.schema_id
AND ep.minor_id = 0
AND ep.class = 3
AND ep.name = 'MS_Description'
WHERE s.name NOT IN ('dbo', 'guest', 'INFORMATION_SCHEMA', 'sys')
ORDER BY s.name
`
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
schemas := make([]*models.Schema, 0)
for rows.Next() {
var name, description string
if err := rows.Scan(&name, &description); err != nil {
return nil, err
}
schema := models.InitSchema(name)
if description != "" {
schema.Description = description
}
schemas = append(schemas, schema)
}
// Always include the dbo schema (it is excluded from the query above)
dboSchema := models.InitSchema("dbo")
schemas = append(schemas, dboSchema)
return schemas, rows.Err()
}
// queryTables retrieves all tables for a given schema
func (r *Reader) queryTables(schemaName string) ([]*models.Table, error) {
query := `
SELECT t.table_schema, t.table_name, ISNULL(ep.value, '') as description
FROM information_schema.tables t
LEFT JOIN sys.extended_properties ep
ON ep.major_id = OBJECT_ID(QUOTENAME(t.table_schema) + '.' + QUOTENAME(t.table_name))
AND ep.minor_id = 0
AND ep.class = 1
AND ep.name = 'MS_Description'
WHERE t.table_schema = ? AND t.table_type = 'BASE TABLE'
ORDER BY t.table_name
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
tables := make([]*models.Table, 0)
for rows.Next() {
var schema, tableName, description string
if err := rows.Scan(&schema, &tableName, &description); err != nil {
return nil, err
}
table := models.InitTable(tableName, schema)
if description != "" {
table.Description = description
}
tables = append(tables, table)
}
return tables, rows.Err()
}
// queryColumns retrieves all columns for tables in a schema
// Returns map[schema.table]map[columnName]*Column
func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.Column, error) {
query := `
SELECT
c.table_schema,
c.table_name,
c.column_name,
c.ordinal_position,
c.column_default,
c.is_nullable,
c.data_type,
c.character_maximum_length,
c.numeric_precision,
c.numeric_scale,
ISNULL(ep.value, '') as description,
COLUMNPROPERTY(OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name)), c.column_name, 'IsIdentity') as is_identity
FROM information_schema.columns c
LEFT JOIN sys.extended_properties ep
ON ep.major_id = OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name))
AND ep.minor_id = COLUMNPROPERTY(OBJECT_ID(QUOTENAME(c.table_schema) + '.' + QUOTENAME(c.table_name)), c.column_name, 'ColumnId')
AND ep.class = 1
AND ep.name = 'MS_Description'
WHERE c.table_schema = ?
ORDER BY c.table_schema, c.table_name, c.ordinal_position
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
columnsMap := make(map[string]map[string]*models.Column)
for rows.Next() {
var schema, tableName, columnName, isNullable, dataType, description string
var ordinalPosition int
var columnDefault *string // column_default is character data, not numeric
var charMaxLength, numPrecision, numScale, isIdentity *int
if err := rows.Scan(&schema, &tableName, &columnName, &ordinalPosition, &columnDefault, &isNullable, &dataType, &charMaxLength, &numPrecision, &numScale, &description, &isIdentity); err != nil {
return nil, err
}
column := models.InitColumn(columnName, tableName, schema)
column.Type = r.mapDataType(dataType)
column.NotNull = (isNullable == "NO")
column.Sequence = uint(ordinalPosition)
if description != "" {
column.Description = description
}
// Check if this is an identity column (auto-increment)
if isIdentity != nil && *isIdentity == 1 {
column.AutoIncrement = true
}
if charMaxLength != nil && *charMaxLength > 0 {
column.Length = *charMaxLength
}
if numPrecision != nil && *numPrecision > 0 {
column.Precision = *numPrecision
}
if numScale != nil && *numScale > 0 {
column.Scale = *numScale
}
// Create table key
tableKey := schema + "." + tableName
if columnsMap[tableKey] == nil {
columnsMap[tableKey] = make(map[string]*models.Column)
}
columnsMap[tableKey][columnName] = column
}
return columnsMap, rows.Err()
}
// queryPrimaryKeys retrieves all primary key constraints for a schema
// Returns map[schema.table]*Constraint
func (r *Reader) queryPrimaryKeys(schemaName string) (map[string]*models.Constraint, error) {
query := `
SELECT
s.name as schema_name,
t.name as table_name,
i.name as constraint_name,
STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
FROM sys.tables t
INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_primary_key = 1
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
WHERE s.name = ?
GROUP BY s.name, t.name, i.name
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
primaryKeys := make(map[string]*models.Constraint)
for rows.Next() {
var schema, tableName, constraintName, columnsStr string
if err := rows.Scan(&schema, &tableName, &constraintName, &columnsStr); err != nil {
return nil, err
}
columns := strings.Split(columnsStr, ",")
constraint := models.InitConstraint(constraintName, models.PrimaryKeyConstraint)
constraint.Schema = schema
constraint.Table = tableName
constraint.Columns = columns
tableKey := schema + "." + tableName
primaryKeys[tableKey] = constraint
}
return primaryKeys, rows.Err()
}
// queryForeignKeys retrieves all foreign key constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryForeignKeys(schemaName string) (map[string][]*models.Constraint, error) {
query := `
SELECT
s.name as schema_name,
t.name as table_name,
fk.name as constraint_name,
rs.name as referenced_schema,
rt.name as referenced_table,
STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY fkc.constraint_column_id) as columns,
STRING_AGG(rc.name, ',') WITHIN GROUP (ORDER BY fkc.constraint_column_id) as referenced_columns,
fk.delete_referential_action_desc,
fk.update_referential_action_desc
FROM sys.foreign_keys fk
INNER JOIN sys.tables t ON fk.parent_object_id = t.object_id
INNER JOIN sys.tables rt ON fk.referenced_object_id = rt.object_id
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
INNER JOIN sys.schemas rs ON rt.schema_id = rs.schema_id
INNER JOIN sys.foreign_key_columns fkc ON fk.object_id = fkc.constraint_object_id
INNER JOIN sys.columns c ON fkc.parent_object_id = c.object_id AND fkc.parent_column_id = c.column_id
INNER JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id
WHERE s.name = ?
GROUP BY s.name, t.name, fk.name, rs.name, rt.name, fk.delete_referential_action_desc, fk.update_referential_action_desc
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
foreignKeys := make(map[string][]*models.Constraint)
for rows.Next() {
var schema, tableName, constraintName, refSchema, refTable, columnsStr, refColumnsStr, deleteAction, updateAction string
if err := rows.Scan(&schema, &tableName, &constraintName, &refSchema, &refTable, &columnsStr, &refColumnsStr, &deleteAction, &updateAction); err != nil {
return nil, err
}
columns := strings.Split(columnsStr, ",")
refColumns := strings.Split(refColumnsStr, ",")
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
constraint.Schema = schema
constraint.Table = tableName
constraint.Columns = columns
constraint.ReferencedSchema = refSchema
constraint.ReferencedTable = refTable
constraint.ReferencedColumns = refColumns
constraint.OnDelete = strings.ToUpper(deleteAction)
constraint.OnUpdate = strings.ToUpper(updateAction)
tableKey := schema + "." + tableName
foreignKeys[tableKey] = append(foreignKeys[tableKey], constraint)
}
return foreignKeys, rows.Err()
}
// queryUniqueConstraints retrieves all unique constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryUniqueConstraints(schemaName string) (map[string][]*models.Constraint, error) {
query := `
SELECT
s.name as schema_name,
t.name as table_name,
i.name as constraint_name,
STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
FROM sys.tables t
INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_unique = 1 AND i.is_primary_key = 0
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
WHERE s.name = ?
GROUP BY s.name, t.name, i.name
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
uniqueConstraints := make(map[string][]*models.Constraint)
for rows.Next() {
var schema, tableName, constraintName, columnsStr string
if err := rows.Scan(&schema, &tableName, &constraintName, &columnsStr); err != nil {
return nil, err
}
columns := strings.Split(columnsStr, ",")
constraint := models.InitConstraint(constraintName, models.UniqueConstraint)
constraint.Schema = schema
constraint.Table = tableName
constraint.Columns = columns
tableKey := schema + "." + tableName
uniqueConstraints[tableKey] = append(uniqueConstraints[tableKey], constraint)
}
return uniqueConstraints, rows.Err()
}
// queryCheckConstraints retrieves all check constraints for a schema
// Returns map[schema.table][]*Constraint
func (r *Reader) queryCheckConstraints(schemaName string) (map[string][]*models.Constraint, error) {
query := `
SELECT
s.name as schema_name,
t.name as table_name,
cc.name as constraint_name,
cc.definition
FROM sys.tables t
INNER JOIN sys.check_constraints cc ON t.object_id = cc.parent_object_id
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
WHERE s.name = ?
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
checkConstraints := make(map[string][]*models.Constraint)
for rows.Next() {
var schema, tableName, constraintName, definition string
if err := rows.Scan(&schema, &tableName, &constraintName, &definition); err != nil {
return nil, err
}
constraint := models.InitConstraint(constraintName, models.CheckConstraint)
constraint.Schema = schema
constraint.Table = tableName
constraint.Expression = definition
tableKey := schema + "." + tableName
checkConstraints[tableKey] = append(checkConstraints[tableKey], constraint)
}
return checkConstraints, rows.Err()
}
// queryIndexes retrieves all indexes for a schema
// Returns map[schema.table][]*Index
func (r *Reader) queryIndexes(schemaName string) (map[string][]*models.Index, error) {
query := `
SELECT
s.name as schema_name,
t.name as table_name,
i.name as index_name,
i.is_unique,
STRING_AGG(c.name, ',') WITHIN GROUP (ORDER BY ic.key_ordinal) as columns
FROM sys.tables t
INNER JOIN sys.indexes i ON t.object_id = i.object_id AND i.is_primary_key = 0 AND i.name IS NOT NULL
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
INNER JOIN sys.columns c ON t.object_id = c.object_id AND ic.column_id = c.column_id
WHERE s.name = ?
GROUP BY s.name, t.name, i.name, i.is_unique
`
rows, err := r.db.QueryContext(r.ctx, query, schemaName)
if err != nil {
return nil, err
}
defer rows.Close()
indexes := make(map[string][]*models.Index)
for rows.Next() {
var schema, tableName, indexName, columnsStr string
var isUnique int
if err := rows.Scan(&schema, &tableName, &indexName, &isUnique, &columnsStr); err != nil {
return nil, err
}
columns := strings.Split(columnsStr, ",")
index := models.InitIndex(indexName, tableName, schema)
index.Columns = columns
index.Unique = (isUnique == 1)
index.Type = "btree" // MSSQL uses btree by default
tableKey := schema + "." + tableName
indexes[tableKey] = append(indexes[tableKey], index)
}
return indexes, rows.Err()
}

pkg/readers/mssql/reader.go Normal file
View File

@@ -0,0 +1,266 @@
package mssql
import (
"context"
"database/sql"
"fmt"
_ "github.com/microsoft/go-mssqldb" // MSSQL driver
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/mssql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// Reader implements the readers.Reader interface for MSSQL databases
type Reader struct {
options *readers.ReaderOptions
db *sql.DB
ctx context.Context
}
// NewReader creates a new MSSQL reader
func NewReader(options *readers.ReaderOptions) *Reader {
return &Reader{
options: options,
ctx: context.Background(),
}
}
// ReadDatabase reads the entire database schema from MSSQL
func (r *Reader) ReadDatabase() (*models.Database, error) {
// Validate connection string
if r.options.ConnectionString == "" {
return nil, fmt.Errorf("connection string is required")
}
// Connect to the database
if err := r.connect(); err != nil {
return nil, fmt.Errorf("failed to connect: %w", err)
}
defer r.close()
// Get database name
var dbName string
err := r.db.QueryRowContext(r.ctx, "SELECT DB_NAME()").Scan(&dbName)
if err != nil {
return nil, fmt.Errorf("failed to get database name: %w", err)
}
// Initialize database model
db := models.InitDatabase(dbName)
db.DatabaseType = models.MSSQLDatabaseType
db.SourceFormat = "mssql"
// Get MSSQL version
var version string
err = r.db.QueryRowContext(r.ctx, "SELECT @@VERSION").Scan(&version)
if err == nil {
db.DatabaseVersion = version
}
// Query all schemas
schemas, err := r.querySchemas()
if err != nil {
return nil, fmt.Errorf("failed to query schemas: %w", err)
}
// Process each schema
for _, schema := range schemas {
// Query tables for this schema
tables, err := r.queryTables(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query tables for schema %s: %w", schema.Name, err)
}
schema.Tables = tables
// Query columns for tables
columnsMap, err := r.queryColumns(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query columns for schema %s: %w", schema.Name, err)
}
// Populate table columns
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if cols, exists := columnsMap[tableKey]; exists {
table.Columns = cols
}
}
// Query primary keys
primaryKeys, err := r.queryPrimaryKeys(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query primary keys for schema %s: %w", schema.Name, err)
}
// Apply primary keys to tables
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if pk, exists := primaryKeys[tableKey]; exists {
table.Constraints[pk.Name] = pk
// Mark columns as primary key and not null
for _, colName := range pk.Columns {
if col, colExists := table.Columns[colName]; colExists {
col.IsPrimaryKey = true
col.NotNull = true
}
}
}
}
// Query foreign keys
foreignKeys, err := r.queryForeignKeys(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query foreign keys for schema %s: %w", schema.Name, err)
}
// Apply foreign keys to tables
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if fks, exists := foreignKeys[tableKey]; exists {
for _, fk := range fks {
table.Constraints[fk.Name] = fk
// Derive relationship from foreign key
r.deriveRelationship(table, fk)
}
}
}
// Query unique constraints
uniqueConstraints, err := r.queryUniqueConstraints(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query unique constraints for schema %s: %w", schema.Name, err)
}
// Apply unique constraints to tables
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if ucs, exists := uniqueConstraints[tableKey]; exists {
for _, uc := range ucs {
table.Constraints[uc.Name] = uc
}
}
}
// Query check constraints
checkConstraints, err := r.queryCheckConstraints(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query check constraints for schema %s: %w", schema.Name, err)
}
// Apply check constraints to tables
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if ccs, exists := checkConstraints[tableKey]; exists {
for _, cc := range ccs {
table.Constraints[cc.Name] = cc
}
}
}
// Query indexes
indexes, err := r.queryIndexes(schema.Name)
if err != nil {
return nil, fmt.Errorf("failed to query indexes for schema %s: %w", schema.Name, err)
}
// Apply indexes to tables
for _, table := range schema.Tables {
tableKey := schema.Name + "." + table.Name
if idxs, exists := indexes[tableKey]; exists {
for _, idx := range idxs {
table.Indexes[idx.Name] = idx
}
}
}
// Set RefDatabase for schema
schema.RefDatabase = db
// Set RefSchema for tables
for _, table := range schema.Tables {
table.RefSchema = schema
}
// Add schema to database
db.Schemas = append(db.Schemas, schema)
}
return db, nil
}
// ReadSchema reads a single schema (returns the first schema from the database)
func (r *Reader) ReadSchema() (*models.Schema, error) {
db, err := r.ReadDatabase()
if err != nil {
return nil, err
}
if len(db.Schemas) == 0 {
return nil, fmt.Errorf("no schemas found in database")
}
return db.Schemas[0], nil
}
// ReadTable reads a single table (returns the first table from the first schema)
func (r *Reader) ReadTable() (*models.Table, error) {
schema, err := r.ReadSchema()
if err != nil {
return nil, err
}
if len(schema.Tables) == 0 {
return nil, fmt.Errorf("no tables found in schema")
}
return schema.Tables[0], nil
}
// connect establishes a connection to the MSSQL database
func (r *Reader) connect() error {
db, err := sql.Open("mssql", r.options.ConnectionString)
if err != nil {
return err
}
// Test connection
if err = db.PingContext(r.ctx); err != nil {
db.Close()
return err
}
r.db = db
return nil
}
// close closes the database connection
func (r *Reader) close() {
if r.db != nil {
r.db.Close()
}
}
// mapDataType maps MSSQL data types to canonical types
func (r *Reader) mapDataType(mssqlType string) string {
return mssql.ConvertMSSQLToCanonical(mssqlType)
}
// deriveRelationship creates a relationship from a foreign key constraint
func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint) {
relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)
relationship := models.InitRelationship(relationshipName, models.OneToMany)
relationship.FromTable = table.Name
relationship.FromSchema = table.Schema
relationship.ToTable = fk.ReferencedTable
relationship.ToSchema = fk.ReferencedSchema
relationship.ForeignKey = fk.Name
// Store constraint actions in properties
if fk.OnDelete != "" {
relationship.Properties["on_delete"] = fk.OnDelete
}
if fk.OnUpdate != "" {
relationship.Properties["on_update"] = fk.OnUpdate
}
table.Relationships[relationshipName] = relationship
}
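
Since the MSSQL reader ships without a README in this change set, here is a minimal usage sketch. It assumes the reader lives at `pkg/readers/mssql` (mirroring the SQLite reader's layout) and uses a purely illustrative connection string:

```go
package main

import (
	"fmt"
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
	"git.warky.dev/wdevs/relspecgo/pkg/readers/mssql"
)

func main() {
	// Hypothetical connection string; adjust server, credentials, and database.
	options := &readers.ReaderOptions{
		ConnectionString: "server=localhost;user id=sa;password=changeme;database=mydb",
	}
	reader := mssql.NewReader(options)
	db, err := reader.ReadDatabase()
	if err != nil {
		log.Fatal(err)
	}
	for _, schema := range db.Schemas {
		fmt.Printf("schema %s: %d tables\n", schema.Name, len(schema.Tables))
	}
}
```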

View File

@@ -0,0 +1,86 @@
package mssql
import (
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/mssql"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"github.com/stretchr/testify/assert"
)
// TestMapDataType tests MSSQL type mapping to canonical types
func TestMapDataType(t *testing.T) {
reader := NewReader(&readers.ReaderOptions{})
tests := []struct {
name string
mssqlType string
expectedType string
}{
{"INT to int", "INT", "int"},
{"BIGINT to int64", "BIGINT", "int64"},
{"BIT to bool", "BIT", "bool"},
{"NVARCHAR to string", "NVARCHAR(255)", "string"},
{"DATETIME2 to timestamp", "DATETIME2", "timestamp"},
{"DATETIMEOFFSET to timestamptz", "DATETIMEOFFSET", "timestamptz"},
{"UNIQUEIDENTIFIER to uuid", "UNIQUEIDENTIFIER", "uuid"},
{"FLOAT to float64", "FLOAT", "float64"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := reader.mapDataType(tt.mssqlType)
assert.Equal(t, tt.expectedType, result)
})
}
}
// TestConvertCanonicalToMSSQL tests canonical to MSSQL type conversion
func TestConvertCanonicalToMSSQL(t *testing.T) {
tests := []struct {
name string
canonicalType string
expectedMSSQL string
}{
{"int to INT", "int", "INT"},
{"int64 to BIGINT", "int64", "BIGINT"},
{"bool to BIT", "bool", "BIT"},
{"string to NVARCHAR(255)", "string", "NVARCHAR(255)"},
{"text to NVARCHAR(MAX)", "text", "NVARCHAR(MAX)"},
{"timestamp to DATETIME2", "timestamp", "DATETIME2"},
{"timestamptz to DATETIMEOFFSET", "timestamptz", "DATETIMEOFFSET"},
{"uuid to UNIQUEIDENTIFIER", "uuid", "UNIQUEIDENTIFIER"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := mssql.ConvertCanonicalToMSSQL(tt.canonicalType)
assert.Equal(t, tt.expectedMSSQL, result)
})
}
}
// TestConvertMSSQLToCanonical tests MSSQL to canonical type conversion
func TestConvertMSSQLToCanonical(t *testing.T) {
tests := []struct {
name string
mssqlType string
expectedType string
}{
{"INT to int", "INT", "int"},
{"BIGINT to int64", "BIGINT", "int64"},
{"BIT to bool", "BIT", "bool"},
{"NVARCHAR with params", "NVARCHAR(255)", "string"},
{"DATETIME2 to timestamp", "DATETIME2", "timestamp"},
{"DATETIMEOFFSET to timestamptz", "DATETIMEOFFSET", "timestamptz"},
{"UNIQUEIDENTIFIER to uuid", "UNIQUEIDENTIFIER", "uuid"},
{"VARBINARY to bytea", "VARBINARY(MAX)", "bytea"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := mssql.ConvertMSSQLToCanonical(tt.mssqlType)
assert.Equal(t, tt.expectedType, result)
})
}
}

View File

@@ -231,14 +231,13 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
 		}
 		column := models.InitColumn(columnName, tableName, schema)
-		column.Type = r.mapDataType(dataType, udtName)
-		column.NotNull = (isNullable == "NO")
-		column.Sequence = uint(ordinalPosition)
+		// Check if this is a serial type (has nextval default)
+		hasNextval := false
 		if columnDefault != nil {
 			// Parse default value - remove nextval for sequences
 			defaultVal := *columnDefault
 			if strings.HasPrefix(defaultVal, "nextval") {
+				hasNextval = true
 				column.AutoIncrement = true
 				column.Default = defaultVal
 			} else {
@@ -246,6 +245,11 @@ func (r *Reader) queryColumns(schemaName string) (map[string]map[string]*models.
 			}
 		}
+		// Map data type, preserving serial types when detected
+		column.Type = r.mapDataType(dataType, udtName, hasNextval)
+		column.NotNull = (isNullable == "NO")
+		column.Sequence = uint(ordinalPosition)
 		if description != nil {
 			column.Description = *description
 		}

View File

@@ -3,6 +3,7 @@ package pgsql
 import (
 	"context"
 	"fmt"
+	"strings"
 	"github.com/jackc/pgx/v5"
@@ -259,33 +260,46 @@ func (r *Reader) close() {
 }
 // mapDataType maps PostgreSQL data types to canonical types
-func (r *Reader) mapDataType(pgType, udtName string) string {
+func (r *Reader) mapDataType(pgType, udtName string, hasNextval bool) string {
+	// If the column has a nextval default, it's likely a serial type
+	// Map to the appropriate serial type instead of the base integer type
+	if hasNextval {
+		switch strings.ToLower(pgType) {
+		case "integer", "int", "int4":
+			return "serial"
+		case "bigint", "int8":
+			return "bigserial"
+		case "smallint", "int2":
+			return "smallserial"
+		}
+	}
 	// Map common PostgreSQL types
 	typeMap := map[string]string{
-		"integer":           "int",
-		"bigint":            "int64",
-		"smallint":          "int16",
-		"int":               "int",
-		"int2":              "int16",
-		"int4":              "int",
-		"int8":              "int64",
-		"serial":            "int",
-		"bigserial":         "int64",
-		"smallserial":       "int16",
-		"numeric":           "decimal",
+		"integer":           "integer",
+		"bigint":            "bigint",
+		"smallint":          "smallint",
+		"int":               "integer",
+		"int2":              "smallint",
+		"int4":              "integer",
+		"int8":              "bigint",
+		"serial":            "serial",
+		"bigserial":         "bigserial",
+		"smallserial":       "smallserial",
+		"numeric":           "numeric",
 		"decimal":           "decimal",
-		"real":              "float32",
-		"double precision":  "float64",
-		"float4":            "float32",
-		"float8":            "float64",
-		"money":             "decimal",
-		"character varying": "string",
-		"varchar":           "string",
-		"character":         "string",
-		"char":              "string",
-		"text":              "string",
-		"boolean":           "bool",
-		"bool":              "bool",
+		"real":              "real",
+		"double precision":  "double precision",
+		"float4":            "real",
+		"float8":            "double precision",
+		"money":             "money",
+		"character varying": "varchar",
+		"varchar":           "varchar",
+		"character":         "char",
+		"char":              "char",
+		"text":              "text",
+		"boolean":           "boolean",
+		"bool":              "boolean",
 		"date":              "date",
 		"time":              "time",
 		"time without time zone": "time",
@@ -329,10 +343,10 @@ func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint) {
 	relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)
 	relationship := models.InitRelationship(relationshipName, models.OneToMany)
-	relationship.FromTable = fk.ReferencedTable
-	relationship.FromSchema = fk.ReferencedSchema
-	relationship.ToTable = table.Name
-	relationship.ToSchema = table.Schema
+	relationship.FromTable = table.Name
+	relationship.FromSchema = table.Schema
+	relationship.ToTable = fk.ReferencedTable
+	relationship.ToSchema = fk.ReferencedSchema
 	relationship.ForeignKey = fk.Name
 	// Store constraint actions in properties

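For reference, a minimal sketch of what the corrected direction produces. The foreign key values below are invented for illustration; only the field names come from the diff above:

```go
// Hypothetical: an orders.user_id -> users.id foreign key, as the fixed
// deriveRelationship sees it. The FK-owning table is now the "from" side.
fk := models.InitConstraint("fk_orders_user_id", models.ForeignKeyConstraint)
fk.ReferencedSchema = "public"
fk.ReferencedTable = "users"
r.deriveRelationship(ordersTable, fk) // ordersTable.Name == "orders"
// ordersTable.Relationships["orders_to_users"].FromTable == "orders"
// ordersTable.Relationships["orders_to_users"].ToTable   == "users"
```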
View File

@@ -177,20 +177,20 @@ func TestMapDataType(t *testing.T) {
 		udtName  string
 		expected string
 	}{
-		{"integer", "int4", "int"},
-		{"bigint", "int8", "int64"},
-		{"smallint", "int2", "int16"},
-		{"character varying", "varchar", "string"},
-		{"text", "text", "string"},
-		{"boolean", "bool", "bool"},
+		{"integer", "int4", "integer"},
+		{"bigint", "int8", "bigint"},
+		{"smallint", "int2", "smallint"},
+		{"character varying", "varchar", "varchar"},
+		{"text", "text", "text"},
+		{"boolean", "bool", "boolean"},
 		{"timestamp without time zone", "timestamp", "timestamp"},
 		{"timestamp with time zone", "timestamptz", "timestamptz"},
 		{"json", "json", "json"},
 		{"jsonb", "jsonb", "jsonb"},
 		{"uuid", "uuid", "uuid"},
-		{"numeric", "numeric", "decimal"},
-		{"real", "float4", "float32"},
-		{"double precision", "float8", "float64"},
+		{"numeric", "numeric", "numeric"},
+		{"real", "float4", "real"},
+		{"double precision", "float8", "double precision"},
 		{"date", "date", "date"},
 		{"time without time zone", "time", "time"},
 		{"bytea", "bytea", "bytea"},
@@ -199,12 +199,31 @@ func TestMapDataType(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.pgType, func(t *testing.T) {
-			result := reader.mapDataType(tt.pgType, tt.udtName)
+			result := reader.mapDataType(tt.pgType, tt.udtName, false)
 			if result != tt.expected {
 				t.Errorf("mapDataType(%s, %s) = %s, expected %s", tt.pgType, tt.udtName, result, tt.expected)
 			}
 		})
 	}
+	// Test serial type detection with hasNextval=true
+	serialTests := []struct {
+		pgType   string
+		expected string
+	}{
+		{"integer", "serial"},
+		{"bigint", "bigserial"},
+		{"smallint", "smallserial"},
+	}
+	for _, tt := range serialTests {
+		t.Run(tt.pgType+"_with_nextval", func(t *testing.T) {
+			result := reader.mapDataType(tt.pgType, "", true)
+			if result != tt.expected {
+				t.Errorf("mapDataType(%s, '', true) = %s, expected %s", tt.pgType, result, tt.expected)
+			}
+		})
+	}
 }
 func TestParseIndexDefinition(t *testing.T) {
@@ -328,12 +347,12 @@ func TestDeriveRelationship(t *testing.T) {
 		t.Errorf("Expected relationship type %s, got %s", models.OneToMany, rel.Type)
 	}
-	if rel.FromTable != "users" {
-		t.Errorf("Expected FromTable 'users', got '%s'", rel.FromTable)
+	if rel.FromTable != "orders" {
+		t.Errorf("Expected FromTable 'orders', got '%s'", rel.FromTable)
 	}
-	if rel.ToTable != "orders" {
-		t.Errorf("Expected ToTable 'orders', got '%s'", rel.ToTable)
+	if rel.ToTable != "users" {
+		t.Errorf("Expected ToTable 'users', got '%s'", rel.ToTable)
 	}
 	if rel.ForeignKey != "fk_orders_user_id" {

View File

@@ -128,11 +128,7 @@ func (r *Reader) parsePrisma(content string) (*models.Database, error) {
 		if matches := enumRegex.FindStringSubmatch(trimmed); matches != nil {
 			currentBlock = "enum"
 			enumName := matches[1]
-			currentEnum = &models.Enum{
-				Name:   enumName,
-				Schema: "public",
-				Values: make([]string, 0),
-			}
+			currentEnum = models.InitEnum(enumName, "public")
 			blockContent = []string{}
 			continue
 		}

View File

@@ -93,6 +93,7 @@ fmt.Printf("Found %d scripts\n", len(schema.Scripts))
 ## Features
 - **Recursive Directory Scanning**: Automatically scans all subdirectories
+- **Symlink Skipping**: Symbolic links are automatically skipped (prevents loops and duplicates)
 - **Multiple Extensions**: Supports both `.sql` and `.pgsql` files
 - **Flexible Naming**: Extract metadata from filename patterns
 - **Error Handling**: Validates directory existence and file accessibility
@@ -153,8 +154,9 @@ go test ./pkg/readers/sqldir/
 ```
 Tests include:
-- Valid file parsing
+- Valid file parsing (underscore and hyphen formats)
 - Recursive directory scanning
+- Symlink skipping
 - Invalid filename handling
 - Empty directory handling
 - Error conditions

View File

@@ -107,11 +107,20 @@ func (r *Reader) readScripts() ([]*models.Script, error) {
 			return err
 		}
-		// Skip directories
+		// Don't process directories as files (WalkDir still descends into them recursively)
 		if d.IsDir() {
 			return nil
 		}
+		// Skip symlinks
+		info, err := d.Info()
+		if err != nil {
+			return err
+		}
+		if info.Mode()&os.ModeSymlink != 0 {
+			return nil
+		}
 		// Get filename
 		filename := d.Name()
@@ -150,13 +159,11 @@ func (r *Reader) readScripts() ([]*models.Script, error) {
 		}
 		// Create Script model
-		script := &models.Script{
-			Name:        name,
-			Description: fmt.Sprintf("SQL script from %s", relPath),
-			SQL:         string(content),
-			Priority:    priority,
-			Sequence:    uint(sequence),
-		}
+		script := models.InitScript(name)
+		script.Description = fmt.Sprintf("SQL script from %s", relPath)
+		script.SQL = string(content)
+		script.Priority = priority
+		script.Sequence = uint(sequence)
 		scripts = append(scripts, script)

View File

@@ -373,3 +373,65 @@ func TestReader_MixedFormat(t *testing.T) {
		}
	}
}
func TestReader_SkipSymlinks(t *testing.T) {
// Create temporary test directory
tempDir, err := os.MkdirTemp("", "sqldir-test-symlink-*")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
// Create a real SQL file
realFile := filepath.Join(tempDir, "1_001_real_file.sql")
if err := os.WriteFile(realFile, []byte("SELECT 1;"), 0644); err != nil {
t.Fatalf("Failed to create real file: %v", err)
}
// Create another file to link to
targetFile := filepath.Join(tempDir, "2_001_target.sql")
if err := os.WriteFile(targetFile, []byte("SELECT 2;"), 0644); err != nil {
t.Fatalf("Failed to create target file: %v", err)
}
// Create a symlink to the target file (this should be skipped)
symlinkFile := filepath.Join(tempDir, "3_001_symlink.sql")
if err := os.Symlink(targetFile, symlinkFile); err != nil {
// Skip test on systems that don't support symlinks (e.g., Windows without admin)
t.Skipf("Symlink creation not supported: %v", err)
}
// Create reader
reader := NewReader(&readers.ReaderOptions{
FilePath: tempDir,
})
// Read database
db, err := reader.ReadDatabase()
if err != nil {
t.Fatalf("ReadDatabase failed: %v", err)
}
schema := db.Schemas[0]
// Should only have 2 scripts (real_file and target), symlink should be skipped
if len(schema.Scripts) != 2 {
t.Errorf("Expected 2 scripts (symlink should be skipped), got %d", len(schema.Scripts))
}
// Verify the scripts are the real files, not the symlink
scriptNames := make(map[string]bool)
for _, script := range schema.Scripts {
scriptNames[script.Name] = true
}
if !scriptNames["real_file"] {
t.Error("Expected 'real_file' script to be present")
}
if !scriptNames["target"] {
t.Error("Expected 'target' script to be present")
}
if scriptNames["symlink"] {
t.Error("Symlink script should have been skipped but was found")
}
}

View File

@@ -0,0 +1,75 @@
# SQLite Reader
Reads database schema from SQLite database files.
## Usage
```go
import (
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/sqlite"
)
// Using file path
options := &readers.ReaderOptions{
FilePath: "path/to/database.db",
}
reader := sqlite.NewReader(options)
db, err := reader.ReadDatabase()
// Or using connection string
options := &readers.ReaderOptions{
ConnectionString: "path/to/database.db",
}
```
## Features
- Reads tables with columns and data types
- Reads views with definitions
- Reads primary keys
- Reads foreign keys with CASCADE actions
- Reads indexes (non-auto-generated)
- Maps SQLite types to canonical types
- Derives relationships from foreign keys
## SQLite Specifics
- SQLite doesn't support schemas, so the reader creates a single "main" schema
- Uses a pure-Go driver (modernc.org/sqlite) - no cgo required
- Supports both file path and connection string
- Auto-increment detection for INTEGER PRIMARY KEY columns
- Foreign keys require `PRAGMA foreign_keys = ON` to be set
## Example Schema
```sql
PRAGMA foreign_keys = ON;
CREATE TABLE users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
username VARCHAR(50) NOT NULL UNIQUE,
email VARCHAR(100) NOT NULL
);
CREATE TABLE posts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER NOT NULL,
title VARCHAR(200) NOT NULL,
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
```
## Type Mappings
| SQLite Type | Canonical Type |
|-------------|---------------|
| INTEGER, INT | int |
| BIGINT | int64 |
| REAL, DOUBLE | float64 |
| TEXT, VARCHAR | string |
| BLOB | bytea |
| BOOLEAN | bool |
| DATE | date |
| DATETIME, TIMESTAMP | timestamp |
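
A quick illustrative check of the table above. Since `mapDataType` is unexported, this sketch would live alongside the reader's own tests in package `sqlite`; the test name is invented:

```go
package sqlite

import (
	"testing"

	"git.warky.dev/wdevs/relspecgo/pkg/readers"
)

// TestTypeMappingTable mirrors the README table; parameterized types such as
// VARCHAR(50) resolve via prefix matching on the base type.
func TestTypeMappingTable(t *testing.T) {
	r := NewReader(&readers.ReaderOptions{})
	cases := map[string]string{
		"INTEGER":     "int",
		"BIGINT":      "int64",
		"REAL":        "float64",
		"VARCHAR(50)": "string", // prefix match on "VARCHAR"
		"BLOB":        "bytea",
		"BOOLEAN":     "bool",
		"DATE":        "date",
		"DATETIME":    "timestamp",
	}
	for in, want := range cases {
		if got := r.mapDataType(in); got != want {
			t.Errorf("mapDataType(%q) = %q, want %q", in, got, want)
		}
	}
}
```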

View File

@@ -0,0 +1,306 @@
package sqlite
import (
"fmt"
"strings"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// queryTables retrieves all tables from the SQLite database
func (r *Reader) queryTables() ([]*models.Table, error) {
query := `
SELECT name
FROM sqlite_master
WHERE type = 'table'
AND name NOT LIKE 'sqlite_%'
ORDER BY name
`
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
tables := make([]*models.Table, 0)
for rows.Next() {
var tableName string
if err := rows.Scan(&tableName); err != nil {
return nil, err
}
table := models.InitTable(tableName, "main")
tables = append(tables, table)
}
return tables, rows.Err()
}
// queryViews retrieves all views from the SQLite database
func (r *Reader) queryViews() ([]*models.View, error) {
query := `
SELECT name, sql
FROM sqlite_master
WHERE type = 'view'
ORDER BY name
`
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
views := make([]*models.View, 0)
for rows.Next() {
var viewName string
var sql *string
if err := rows.Scan(&viewName, &sql); err != nil {
return nil, err
}
view := models.InitView(viewName, "main")
if sql != nil {
view.Definition = *sql
}
views = append(views, view)
}
return views, rows.Err()
}
// queryColumns retrieves all columns for a given table or view
func (r *Reader) queryColumns(tableName string) (map[string]*models.Column, error) {
query := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
columns := make(map[string]*models.Column)
for rows.Next() {
var cid int
var name, dataType string
var notNull, pk int
var defaultValue *string
if err := rows.Scan(&cid, &name, &dataType, &notNull, &defaultValue, &pk); err != nil {
return nil, err
}
column := models.InitColumn(name, tableName, "main")
column.Type = r.mapDataType(strings.ToUpper(dataType))
column.NotNull = (notNull == 1)
column.IsPrimaryKey = (pk > 0)
column.Sequence = uint(cid + 1)
if defaultValue != nil {
column.Default = *defaultValue
}
// Check for autoincrement (SQLite uses INTEGER PRIMARY KEY AUTOINCREMENT)
if pk > 0 && strings.EqualFold(dataType, "INTEGER") {
column.AutoIncrement = r.isAutoIncrement(tableName, name)
}
columns[name] = column
}
return columns, rows.Err()
}
// isAutoIncrement checks if a column is autoincrement
func (r *Reader) isAutoIncrement(tableName, columnName string) bool {
// Parse the CREATE TABLE statement stored in sqlite_master for an AUTOINCREMENT clause
query := `
SELECT sql
FROM sqlite_master
WHERE type = 'table' AND name = ?
`
var sql string
err := r.db.QueryRowContext(r.ctx, query, tableName).Scan(&sql)
if err != nil {
return false
}
// Check if the SQL contains AUTOINCREMENT for this column
return strings.Contains(strings.ToUpper(sql), strings.ToUpper(columnName)+" INTEGER PRIMARY KEY AUTOINCREMENT") ||
strings.Contains(strings.ToUpper(sql), strings.ToUpper(columnName)+" INTEGER AUTOINCREMENT")
}
// queryPrimaryKey retrieves the primary key constraint for a table
func (r *Reader) queryPrimaryKey(tableName string) (*models.Constraint, error) {
query := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
var pkColumns []string
for rows.Next() {
var cid int
var name, dataType string
var notNull, pk int
var defaultValue *string
if err := rows.Scan(&cid, &name, &dataType, &notNull, &defaultValue, &pk); err != nil {
return nil, err
}
if pk > 0 {
pkColumns = append(pkColumns, name)
}
}
if len(pkColumns) == 0 {
return nil, nil
}
// Create primary key constraint
constraintName := fmt.Sprintf("%s_pkey", tableName)
constraint := models.InitConstraint(constraintName, models.PrimaryKeyConstraint)
constraint.Schema = "main"
constraint.Table = tableName
constraint.Columns = pkColumns
return constraint, rows.Err()
}
// queryForeignKeys retrieves all foreign key constraints for a table
func (r *Reader) queryForeignKeys(tableName string) ([]*models.Constraint, error) {
query := fmt.Sprintf("PRAGMA foreign_key_list(%s)", tableName)
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
// Group foreign keys by id (since composite FKs have multiple rows)
fkMap := make(map[int]*models.Constraint)
for rows.Next() {
var id, seq int
var referencedTable, fromColumn, toColumn string
var onUpdate, onDelete, match string
if err := rows.Scan(&id, &seq, &referencedTable, &fromColumn, &toColumn, &onUpdate, &onDelete, &match); err != nil {
return nil, err
}
if _, exists := fkMap[id]; !exists {
constraintName := fmt.Sprintf("%s_%s_fkey", tableName, referencedTable)
if id > 0 {
constraintName = fmt.Sprintf("%s_%s_fkey_%d", tableName, referencedTable, id)
}
constraint := models.InitConstraint(constraintName, models.ForeignKeyConstraint)
constraint.Schema = "main"
constraint.Table = tableName
constraint.ReferencedSchema = "main"
constraint.ReferencedTable = referencedTable
constraint.OnUpdate = onUpdate
constraint.OnDelete = onDelete
constraint.Columns = []string{}
constraint.ReferencedColumns = []string{}
fkMap[id] = constraint
}
// Add column to the constraint
fkMap[id].Columns = append(fkMap[id].Columns, fromColumn)
fkMap[id].ReferencedColumns = append(fkMap[id].ReferencedColumns, toColumn)
}
// Convert map to slice
foreignKeys := make([]*models.Constraint, 0, len(fkMap))
for _, fk := range fkMap {
foreignKeys = append(foreignKeys, fk)
}
return foreignKeys, rows.Err()
}
// queryIndexes retrieves all indexes for a table
func (r *Reader) queryIndexes(tableName string) ([]*models.Index, error) {
query := fmt.Sprintf("PRAGMA index_list(%s)", tableName)
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
indexes := make([]*models.Index, 0)
for rows.Next() {
var seq int
var name string
var unique int
var origin string
var partial int
if err := rows.Scan(&seq, &name, &unique, &origin, &partial); err != nil {
return nil, err
}
// Skip auto-generated indexes (origin = 'pk' for primary keys, etc.)
// origin: c = CREATE INDEX, u = UNIQUE constraint, pk = PRIMARY KEY
if origin == "pk" || origin == "u" {
continue
}
index := models.InitIndex(name, tableName, "main")
index.Unique = (unique == 1)
// Get index columns
columns, err := r.queryIndexColumns(name)
if err != nil {
return nil, err
}
index.Columns = columns
indexes = append(indexes, index)
}
return indexes, rows.Err()
}
// queryIndexColumns retrieves the columns for a specific index
func (r *Reader) queryIndexColumns(indexName string) ([]string, error) {
query := fmt.Sprintf("PRAGMA index_info(%s)", indexName)
rows, err := r.db.QueryContext(r.ctx, query)
if err != nil {
return nil, err
}
defer rows.Close()
columns := make([]string, 0)
for rows.Next() {
var seqno, cid int
var name *string
if err := rows.Scan(&seqno, &cid, &name); err != nil {
return nil, err
}
if name != nil {
columns = append(columns, *name)
}
}
return columns, rows.Err()
}

View File

@@ -0,0 +1,261 @@
package sqlite
import (
"context"
"database/sql"
"fmt"
"path/filepath"
_ "modernc.org/sqlite" // SQLite driver
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// Reader implements the readers.Reader interface for SQLite databases
type Reader struct {
options *readers.ReaderOptions
db *sql.DB
ctx context.Context
}
// NewReader creates a new SQLite reader
func NewReader(options *readers.ReaderOptions) *Reader {
return &Reader{
options: options,
ctx: context.Background(),
}
}
// ReadDatabase reads the entire database schema from SQLite
func (r *Reader) ReadDatabase() (*models.Database, error) {
// Validate file path or connection string
dbPath := r.options.FilePath
if dbPath == "" && r.options.ConnectionString != "" {
dbPath = r.options.ConnectionString
}
if dbPath == "" {
return nil, fmt.Errorf("file path or connection string is required")
}
// Connect to the database
if err := r.connect(dbPath); err != nil {
return nil, fmt.Errorf("failed to connect: %w", err)
}
defer r.close()
// Get database name from file path
dbName := filepath.Base(dbPath)
if dbName == "" {
dbName = "sqlite"
}
// Initialize database model
db := models.InitDatabase(dbName)
db.DatabaseType = models.SqlLiteDatabaseType
db.SourceFormat = "sqlite"
// Get SQLite version
var version string
err := r.db.QueryRowContext(r.ctx, "SELECT sqlite_version()").Scan(&version)
if err == nil {
db.DatabaseVersion = version
}
// SQLite doesn't have schemas, so we create a single "main" schema
schema := models.InitSchema("main")
schema.RefDatabase = db
// Query tables
tables, err := r.queryTables()
if err != nil {
return nil, fmt.Errorf("failed to query tables: %w", err)
}
schema.Tables = tables
// Query views
views, err := r.queryViews()
if err != nil {
return nil, fmt.Errorf("failed to query views: %w", err)
}
schema.Views = views
// Query columns for tables and views
for _, table := range schema.Tables {
columns, err := r.queryColumns(table.Name)
if err != nil {
return nil, fmt.Errorf("failed to query columns for table %s: %w", table.Name, err)
}
table.Columns = columns
table.RefSchema = schema
// Query primary key
pk, err := r.queryPrimaryKey(table.Name)
if err != nil {
return nil, fmt.Errorf("failed to query primary key for table %s: %w", table.Name, err)
}
if pk != nil {
table.Constraints[pk.Name] = pk
// Mark columns as primary key and not null
for _, colName := range pk.Columns {
if col, exists := table.Columns[colName]; exists {
col.IsPrimaryKey = true
col.NotNull = true
}
}
}
// Query foreign keys
foreignKeys, err := r.queryForeignKeys(table.Name)
if err != nil {
return nil, fmt.Errorf("failed to query foreign keys for table %s: %w", table.Name, err)
}
for _, fk := range foreignKeys {
table.Constraints[fk.Name] = fk
// Derive relationship from foreign key
r.deriveRelationship(table, fk)
}
// Query indexes
indexes, err := r.queryIndexes(table.Name)
if err != nil {
return nil, fmt.Errorf("failed to query indexes for table %s: %w", table.Name, err)
}
for _, idx := range indexes {
table.Indexes[idx.Name] = idx
}
}
// Query columns for views
for _, view := range schema.Views {
columns, err := r.queryColumns(view.Name)
if err != nil {
return nil, fmt.Errorf("failed to query columns for view %s: %w", view.Name, err)
}
view.Columns = columns
view.RefSchema = schema
}
// Add schema to database
db.Schemas = append(db.Schemas, schema)
return db, nil
}
// ReadSchema reads a single schema (returns the main schema from the database)
func (r *Reader) ReadSchema() (*models.Schema, error) {
db, err := r.ReadDatabase()
if err != nil {
return nil, err
}
if len(db.Schemas) == 0 {
return nil, fmt.Errorf("no schemas found in database")
}
return db.Schemas[0], nil
}
// ReadTable reads a single table (returns the first table from the schema)
func (r *Reader) ReadTable() (*models.Table, error) {
schema, err := r.ReadSchema()
if err != nil {
return nil, err
}
if len(schema.Tables) == 0 {
return nil, fmt.Errorf("no tables found in schema")
}
return schema.Tables[0], nil
}
// connect establishes a connection to the SQLite database
func (r *Reader) connect(dbPath string) error {
db, err := sql.Open("sqlite", dbPath)
if err != nil {
return err
}
r.db = db
return nil
}
// close closes the database connection
func (r *Reader) close() {
if r.db != nil {
r.db.Close()
}
}
// mapDataType maps SQLite data types to canonical types
func (r *Reader) mapDataType(sqliteType string) string {
// SQLite has a flexible type system, but we map common types
typeMap := map[string]string{
"INTEGER": "int",
"INT": "int",
"TINYINT": "int8",
"SMALLINT": "int16",
"MEDIUMINT": "int",
"BIGINT": "int64",
"UNSIGNED BIG INT": "uint64",
"INT2": "int16",
"INT8": "int64",
"REAL": "float64",
"DOUBLE": "float64",
"DOUBLE PRECISION": "float64",
"FLOAT": "float32",
"NUMERIC": "decimal",
"DECIMAL": "decimal",
"BOOLEAN": "bool",
"BOOL": "bool",
"DATE": "date",
"DATETIME": "timestamp",
"TIMESTAMP": "timestamp",
"TEXT": "string",
"VARCHAR": "string",
"CHAR": "string",
"CHARACTER": "string",
"VARYING CHARACTER": "string",
"NCHAR": "string",
"NVARCHAR": "string",
"CLOB": "text",
"BLOB": "bytea",
}
// Try exact match first
if mapped, exists := typeMap[sqliteType]; exists {
return mapped
}
	// Fall back to a prefix match so parameterized types resolve to their base
	// type (e.g., "VARCHAR(255)" -> "VARCHAR"). Uppercase the input so the
	// match is case-insensitive, and prefer the longest matching base type so
	// "INT8" is not shadowed by "INT" (map iteration order is random in Go).
	upper := strings.ToUpper(sqliteType)
	bestLen := 0
	mapped := ""
	for baseType, canonical := range typeMap {
		if strings.HasPrefix(upper, baseType) && len(baseType) > bestLen {
			bestLen = len(baseType)
			mapped = canonical
		}
	}
	if bestLen > 0 {
		return mapped
	}
// Default to string for unknown types
return "string"
}
// deriveRelationship creates a relationship from a foreign key constraint
func (r *Reader) deriveRelationship(table *models.Table, fk *models.Constraint) {
relationshipName := fmt.Sprintf("%s_to_%s", table.Name, fk.ReferencedTable)
relationship := models.InitRelationship(relationshipName, models.OneToMany)
relationship.FromTable = table.Name
relationship.FromSchema = table.Schema
relationship.ToTable = fk.ReferencedTable
relationship.ToSchema = fk.ReferencedSchema
relationship.ForeignKey = fk.Name
// Store constraint actions in properties
if fk.OnDelete != "" {
relationship.Properties["on_delete"] = fk.OnDelete
}
if fk.OnUpdate != "" {
relationship.Properties["on_update"] = fk.OnUpdate
}
table.Relationships[relationshipName] = relationship
}

View File

@@ -0,0 +1,334 @@
package sqlite
import (
"database/sql"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
)
// setupTestDatabase creates a temporary SQLite database with test data
func setupTestDatabase(t *testing.T) string {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db, err := sql.Open("sqlite", dbPath)
require.NoError(t, err)
defer db.Close()
// Create test schema
schema := `
PRAGMA foreign_keys = ON;
CREATE TABLE users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
username VARCHAR(50) NOT NULL UNIQUE,
email VARCHAR(100) NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE posts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER NOT NULL,
title VARCHAR(200) NOT NULL,
content TEXT,
published BOOLEAN DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
CREATE TABLE comments (
id INTEGER PRIMARY KEY AUTOINCREMENT,
post_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
comment TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
CREATE INDEX idx_posts_user_id ON posts(user_id);
CREATE INDEX idx_comments_post_id ON comments(post_id);
CREATE UNIQUE INDEX idx_users_email ON users(email);
CREATE VIEW user_post_count AS
SELECT u.id, u.username, COUNT(p.id) as post_count
FROM users u
LEFT JOIN posts p ON u.id = p.user_id
GROUP BY u.id, u.username;
`
_, err = db.Exec(schema)
require.NoError(t, err)
return dbPath
}
func TestReader_ReadDatabase(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
db, err := reader.ReadDatabase()
require.NoError(t, err)
require.NotNil(t, db)
// Check database metadata
assert.Equal(t, "test.db", db.Name)
assert.Equal(t, models.SqlLiteDatabaseType, db.DatabaseType)
assert.Equal(t, "sqlite", db.SourceFormat)
assert.NotEmpty(t, db.DatabaseVersion)
// Check schemas (SQLite should have a single "main" schema)
require.Len(t, db.Schemas, 1)
schema := db.Schemas[0]
assert.Equal(t, "main", schema.Name)
// Check tables
assert.Len(t, schema.Tables, 3)
tableNames := make([]string, len(schema.Tables))
for i, table := range schema.Tables {
tableNames[i] = table.Name
}
assert.Contains(t, tableNames, "users")
assert.Contains(t, tableNames, "posts")
assert.Contains(t, tableNames, "comments")
// Check views
assert.Len(t, schema.Views, 1)
assert.Equal(t, "user_post_count", schema.Views[0].Name)
assert.NotEmpty(t, schema.Views[0].Definition)
}
func TestReader_ReadTable_Users(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
db, err := reader.ReadDatabase()
require.NoError(t, err)
require.NotNil(t, db)
// Find users table
var usersTable *models.Table
for _, table := range db.Schemas[0].Tables {
if table.Name == "users" {
usersTable = table
break
}
}
require.NotNil(t, usersTable)
assert.Equal(t, "users", usersTable.Name)
assert.Equal(t, "main", usersTable.Schema)
// Check columns
assert.Len(t, usersTable.Columns, 4)
// Check id column
idCol, exists := usersTable.Columns["id"]
require.True(t, exists)
assert.Equal(t, "int", idCol.Type)
assert.True(t, idCol.IsPrimaryKey)
assert.True(t, idCol.AutoIncrement)
assert.True(t, idCol.NotNull)
// Check username column
usernameCol, exists := usersTable.Columns["username"]
require.True(t, exists)
assert.Equal(t, "string", usernameCol.Type)
assert.True(t, usernameCol.NotNull)
assert.False(t, usernameCol.IsPrimaryKey)
// Check email column
emailCol, exists := usersTable.Columns["email"]
require.True(t, exists)
assert.Equal(t, "string", emailCol.Type)
assert.True(t, emailCol.NotNull)
// Check primary key constraint
assert.Len(t, usersTable.Constraints, 1)
pkConstraint, exists := usersTable.Constraints["users_pkey"]
require.True(t, exists)
assert.Equal(t, models.PrimaryKeyConstraint, pkConstraint.Type)
assert.Equal(t, []string{"id"}, pkConstraint.Columns)
	// Check indexes (at least the explicit unique index on email; indexes
	// auto-generated for UNIQUE constraints are skipped by the reader)
assert.GreaterOrEqual(t, len(usersTable.Indexes), 1)
}
func TestReader_ReadTable_Posts(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
db, err := reader.ReadDatabase()
require.NoError(t, err)
require.NotNil(t, db)
// Find posts table
var postsTable *models.Table
for _, table := range db.Schemas[0].Tables {
if table.Name == "posts" {
postsTable = table
break
}
}
require.NotNil(t, postsTable)
// Check columns
assert.Len(t, postsTable.Columns, 6)
// Check foreign key constraint
hasForeignKey := false
for _, constraint := range postsTable.Constraints {
if constraint.Type == models.ForeignKeyConstraint {
hasForeignKey = true
assert.Equal(t, "users", constraint.ReferencedTable)
assert.Equal(t, "CASCADE", constraint.OnDelete)
}
}
assert.True(t, hasForeignKey, "Posts table should have a foreign key constraint")
// Check relationships
assert.GreaterOrEqual(t, len(postsTable.Relationships), 1)
// Check indexes
hasUserIdIndex := false
for _, index := range postsTable.Indexes {
if index.Name == "idx_posts_user_id" {
hasUserIdIndex = true
assert.Contains(t, index.Columns, "user_id")
}
}
assert.True(t, hasUserIdIndex, "Posts table should have idx_posts_user_id index")
}
func TestReader_ReadTable_Comments(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
db, err := reader.ReadDatabase()
require.NoError(t, err)
require.NotNil(t, db)
// Find comments table
var commentsTable *models.Table
for _, table := range db.Schemas[0].Tables {
if table.Name == "comments" {
commentsTable = table
break
}
}
require.NotNil(t, commentsTable)
// Check foreign key constraints (should have 2)
fkCount := 0
for _, constraint := range commentsTable.Constraints {
if constraint.Type == models.ForeignKeyConstraint {
fkCount++
}
}
assert.Equal(t, 2, fkCount, "Comments table should have 2 foreign key constraints")
}
func TestReader_ReadSchema(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
schema, err := reader.ReadSchema()
require.NoError(t, err)
require.NotNil(t, schema)
assert.Equal(t, "main", schema.Name)
assert.Len(t, schema.Tables, 3)
assert.Len(t, schema.Views, 1)
}
func TestReader_ReadTable(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
FilePath: dbPath,
}
reader := NewReader(options)
table, err := reader.ReadTable()
require.NoError(t, err)
require.NotNil(t, table)
assert.NotEmpty(t, table.Name)
assert.NotEmpty(t, table.Columns)
}
func TestReader_ConnectionString(t *testing.T) {
dbPath := setupTestDatabase(t)
defer os.Remove(dbPath)
options := &readers.ReaderOptions{
ConnectionString: dbPath,
}
reader := NewReader(options)
db, err := reader.ReadDatabase()
require.NoError(t, err)
require.NotNil(t, db)
assert.Len(t, db.Schemas, 1)
}
func TestReader_InvalidPath(t *testing.T) {
options := &readers.ReaderOptions{
FilePath: "/nonexistent/path/to/database.db",
}
reader := NewReader(options)
_, err := reader.ReadDatabase()
assert.Error(t, err)
}
func TestReader_MissingPath(t *testing.T) {
options := &readers.ReaderOptions{}
reader := NewReader(options)
_, err := reader.ReadDatabase()
assert.Error(t, err)
assert.Contains(t, err.Error(), "file path or connection string is required")
}

pkg/reflectutil/doc.go
View File

@@ -0,0 +1,36 @@
// Package reflectutil provides reflection utilities for analyzing Go code structures.
//
// # Overview
//
// The reflectutil package offers helper functions for working with Go's reflection
// capabilities, particularly for parsing Go struct definitions and extracting type
// information. This is used by readers that parse ORM model files.
//
// # Features
//
// - Struct tag parsing and extraction
// - Type information analysis
// - Field metadata extraction
// - ORM tag interpretation (GORM, Bun, etc.)
//
// # Usage
//
// This package is primarily used internally by readers like GORM and Bun to parse
// Go struct definitions and convert them to database schema models.
//
// // Example: Parse struct tags
// tags := reflectutil.ParseStructTags(field)
// columnName := tags.Get("db")
//
// # Supported ORM Tags
//
// The package understands tag conventions from:
// - GORM (gorm tag)
// - Bun (bun tag)
// - Standard database/sql (db tag)
//
// # Purpose
//
// This package enables RelSpec to read existing ORM models and convert them to
// a unified schema representation for transformation to other formats.
package reflectutil
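
A small, self-contained sketch of the helpers this package exposes (see `helpers.go` below); the struct and values are invented for illustration:

```go
package main

import (
	"fmt"

	"git.warky.dev/wdevs/relspecgo/pkg/reflectutil"
)

type column struct {
	Name string
	Type string
}

func main() {
	cols := []column{{"id", "int"}, {"email", "string"}}
	// Length and element access without knowing the concrete slice type.
	fmt.Println(reflectutil.SliceLen(cols))      // 2
	fmt.Println(reflectutil.SliceIndex(cols, 1)) // {email string}
	// Field access by name works on structs and maps alike.
	fmt.Println(reflectutil.GetFieldValue(cols[0], "Name")) // id
	// Dot-notation traversal through nested maps and structs.
	m := map[string]interface{}{"table": map[string]interface{}{"name": "users"}}
	fmt.Println(reflectutil.GetNestedValue(m, "table.name")) // users
}
```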

pkg/reflectutil/helpers.go
View File

@@ -0,0 +1,326 @@
package reflectutil
import (
"reflect"
"strings"
)
// Deref dereferences pointers until it reaches a non-pointer value
// Returns the dereferenced value and true if successful, or the original value and false if nil
func Deref(v reflect.Value) (reflect.Value, bool) {
for v.Kind() == reflect.Ptr {
if v.IsNil() {
return v, false
}
v = v.Elem()
}
return v, true
}
// DerefInterface dereferences an interface{} until it reaches a non-pointer value
func DerefInterface(i interface{}) reflect.Value {
v := reflect.ValueOf(i)
v, _ = Deref(v)
return v
}
// GetFieldValue extracts a field value from a struct, map, or pointer
// Returns nil if the field doesn't exist or can't be accessed
func GetFieldValue(item interface{}, field string) interface{} {
v := reflect.ValueOf(item)
v, ok := Deref(v)
if !ok {
return nil
}
switch v.Kind() {
case reflect.Struct:
fieldVal := v.FieldByName(field)
if fieldVal.IsValid() {
return fieldVal.Interface()
}
return nil
case reflect.Map:
keyVal := reflect.ValueOf(field)
mapVal := v.MapIndex(keyVal)
if mapVal.IsValid() {
return mapVal.Interface()
}
return nil
default:
return nil
}
}
// IsSliceOrArray checks if an interface{} is a slice or array
func IsSliceOrArray(i interface{}) bool {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return false
}
k := v.Kind()
return k == reflect.Slice || k == reflect.Array
}
// IsMap checks if an interface{} is a map
func IsMap(i interface{}) bool {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return false
}
return v.Kind() == reflect.Map
}
// SliceLen returns the length of a slice/array, or 0 if not a slice/array
func SliceLen(i interface{}) int {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return 0
}
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
return 0
}
return v.Len()
}
// MapLen returns the length of a map, or 0 if not a map
func MapLen(i interface{}) int {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return 0
}
if v.Kind() != reflect.Map {
return 0
}
return v.Len()
}
// SliceToInterfaces converts a slice/array to []interface{}
// Returns empty slice if not a slice/array
func SliceToInterfaces(i interface{}) []interface{} {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return []interface{}{}
}
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
return []interface{}{}
}
result := make([]interface{}, v.Len())
for i := 0; i < v.Len(); i++ {
result[i] = v.Index(i).Interface()
}
return result
}
// MapKeys returns all keys from a map as []interface{}
// Returns empty slice if not a map
func MapKeys(i interface{}) []interface{} {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return []interface{}{}
}
if v.Kind() != reflect.Map {
return []interface{}{}
}
keys := v.MapKeys()
result := make([]interface{}, len(keys))
for i, key := range keys {
result[i] = key.Interface()
}
return result
}
// MapValues returns all values from a map as []interface{}
// Returns empty slice if not a map
func MapValues(i interface{}) []interface{} {
v := reflect.ValueOf(i)
v, ok := Deref(v)
if !ok {
return []interface{}{}
}
if v.Kind() != reflect.Map {
return []interface{}{}
}
result := make([]interface{}, 0, v.Len())
iter := v.MapRange()
for iter.Next() {
result = append(result, iter.Value().Interface())
}
return result
}
// MapGet safely gets a value from a map by key
// Returns nil if key doesn't exist or not a map
func MapGet(m interface{}, key interface{}) interface{} {
v := reflect.ValueOf(m)
v, ok := Deref(v)
if !ok {
return nil
}
if v.Kind() != reflect.Map {
return nil
}
keyVal := reflect.ValueOf(key)
mapVal := v.MapIndex(keyVal)
if mapVal.IsValid() {
return mapVal.Interface()
}
return nil
}
// SliceIndex safely gets an element from a slice/array by index
// Returns nil if index out of bounds or not a slice/array
func SliceIndex(slice interface{}, index int) interface{} {
v := reflect.ValueOf(slice)
v, ok := Deref(v)
if !ok {
return nil
}
if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
return nil
}
if index < 0 || index >= v.Len() {
return nil
}
return v.Index(index).Interface()
}
// CompareValues compares two values for sorting
// Returns -1 if a < b, 0 if a == b, 1 if a > b
func CompareValues(a, b interface{}) int {
if a == nil && b == nil {
return 0
}
if a == nil {
return -1
}
if b == nil {
return 1
}
va := reflect.ValueOf(a)
vb := reflect.ValueOf(b)
// Handle different types
switch va.Kind() {
case reflect.String:
if vb.Kind() == reflect.String {
as := va.String()
bs := vb.String()
if as < bs {
return -1
} else if as > bs {
return 1
}
return 0
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if vb.Kind() >= reflect.Int && vb.Kind() <= reflect.Int64 {
ai := va.Int()
bi := vb.Int()
if ai < bi {
return -1
} else if ai > bi {
return 1
}
return 0
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if vb.Kind() >= reflect.Uint && vb.Kind() <= reflect.Uint64 {
au := va.Uint()
bu := vb.Uint()
if au < bu {
return -1
} else if au > bu {
return 1
}
return 0
}
case reflect.Float32, reflect.Float64:
if vb.Kind() == reflect.Float32 || vb.Kind() == reflect.Float64 {
af := va.Float()
bf := vb.Float()
if af < bf {
return -1
} else if af > bf {
return 1
}
return 0
}
}
return 0
}
// GetNestedValue gets a nested value using dot notation path
// Example: GetNestedValue(obj, "database.schema.table")
func GetNestedValue(m interface{}, path string) interface{} {
if path == "" {
return m
}
parts := strings.Split(path, ".")
current := m
for _, part := range parts {
if current == nil {
return nil
}
v := reflect.ValueOf(current)
v, ok := Deref(v)
if !ok {
return nil
}
switch v.Kind() {
case reflect.Map:
keyVal := reflect.ValueOf(part)
mapVal := v.MapIndex(keyVal)
if !mapVal.IsValid() {
return nil
}
current = mapVal.Interface()
case reflect.Struct:
fieldVal := v.FieldByName(part)
if !fieldVal.IsValid() {
return nil
}
current = fieldVal.Interface()
default:
return nil
}
}
return current
}
// DeepEqual performs a deep equality check between two values
func DeepEqual(a, b interface{}) bool {
return reflect.DeepEqual(a, b)
}

View File

@@ -0,0 +1,490 @@
package reflectutil
import (
"reflect"
"testing"
)
type testStruct struct {
Name string
Age int
Active bool
Nested *nestedStruct
Private string
}
type nestedStruct struct {
Value string
Count int
}
func TestDeref(t *testing.T) {
tests := []struct {
name string
input interface{}
wantValid bool
wantKind reflect.Kind
}{
{
name: "non-pointer int",
input: 42,
wantValid: true,
wantKind: reflect.Int,
},
{
name: "single pointer",
input: ptrInt(42),
wantValid: true,
wantKind: reflect.Int,
},
{
name: "double pointer",
input: ptrPtr(ptrInt(42)),
wantValid: true,
wantKind: reflect.Int,
},
{
name: "nil pointer",
input: (*int)(nil),
wantValid: false,
wantKind: reflect.Ptr,
},
{
name: "string",
input: "test",
wantValid: true,
wantKind: reflect.String,
},
{
name: "struct",
input: testStruct{Name: "test"},
wantValid: true,
wantKind: reflect.Struct,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
v := reflect.ValueOf(tt.input)
got, valid := Deref(v)
if valid != tt.wantValid {
t.Errorf("Deref() valid = %v, want %v", valid, tt.wantValid)
}
if got.Kind() != tt.wantKind {
t.Errorf("Deref() kind = %v, want %v", got.Kind(), tt.wantKind)
}
})
}
}
func TestDerefInterface(t *testing.T) {
i := 42
pi := &i
ppi := &pi
tests := []struct {
name string
input interface{}
wantKind reflect.Kind
}{
{"int", 42, reflect.Int},
{"pointer to int", &i, reflect.Int},
{"double pointer to int", ppi, reflect.Int},
{"string", "test", reflect.String},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := DerefInterface(tt.input)
if got.Kind() != tt.wantKind {
t.Errorf("DerefInterface() kind = %v, want %v", got.Kind(), tt.wantKind)
}
})
}
}
func TestGetFieldValue(t *testing.T) {
ts := testStruct{
Name: "John",
Age: 30,
Active: true,
Nested: &nestedStruct{Value: "nested", Count: 5},
}
tests := []struct {
name string
item interface{}
field string
want interface{}
}{
{"struct field Name", ts, "Name", "John"},
{"struct field Age", ts, "Age", 30},
{"struct field Active", ts, "Active", true},
{"struct non-existent field", ts, "NonExistent", nil},
{"pointer to struct", &ts, "Name", "John"},
{"map string key", map[string]string{"key": "value"}, "key", "value"},
{"map int key", map[string]int{"count": 42}, "count", 42},
{"map non-existent key", map[string]string{"key": "value"}, "missing", nil},
{"nil pointer", (*testStruct)(nil), "Name", nil},
{"non-struct non-map", 42, "field", nil},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := GetFieldValue(tt.item, tt.field)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("GetFieldValue() = %v, want %v", got, tt.want)
}
})
}
}
func TestIsSliceOrArray(t *testing.T) {
arr := [3]int{1, 2, 3}
tests := []struct {
name string
input interface{}
want bool
}{
{"slice", []int{1, 2, 3}, true},
{"array", arr, true},
{"pointer to slice", &[]int{1, 2, 3}, true},
{"string", "test", false},
{"int", 42, false},
{"map", map[string]int{}, false},
{"nil slice", ([]int)(nil), true}, // nil slice is still Kind==Slice
{"nil pointer", (*[]int)(nil), false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := IsSliceOrArray(tt.input)
if got != tt.want {
t.Errorf("IsSliceOrArray() = %v, want %v", got, tt.want)
}
})
}
}
func TestIsMap(t *testing.T) {
tests := []struct {
name string
input interface{}
want bool
}{
{"map[string]int", map[string]int{"a": 1}, true},
{"map[int]string", map[int]string{1: "a"}, true},
{"pointer to map", &map[string]int{"a": 1}, true},
{"slice", []int{1, 2, 3}, false},
{"string", "test", false},
{"int", 42, false},
{"nil map", (map[string]int)(nil), true}, // nil map is still Kind==Map
{"nil pointer", (*map[string]int)(nil), false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := IsMap(tt.input)
if got != tt.want {
t.Errorf("IsMap() = %v, want %v", got, tt.want)
}
})
}
}
func TestSliceLen(t *testing.T) {
arr := [3]int{1, 2, 3}
tests := []struct {
name string
input interface{}
want int
}{
{"slice length 3", []int{1, 2, 3}, 3},
{"empty slice", []int{}, 0},
{"array length 3", arr, 3},
{"pointer to slice", &[]int{1, 2, 3}, 3},
{"not a slice", "test", 0},
{"int", 42, 0},
{"nil slice", ([]int)(nil), 0},
{"nil pointer", (*[]int)(nil), 0},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SliceLen(tt.input)
if got != tt.want {
t.Errorf("SliceLen() = %v, want %v", got, tt.want)
}
})
}
}
func TestMapLen(t *testing.T) {
tests := []struct {
name string
input interface{}
want int
}{
{"map length 2", map[string]int{"a": 1, "b": 2}, 2},
{"empty map", map[string]int{}, 0},
{"pointer to map", &map[string]int{"a": 1}, 1},
{"not a map", []int{1, 2, 3}, 0},
{"string", "test", 0},
{"nil map", (map[string]int)(nil), 0},
{"nil pointer", (*map[string]int)(nil), 0},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := MapLen(tt.input)
if got != tt.want {
t.Errorf("MapLen() = %v, want %v", got, tt.want)
}
})
}
}
func TestSliceToInterfaces(t *testing.T) {
tests := []struct {
name string
input interface{}
want []interface{}
}{
{"int slice", []int{1, 2, 3}, []interface{}{1, 2, 3}},
{"string slice", []string{"a", "b"}, []interface{}{"a", "b"}},
{"empty slice", []int{}, []interface{}{}},
{"pointer to slice", &[]int{1, 2}, []interface{}{1, 2}},
{"not a slice", "test", []interface{}{}},
{"nil slice", ([]int)(nil), []interface{}{}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SliceToInterfaces(tt.input)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("SliceToInterfaces() = %v, want %v", got, tt.want)
}
})
}
}
func TestMapKeys(t *testing.T) {
tests := []struct {
name string
input interface{}
want []interface{}
}{
{"map with keys", map[string]int{"a": 1, "b": 2}, []interface{}{"a", "b"}},
{"empty map", map[string]int{}, []interface{}{}},
{"not a map", []int{1, 2, 3}, []interface{}{}},
{"nil map", (map[string]int)(nil), []interface{}{}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := MapKeys(tt.input)
if len(got) != len(tt.want) {
t.Errorf("MapKeys() length = %v, want %v", len(got), len(tt.want))
}
// For maps, order is not guaranteed, so just check length
})
}
}
func TestMapValues(t *testing.T) {
tests := []struct {
name string
input interface{}
want int // length of values
}{
{"map with values", map[string]int{"a": 1, "b": 2}, 2},
{"empty map", map[string]int{}, 0},
{"not a map", []int{1, 2, 3}, 0},
{"nil map", (map[string]int)(nil), 0},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := MapValues(tt.input)
if len(got) != tt.want {
t.Errorf("MapValues() length = %v, want %v", len(got), tt.want)
}
})
}
}
func TestMapGet(t *testing.T) {
m := map[string]int{"a": 1, "b": 2}
tests := []struct {
name string
input interface{}
key interface{}
want interface{}
}{
{"existing key", m, "a", 1},
{"existing key b", m, "b", 2},
{"non-existing key", m, "c", nil},
{"pointer to map", &m, "a", 1},
{"not a map", []int{1, 2}, 0, nil},
{"nil map", (map[string]int)(nil), "a", nil},
{"nil pointer", (*map[string]int)(nil), "a", nil},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := MapGet(tt.input, tt.key)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("MapGet() = %v, want %v", got, tt.want)
}
})
}
}
func TestSliceIndex(t *testing.T) {
s := []int{10, 20, 30}
tests := []struct {
name string
slice interface{}
index int
want interface{}
}{
{"index 0", s, 0, 10},
{"index 1", s, 1, 20},
{"index 2", s, 2, 30},
{"negative index", s, -1, nil},
{"out of bounds", s, 5, nil},
{"pointer to slice", &s, 1, 20},
{"not a slice", "test", 0, nil},
{"nil slice", ([]int)(nil), 0, nil},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := SliceIndex(tt.slice, tt.index)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("SliceIndex() = %v, want %v", got, tt.want)
}
})
}
}
func TestCompareValues(t *testing.T) {
tests := []struct {
name string
a interface{}
b interface{}
want int
}{
{"both nil", nil, nil, 0},
{"a nil", nil, 5, -1},
{"b nil", 5, nil, 1},
{"equal strings", "abc", "abc", 0},
{"a less than b strings", "abc", "xyz", -1},
{"a greater than b strings", "xyz", "abc", 1},
{"equal ints", 5, 5, 0},
{"a less than b ints", 3, 7, -1},
{"a greater than b ints", 10, 5, 1},
{"equal floats", 3.14, 3.14, 0},
{"a less than b floats", 2.5, 5.5, -1},
{"a greater than b floats", 10.5, 5.5, 1},
{"equal uints", uint(5), uint(5), 0},
{"different types", "abc", 123, 0},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := CompareValues(tt.a, tt.b)
if got != tt.want {
t.Errorf("CompareValues(%v, %v) = %v, want %v", tt.a, tt.b, got, tt.want)
}
})
}
}
func TestGetNestedValue(t *testing.T) {
nested := map[string]interface{}{
"level1": map[string]interface{}{
"level2": map[string]interface{}{
"value": "deep",
},
},
}
ts := testStruct{
Name: "John",
Nested: &nestedStruct{
Value: "nested value",
Count: 42,
},
}
tests := []struct {
name string
input interface{}
path string
want interface{}
}{
{"empty path", nested, "", nested},
{"single level map", nested, "level1", nested["level1"]},
{"nested map", nested, "level1.level2", map[string]interface{}{"value": "deep"}},
{"deep nested map", nested, "level1.level2.value", "deep"},
{"struct field", ts, "Name", "John"},
{"nested struct field", ts, "Nested", ts.Nested},
{"non-existent path", nested, "missing.path", nil},
{"nil input", nil, "path", nil},
{"partial missing path", nested, "level1.missing", nil},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := GetNestedValue(tt.input, tt.path)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("GetNestedValue() = %v, want %v", got, tt.want)
}
})
}
}
func TestDeepEqual(t *testing.T) {
tests := []struct {
name string
a interface{}
b interface{}
want bool
}{
{"equal ints", 42, 42, true},
{"different ints", 42, 43, false},
{"equal strings", "test", "test", true},
{"different strings", "test", "other", false},
{"equal slices", []int{1, 2, 3}, []int{1, 2, 3}, true},
{"different slices", []int{1, 2, 3}, []int{1, 2, 4}, false},
{"equal maps", map[string]int{"a": 1}, map[string]int{"a": 1}, true},
{"different maps", map[string]int{"a": 1}, map[string]int{"a": 2}, false},
{"both nil", nil, nil, true},
{"one nil", nil, 42, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := DeepEqual(tt.a, tt.b)
if got != tt.want {
t.Errorf("DeepEqual(%v, %v) = %v, want %v", tt.a, tt.b, got, tt.want)
}
})
}
}
// Helper functions
func ptrInt(i int) *int {
return &i
}
func ptrPtr(p *int) **int {
return &p
}

pkg/transform/doc.go
View File

@@ -0,0 +1,34 @@
// Package transform provides validation and transformation utilities for database models.
//
// # Overview
//
// The transform package contains a Transformer type that provides methods for validating
// and normalizing database schemas. It ensures schema correctness and consistency across
// different format conversions.
//
// # Features
//
// - Database validation (structure and naming conventions)
// - Schema validation (completeness and integrity)
// - Table validation (column definitions and constraints)
// - Data type normalization
//
// # Usage
//
// transformer := transform.NewTransformer()
// err := transformer.ValidateDatabase(db)
// if err != nil {
// log.Fatal("Invalid database schema:", err)
// }
//
// # Validation Scope
//
// The transformer validates:
// - Required fields presence
// - Naming convention adherence
// - Data type compatibility
// - Constraint consistency
// - Relationship integrity
//
// Note: Some validation methods are currently stubs and will be implemented as needed.
package transform
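
For context, a minimal end-to-end sketch of the validation flow described above. Only `NewTransformer` and `ValidateDatabase` are taken from this package's documentation (and some validators may still be stubs); the in-memory database is built with the `models.Init*` constructors seen elsewhere in this change set:

```go
package main

import (
	"log"

	"git.warky.dev/wdevs/relspecgo/pkg/models"
	"git.warky.dev/wdevs/relspecgo/pkg/transform"
)

func main() {
	// Build a trivial in-memory database to validate.
	db := models.InitDatabase("demo")
	schema := models.InitSchema("public")
	schema.Tables = append(schema.Tables, models.InitTable("users", "public"))
	db.Schemas = append(db.Schemas, schema)

	transformer := transform.NewTransformer()
	if err := transformer.ValidateDatabase(db); err != nil {
		log.Fatal("Invalid database schema:", err)
	}
	log.Println("schema is valid")
}
```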

pkg/ui/column_dataops.go
View File

@@ -0,0 +1,95 @@
package ui
import "git.warky.dev/wdevs/relspecgo/pkg/models"
// Column data operations - business logic for column management
// CreateColumn creates a new column and adds it to a table
func (se *SchemaEditor) CreateColumn(schemaIndex, tableIndex int, name, dataType string, isPrimaryKey, isNotNull bool) *models.Column {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return nil
}
if table.Columns == nil {
table.Columns = make(map[string]*models.Column)
}
newColumn := &models.Column{
Name: name,
Type: dataType,
IsPrimaryKey: isPrimaryKey,
NotNull: isNotNull,
}
table.UpdateDate()
table.Columns[name] = newColumn
return newColumn
}
// UpdateColumn updates an existing column's properties
func (se *SchemaEditor) UpdateColumn(schemaIndex, tableIndex int, oldName, newName, dataType string, isPrimaryKey, isNotNull bool, defaultValue interface{}, description string) bool {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return false
}
column, exists := table.Columns[oldName]
if !exists {
return false
}
table.UpdateDate()
// If the name changed, re-key the existing column under the new name
if oldName != newName {
delete(table.Columns, oldName)
column.Name = newName
table.Columns[newName] = column
}
// Update properties
column.Type = dataType
column.IsPrimaryKey = isPrimaryKey
column.NotNull = isNotNull
column.Default = defaultValue
column.Description = description
return true
}
// DeleteColumn removes a column from a table
func (se *SchemaEditor) DeleteColumn(schemaIndex, tableIndex int, columnName string) bool {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return false
}
if _, exists := table.Columns[columnName]; !exists {
return false
}
table.UpdateDate()
delete(table.Columns, columnName)
return true
}
// GetColumn returns a column by name
func (se *SchemaEditor) GetColumn(schemaIndex, tableIndex int, columnName string) *models.Column {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return nil
}
return table.Columns[columnName]
}
// GetAllColumns returns all columns in a table
func (se *SchemaEditor) GetAllColumns(schemaIndex, tableIndex int) map[string]*models.Column {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return nil
}
return table.Columns
}
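
Together these methods form a small CRUD API over table.Columns. A usage sketch, assuming db is a *models.Database that already contains schema 0 with table 0; the column names are placeholders:

editor := ui.NewSchemaEditor(db)
// Create, rename, and finally delete a column through the dataops layer.
if col := editor.CreateColumn(0, 0, "email", "VARCHAR(255)", false, true); col != nil {
	editor.UpdateColumn(0, 0, "email", "email_address", "VARCHAR(255)", false, true, nil, "User email")
	editor.DeleteColumn(0, 0, "email_address")
}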

pkg/ui/column_screens.go Normal file

@@ -0,0 +1,214 @@
package ui
import (
"fmt"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// showColumnEditor shows editor for a specific column
func (se *SchemaEditor) showColumnEditor(schemaIndex, tableIndex, colIndex int, column *models.Column) {
form := tview.NewForm()
// Store original name to handle renames
originalName := column.Name
// Local variables to collect changes
newName := column.Name
newType := column.Type
newIsPK := column.IsPrimaryKey
newIsNotNull := column.NotNull
newDefault := column.Default
newDescription := column.Description
newGUID := column.GUID
// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
columnTypes := []string{
// Numeric Types
"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
// Character Types
"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
// Boolean
"BOOLEAN", "BOOL", "BIT",
// Date/Time Types
"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
"DATETIME", "DATETIME2", "DATEFIRST",
// UUID and JSON
"UUID", "GUID", "JSON", "JSONB",
// Binary Types
"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
// PostgreSQL Special Types
"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
// Array Types
"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
// MySQL Specific
"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
// SQL Server Specific
"MONEY", "SMALLMONEY", "SQL_VARIANT",
}
selectedTypeIndex := 0
// Add existing type if not already in the list
typeExists := false
for i, opt := range columnTypes {
if opt == column.Type {
selectedTypeIndex = i
typeExists = true
break
}
}
if !typeExists && column.Type != "" {
columnTypes = append(columnTypes, column.Type)
selectedTypeIndex = len(columnTypes) - 1
}
form.AddInputField("Column Name", column.Name, 40, nil, func(value string) {
newName = value
})
form.AddDropDown("Type", columnTypes, selectedTypeIndex, func(option string, index int) {
newType = option
})
form.AddCheckbox("Primary Key", column.IsPrimaryKey, func(checked bool) {
newIsPK = checked
})
form.AddCheckbox("Not Null", column.NotNull, func(checked bool) {
newIsNotNull = checked
})
defaultStr := ""
if column.Default != nil {
defaultStr = fmt.Sprintf("%v", column.Default)
}
form.AddInputField("Default Value", defaultStr, 40, nil, func(value string) {
newDefault = value
})
form.AddTextArea("Description", column.Description, 40, 5, 0, func(value string) {
newDescription = value
})
form.AddInputField("GUID", column.GUID, 40, nil, func(value string) {
newGUID = value
})
form.AddButton("Save", func() {
// Apply changes using dataops; only touch the map entry if the update
// succeeded, otherwise Columns[newName] may be nil and the GUID
// assignment would panic.
if se.UpdateColumn(schemaIndex, tableIndex, originalName, newName, newType, newIsPK, newIsNotNull, newDefault, newDescription) {
se.db.Schemas[schemaIndex].Tables[tableIndex].Columns[newName].GUID = newGUID
}
se.pages.RemovePage("column-editor")
se.pages.SwitchToPage("table-editor")
})
form.AddButton("Delete", func() {
se.showDeleteColumnConfirm(schemaIndex, tableIndex, originalName)
})
form.AddButton("Back", func() {
// Discard changes - don't apply them
se.pages.RemovePage("column-editor")
se.pages.SwitchToPage("table-editor")
})
form.SetBorder(true).SetTitle(" Edit Column ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("column-editor", "table-editor")
return nil
}
return event
})
se.pages.AddPage("column-editor", form, true, true)
}
// showNewColumnDialog shows dialog to create a new column
func (se *SchemaEditor) showNewColumnDialog(schemaIndex, tableIndex int) {
form := tview.NewForm()
columnName := ""
dataType := "VARCHAR(255)"
// Column type options: PostgreSQL, MySQL, SQL Server, and common SQL types
columnTypes := []string{
// Numeric Types
"SMALLINT", "INTEGER", "BIGINT", "INT", "TINYINT", "FLOAT", "REAL", "DOUBLE PRECISION",
"DECIMAL(10,2)", "NUMERIC", "DECIMAL", "NUMERIC(10,2)",
// Character Types
"CHAR", "VARCHAR", "VARCHAR(255)", "TEXT", "NCHAR", "NVARCHAR", "NVARCHAR(255)",
// Boolean
"BOOLEAN", "BOOL", "BIT",
// Date/Time Types
"DATE", "TIME", "TIMESTAMP", "TIMESTAMP WITH TIME ZONE", "INTERVAL",
"DATETIME", "DATETIME2", "DATEFIRST",
// UUID and JSON
"UUID", "GUID", "JSON", "JSONB",
// Binary Types
"BYTEA", "BLOB", "IMAGE", "VARBINARY", "VARBINARY(MAX)", "BINARY",
// PostgreSQL Special Types
"int4range", "int8range", "numrange", "tsrange", "tstzrange", "daterange",
"HSTORE", "CITEXT", "INET", "MACADDR", "POINT", "LINE", "LSEG", "BOX", "PATH", "POLYGON", "CIRCLE",
// Array Types
"INTEGER ARRAY", "VARCHAR ARRAY", "TEXT ARRAY", "BIGINT ARRAY",
// MySQL Specific
"MEDIUMINT", "DOUBLE", "FLOAT(10,2)",
// SQL Server Specific
"MONEY", "SMALLMONEY", "SQL_VARIANT",
}
selectedTypeIndex := 0
form.AddInputField("Column Name", "", 40, nil, func(value string) {
columnName = value
})
form.AddDropDown("Data Type", columnTypes, selectedTypeIndex, func(option string, index int) {
dataType = option
})
form.AddCheckbox("Primary Key", false, nil)
form.AddCheckbox("Not Null", false, nil)
form.AddCheckbox("Unique", false, nil)
form.AddButton("Save", func() {
if columnName == "" {
return
}
// Get form values
isPK := form.GetFormItemByLabel("Primary Key").(*tview.Checkbox).IsChecked()
isNotNull := form.GetFormItemByLabel("Not Null").(*tview.Checkbox).IsChecked()
se.CreateColumn(schemaIndex, tableIndex, columnName, dataType, isPK, isNotNull)
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
se.pages.RemovePage("new-column")
se.pages.RemovePage("table-editor")
se.showTableEditor(schemaIndex, tableIndex, table)
})
form.AddButton("Back", func() {
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
se.pages.RemovePage("new-column")
se.pages.RemovePage("table-editor")
se.showTableEditor(schemaIndex, tableIndex, table)
})
form.SetBorder(true).SetTitle(" New Column ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("new-column", "table-editor")
return nil
}
return event
})
se.pages.AddPage("new-column", form, true, true)
}
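
Note that the columnTypes slice above is duplicated verbatim between showColumnEditor and showNewColumnDialog. A package-level variable, sketched here as a possible follow-up rather than part of this change, would keep the two dropdowns from drifting apart:

// defaultColumnTypes backs both the column editor and the new-column dialog;
// the full option list is the one shown above.
var defaultColumnTypes = []string{
	"SMALLINT", "INTEGER", "BIGINT", "INT", // ...remaining options unchanged
}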


@@ -0,0 +1,15 @@
package ui
import (
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// updateDatabase updates database properties
func (se *SchemaEditor) updateDatabase(name, description, comment, dbType, dbVersion string) {
se.db.Name = name
se.db.Description = description
se.db.Comment = comment
se.db.DatabaseType = models.DatabaseType(dbType)
se.db.DatabaseVersion = dbVersion
se.db.UpdateDate()
}


@@ -0,0 +1,78 @@
package ui
import (
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// showEditDatabaseForm displays a dialog to edit database properties
func (se *SchemaEditor) showEditDatabaseForm() {
form := tview.NewForm()
dbName := se.db.Name
dbDescription := se.db.Description
dbComment := se.db.Comment
dbType := string(se.db.DatabaseType)
dbVersion := se.db.DatabaseVersion
dbGUID := se.db.GUID
// Database type options
dbTypeOptions := []string{"pgsql", "mssql", "sqlite"}
selectedTypeIndex := 0
for i, opt := range dbTypeOptions {
if opt == dbType {
selectedTypeIndex = i
break
}
}
form.AddInputField("Database Name", dbName, 40, nil, func(value string) {
dbName = value
})
form.AddInputField("Description", dbDescription, 50, nil, func(value string) {
dbDescription = value
})
form.AddInputField("Comment", dbComment, 50, nil, func(value string) {
dbComment = value
})
form.AddDropDown("Database Type", dbTypeOptions, selectedTypeIndex, func(option string, index int) {
dbType = option
})
form.AddInputField("Database Version", dbVersion, 20, nil, func(value string) {
dbVersion = value
})
form.AddInputField("GUID", dbGUID, 40, nil, func(value string) {
dbGUID = value
})
form.AddButton("Save", func() {
if dbName == "" {
return
}
se.updateDatabase(dbName, dbDescription, dbComment, dbType, dbVersion)
se.db.GUID = dbGUID
se.pages.RemovePage("edit-database")
se.pages.RemovePage("main")
se.pages.AddPage("main", se.createMainMenu(), true, true)
})
form.AddButton("Back", func() {
se.pages.RemovePage("edit-database")
})
form.SetBorder(true).SetTitle(" Edit Database ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("edit-database", "main")
return nil
}
return event
})
se.pages.AddPage("edit-database", form, true, true)
}

pkg/ui/dialogs.go Normal file

@@ -0,0 +1,139 @@
package ui
import (
"fmt"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// showExitConfirmation shows a confirmation dialog when trying to exit without saving
func (se *SchemaEditor) showExitConfirmation(pageToRemove, pageToSwitchTo string) {
modal := tview.NewModal().
SetText("Exit without saving changes?").
AddButtons([]string{"Cancel", "No, exit without saving"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "No, exit without saving" {
se.pages.RemovePage(pageToRemove)
se.pages.SwitchToPage(pageToSwitchTo)
}
se.pages.RemovePage("exit-confirm")
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("exit-confirm")
return nil
}
return event
})
se.pages.AddPage("exit-confirm", modal, true, true)
}
// showExitEditorConfirm shows confirmation dialog when trying to exit the entire editor
func (se *SchemaEditor) showExitEditorConfirm() {
modal := tview.NewModal().
SetText("Exit RelSpec Editor? Press ESC again to confirm.").
AddButtons([]string{"Cancel", "Exit"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Exit" {
se.app.Stop()
}
se.pages.RemovePage("exit-editor-confirm")
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.app.Stop()
return nil
}
return event
})
se.pages.AddPage("exit-editor-confirm", modal, true, true)
}
// showDeleteSchemaConfirm shows confirmation dialog for schema deletion
func (se *SchemaEditor) showDeleteSchemaConfirm(schemaIndex int) {
modal := tview.NewModal().
SetText(fmt.Sprintf("Delete schema '%s'? This will delete all tables in this schema.",
se.db.Schemas[schemaIndex].Name)).
AddButtons([]string{"Cancel", "Delete"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Delete" {
se.DeleteSchema(schemaIndex)
se.pages.RemovePage("schema-editor")
se.pages.RemovePage("schemas")
se.showSchemaList()
}
se.pages.RemovePage("confirm-delete-schema")
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("confirm-delete-schema")
return nil
}
return event
})
se.pages.AddPage("confirm-delete-schema", modal, true, true)
}
// showDeleteTableConfirm shows confirmation dialog for table deletion
func (se *SchemaEditor) showDeleteTableConfirm(schemaIndex, tableIndex int) {
table := se.db.Schemas[schemaIndex].Tables[tableIndex]
modal := tview.NewModal().
SetText(fmt.Sprintf("Delete table '%s'? This action cannot be undone.",
table.Name)).
AddButtons([]string{"Cancel", "Delete"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Delete" {
se.DeleteTable(schemaIndex, tableIndex)
schema := se.db.Schemas[schemaIndex]
se.pages.RemovePage("table-editor")
se.pages.RemovePage("schema-editor")
se.showSchemaEditor(schemaIndex, schema)
}
se.pages.RemovePage("confirm-delete-table")
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("confirm-delete-table")
return nil
}
return event
})
se.pages.AddPage("confirm-delete-table", modal, true, true)
}
// showDeleteColumnConfirm shows confirmation dialog for column deletion
func (se *SchemaEditor) showDeleteColumnConfirm(schemaIndex, tableIndex int, columnName string) {
modal := tview.NewModal().
SetText(fmt.Sprintf("Delete column '%s'? This action cannot be undone.",
columnName)).
AddButtons([]string{"Cancel", "Delete"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Delete" {
se.DeleteColumn(schemaIndex, tableIndex, columnName)
se.pages.RemovePage("column-editor")
se.pages.RemovePage("confirm-delete-column")
se.pages.SwitchToPage("table-editor")
} else {
se.pages.RemovePage("confirm-delete-column")
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("confirm-delete-column")
return nil
}
return event
})
se.pages.AddPage("confirm-delete-column", modal, true, true)
}
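
Each dialog above repeats the same modal boilerplate: SetText, AddButtons, SetDoneFunc, and an Escape capture. A generic helper along these lines (a sketch, not part of this change) would collapse the four confirm dialogs into one call site each:

// confirm shows a Cancel/<action> modal on the given page and runs onConfirm
// only when the action button is chosen; Escape or Cancel just dismisses it.
func (se *SchemaEditor) confirm(page, text, action string, onConfirm func()) {
	modal := tview.NewModal().
		SetText(text).
		AddButtons([]string{"Cancel", action}).
		SetDoneFunc(func(buttonIndex int, buttonLabel string) {
			se.pages.RemovePage(page)
			if buttonLabel == action {
				onConfirm()
			}
		})
	modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			se.pages.RemovePage(page)
			return nil
		}
		return event
	})
	se.pages.AddPage(page, modal, true, true)
}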

pkg/ui/doc.go Normal file

@@ -0,0 +1,57 @@
// Package ui provides an interactive terminal user interface (TUI) for editing database schemas.
//
// # Overview
//
// The ui package implements a full-featured terminal-based schema editor using tview,
// allowing users to visually create, modify, and manage database schemas without writing
// code or SQL.
//
// # Features
//
// The schema editor supports:
// - Database management: Edit name, description, and properties
// - Schema management: Create, edit, delete schemas
// - Table management: Create, edit, delete tables
// - Column management: Add, modify, delete columns with full property support
// - Relationship management: Define and edit table relationships
// - Domain management: Organize tables into logical domains
// - Import & merge: Combine schemas from multiple sources
// - Save: Export to any supported format
//
// # Architecture
//
// The package is organized into several components:
// - editor.go: Main editor and application lifecycle
// - *_screens.go: UI screens for each entity type
// - *_dataops.go: Business logic and data operations
// - dialogs.go: Reusable dialog components
// - load_save_screens.go: File I/O and format selection
// - main_menu.go: Primary navigation menu
//
// # Usage
//
// editor := ui.NewSchemaEditor(database)
// if err := editor.Run(); err != nil {
// log.Fatal(err)
// }
//
// Or with pre-configured load/save options:
//
// editor := ui.NewSchemaEditorWithConfigs(database, loadConfig, saveConfig)
// if err := editor.Run(); err != nil {
// log.Fatal(err)
// }
//
// # Navigation
//
// - Arrow keys: Navigate between items
// - Enter: Select/edit item
// - Tab/Shift+Tab: Navigate between buttons
// - Escape: Go back/cancel
// - Letter shortcuts: Quick actions (e.g., 'n' for new, 'e' for edit, 'd' for delete)
//
// # Integration
//
// The editor integrates with all readers and writers, supporting load/save operations
// for any format supported by RelSpec (DBML, PostgreSQL, GORM, Prisma, etc.).
package ui
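
A concrete sketch of the pre-configured variant, using the LoadConfig and SaveConfig types from load_save_screens.go; the format names and paths are placeholders:

loadCfg := &ui.LoadConfig{SourceType: "dbml", FilePath: "~/schemas/mydb.dbml"}
saveCfg := &ui.SaveConfig{TargetType: "json", FilePath: "./mydb.json"}
editor := ui.NewSchemaEditorWithConfigs(database, loadCfg, saveCfg)
if err := editor.Run(); err != nil {
	log.Fatal(err)
}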

pkg/ui/domain_dataops.go Normal file

@@ -0,0 +1,35 @@
package ui
import (
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// createDomain creates a new domain
func (se *SchemaEditor) createDomain(name, description string) {
domain := &models.Domain{
Name: name,
Description: description,
Tables: make([]*models.DomainTable, 0),
Sequence: uint(len(se.db.Domains)),
}
se.db.Domains = append(se.db.Domains, domain)
se.showDomainList()
}
// updateDomain updates an existing domain
func (se *SchemaEditor) updateDomain(index int, name, description string) {
if index >= 0 && index < len(se.db.Domains) {
se.db.Domains[index].Name = name
se.db.Domains[index].Description = description
se.showDomainList()
}
}
// deleteDomain deletes a domain by index
func (se *SchemaEditor) deleteDomain(index int) {
if index >= 0 && index < len(se.db.Domains) {
se.db.Domains = append(se.db.Domains[:index], se.db.Domains[index+1:]...)
se.showDomainList()
}
}
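
A usage sketch of the domain operations; note that, unlike the column dataops, each of these also refreshes the domain list screen as a side effect. The example assumes an initially empty domain list, so the new domain lands at index 0:

// Create, rename, then delete a domain (indices follow insertion order).
se.createDomain("Billing", "Invoicing and payment tables")
se.updateDomain(0, "Billing & Payments", "Invoicing and payment tables")
se.deleteDomain(0)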

pkg/ui/domain_screens.go Normal file

@@ -0,0 +1,258 @@
package ui
import (
"fmt"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// showDomainList displays the domain management screen
func (se *SchemaEditor) showDomainList() {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText("[::b]Manage Domains").
SetDynamicColors(true).
SetTextAlign(tview.AlignCenter)
// Create domains table
domainTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)
// Add header row
headers := []string{"Name", "Sequence", "Total Tables", "Description"}
headerWidths := []int{20, 15, 20}
for i, header := range headers {
padding := ""
if i < len(headerWidths) {
padding = strings.Repeat(" ", headerWidths[i]-len(header))
}
cell := tview.NewTableCell(header + padding).
SetTextColor(tcell.ColorYellow).
SetSelectable(false).
SetAlign(tview.AlignLeft)
domainTable.SetCell(0, i, cell)
}
// Add existing domains
for row, domain := range se.db.Domains {
// Name - pad to 20 chars
nameStr := fmt.Sprintf("%-20s", domain.Name)
nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
domainTable.SetCell(row+1, 0, nameCell)
// Sequence - pad to 15 chars
seqStr := fmt.Sprintf("%-15s", fmt.Sprintf("%d", domain.Sequence))
seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
domainTable.SetCell(row+1, 1, seqCell)
// Total Tables - pad to 20 chars
tablesStr := fmt.Sprintf("%-20s", fmt.Sprintf("%d", len(domain.Tables)))
tablesCell := tview.NewTableCell(tablesStr).SetSelectable(true)
domainTable.SetCell(row+1, 2, tablesCell)
// Description - no padding, takes remaining space
descCell := tview.NewTableCell(domain.Description).SetSelectable(true)
domainTable.SetCell(row+1, 3, descCell)
}
domainTable.SetTitle(" Domains ").SetBorder(true).SetTitleAlign(tview.AlignLeft)
// Action buttons flex
btnFlex := tview.NewFlex()
btnNewDomain := tview.NewButton("New Domain [n]").SetSelectedFunc(func() {
se.showNewDomainDialog()
})
btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
se.pages.SwitchToPage("main")
se.pages.RemovePage("domains")
})
// Set up button input captures for Tab/Shift+Tab navigation
btnNewDomain.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(domainTable)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnBack)
return nil
}
return event
})
btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnNewDomain)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(domainTable)
return nil
}
return event
})
btnFlex.AddItem(btnNewDomain, 0, 1, true).
AddItem(btnBack, 0, 1, false)
domainTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.SwitchToPage("main")
se.pages.RemovePage("domains")
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnNewDomain)
return nil
}
if event.Key() == tcell.KeyEnter {
row, _ := domainTable.GetSelection()
if row > 0 && row <= len(se.db.Domains) { // Skip header row
domainIndex := row - 1
se.showDomainEditor(domainIndex, se.db.Domains[domainIndex])
return nil
}
}
if event.Rune() == 'n' {
se.showNewDomainDialog()
return nil
}
if event.Rune() == 'b' {
se.pages.SwitchToPage("main")
se.pages.RemovePage("domains")
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(domainTable, 0, 1, true).
AddItem(btnFlex, 1, 0, false)
se.pages.AddPage("domains", flex, true, true)
}
// showNewDomainDialog displays a dialog to create a new domain
func (se *SchemaEditor) showNewDomainDialog() {
form := tview.NewForm()
domainName := ""
domainDesc := ""
form.AddInputField("Name", "", 40, nil, func(value string) {
domainName = value
})
form.AddInputField("Description", "", 50, nil, func(value string) {
domainDesc = value
})
form.AddButton("Save", func() {
if domainName == "" {
return
}
se.createDomain(domainName, domainDesc)
se.pages.RemovePage("new-domain")
se.pages.RemovePage("domains")
se.showDomainList()
})
form.AddButton("Back", func() {
se.pages.RemovePage("new-domain")
se.pages.RemovePage("domains")
se.showDomainList()
})
form.SetBorder(true).SetTitle(" New Domain ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("new-domain", "domains")
return nil
}
return event
})
se.pages.AddPage("new-domain", form, true, true)
}
// showDomainEditor displays a dialog to edit an existing domain
func (se *SchemaEditor) showDomainEditor(index int, domain *models.Domain) {
form := tview.NewForm()
domainName := domain.Name
domainDesc := domain.Description
form.AddInputField("Name", domainName, 40, nil, func(value string) {
domainName = value
})
form.AddInputField("Description", domainDesc, 50, nil, func(value string) {
domainDesc = value
})
form.AddButton("Save", func() {
if domainName == "" {
return
}
se.updateDomain(index, domainName, domainDesc)
se.pages.RemovePage("edit-domain")
se.pages.RemovePage("domains")
se.showDomainList()
})
form.AddButton("Delete", func() {
se.showDeleteDomainConfirm(index)
})
form.AddButton("Back", func() {
se.pages.RemovePage("edit-domain")
se.pages.RemovePage("domains")
se.showDomainList()
})
form.SetBorder(true).SetTitle(" Edit Domain ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("edit-domain", "domains")
return nil
}
return event
})
se.pages.AddPage("edit-domain", form, true, true)
}
// showDeleteDomainConfirm shows a confirmation dialog before deleting a domain
func (se *SchemaEditor) showDeleteDomainConfirm(index int) {
modal := tview.NewModal().
SetText(fmt.Sprintf("Delete domain '%s'? This action cannot be undone.", se.db.Domains[index].Name)).
AddButtons([]string{"Cancel", "Delete"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Delete" {
se.deleteDomain(index)
se.pages.RemovePage("delete-domain-confirm")
se.pages.RemovePage("edit-domain")
se.pages.RemovePage("domains")
se.showDomainList()
} else {
se.pages.RemovePage("delete-domain-confirm")
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("delete-domain-confirm")
return nil
}
return event
})
se.pages.AddAndSwitchToPage("delete-domain-confirm", modal, true)
}

pkg/ui/editor.go Normal file

@@ -0,0 +1,73 @@
package ui
import (
"fmt"
"sort"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// SchemaEditor represents the interactive schema editor
type SchemaEditor struct {
db *models.Database
app *tview.Application
pages *tview.Pages
loadConfig *LoadConfig
saveConfig *SaveConfig
}
// NewSchemaEditor creates a new schema editor
func NewSchemaEditor(db *models.Database) *SchemaEditor {
return &SchemaEditor{
db: db,
app: tview.NewApplication(),
pages: tview.NewPages(),
loadConfig: nil,
saveConfig: nil,
}
}
// NewSchemaEditorWithConfigs creates a new schema editor with load/save configurations
func NewSchemaEditorWithConfigs(db *models.Database, loadConfig *LoadConfig, saveConfig *SaveConfig) *SchemaEditor {
return &SchemaEditor{
db: db,
app: tview.NewApplication(),
pages: tview.NewPages(),
loadConfig: loadConfig,
saveConfig: saveConfig,
}
}
// Run starts the interactive editor
func (se *SchemaEditor) Run() error {
// If no database is loaded, show load screen
if se.db == nil {
se.showLoadScreen()
} else {
// Create main menu view
mainMenu := se.createMainMenu()
se.pages.AddPage("main", mainMenu, true, true)
}
// Run the application
if err := se.app.SetRoot(se.pages, true).Run(); err != nil {
return fmt.Errorf("application error: %w", err)
}
return nil
}
// GetDatabase returns the current database
func (se *SchemaEditor) GetDatabase() *models.Database {
return se.db
}
// Helper function to get sorted column names; map iteration order is
// random in Go, so sorting keeps UI listings deterministic.
func getColumnNames(table *models.Table) []string {
names := make([]string, 0, len(table.Columns))
for name := range table.Columns {
names = append(names, name)
}
sort.Strings(names)
return names
}

pkg/ui/load_save_screens.go Normal file

@@ -0,0 +1,791 @@
package ui
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/merge"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
rbun "git.warky.dev/wdevs/relspecgo/pkg/readers/bun"
rdbml "git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
rdctx "git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
rdrawdb "git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
rdrizzle "git.warky.dev/wdevs/relspecgo/pkg/readers/drizzle"
rgorm "git.warky.dev/wdevs/relspecgo/pkg/readers/gorm"
rgraphql "git.warky.dev/wdevs/relspecgo/pkg/readers/graphql"
rjson "git.warky.dev/wdevs/relspecgo/pkg/readers/json"
rpgsql "git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
rprisma "git.warky.dev/wdevs/relspecgo/pkg/readers/prisma"
rtypeorm "git.warky.dev/wdevs/relspecgo/pkg/readers/typeorm"
ryaml "git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
wdrizzle "git.warky.dev/wdevs/relspecgo/pkg/writers/drizzle"
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wgraphql "git.warky.dev/wdevs/relspecgo/pkg/writers/graphql"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wprisma "git.warky.dev/wdevs/relspecgo/pkg/writers/prisma"
wtypeorm "git.warky.dev/wdevs/relspecgo/pkg/writers/typeorm"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
)
// LoadConfig holds the configuration for loading a database
type LoadConfig struct {
SourceType string
FilePath string
ConnString string
}
// SaveConfig holds the configuration for saving a database
type SaveConfig struct {
TargetType string
FilePath string
ConnString string
}
// showLoadScreen displays the database load screen
func (se *SchemaEditor) showLoadScreen() {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText("[::b]Load Database Schema").
SetTextAlign(tview.AlignCenter).
SetDynamicColors(true)
// Form
form := tview.NewForm()
form.SetBorder(true).SetTitle(" Load Configuration ").SetTitleAlign(tview.AlignLeft)
// Format selection
formatOptions := []string{
"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
}
selectedFormat := 0
currentFormat := formatOptions[selectedFormat]
// File path input
filePath := ""
connString := ""
form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
selectedFormat = index
currentFormat = option
})
form.AddInputField("File Path", "", 50, nil, func(value string) {
filePath = value
})
form.AddInputField("Connection String", "", 50, nil, func(value string) {
connString = value
})
form.AddTextView("Help", getLoadHelpText(), 0, 5, true, false)
// Buttons
form.AddButton("Load [l]", func() {
se.loadDatabase(currentFormat, filePath, connString)
})
form.AddButton("Create New [n]", func() {
se.createNewDatabase()
})
form.AddButton("Exit [q]", func() {
se.app.Stop()
})
// Keyboard shortcuts
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.app.Stop()
return nil
}
switch event.Rune() {
case 'l':
se.loadDatabase(currentFormat, filePath, connString)
return nil
case 'n':
se.createNewDatabase()
return nil
case 'q':
se.app.Stop()
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(form, 0, 1, true)
se.pages.AddAndSwitchToPage("load-database", flex, true)
}
// showSaveScreen displays the save database screen
func (se *SchemaEditor) showSaveScreen() {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText("[::b]Save Database Schema").
SetTextAlign(tview.AlignCenter).
SetDynamicColors(true)
// Form
form := tview.NewForm()
form.SetBorder(true).SetTitle(" Save Configuration ").SetTitleAlign(tview.AlignLeft)
// Format selection
formatOptions := []string{
"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
}
selectedFormat := 0
currentFormat := formatOptions[selectedFormat]
// File path input
filePath := ""
if se.saveConfig != nil {
// Pre-populate with existing save config
for i, format := range formatOptions {
if format == se.saveConfig.TargetType {
selectedFormat = i
currentFormat = format
break
}
}
filePath = se.saveConfig.FilePath
}
form.AddDropDown("Format", formatOptions, selectedFormat, func(option string, index int) {
selectedFormat = index
currentFormat = option
})
form.AddInputField("File Path", filePath, 50, nil, func(value string) {
filePath = value
})
form.AddTextView("Help", getSaveHelpText(), 0, 5, true, false)
// Buttons
form.AddButton("Save [s]", func() {
se.saveDatabase(currentFormat, filePath)
})
form.AddButton("Update Existing Database [u]", func() {
// Use saveConfig if available, otherwise use loadConfig
if se.saveConfig != nil {
se.showUpdateExistingDatabaseConfirm()
} else if se.loadConfig != nil {
se.showUpdateExistingDatabaseConfirm()
} else {
se.showErrorDialog("Error", "No database source found. Use Save instead.")
}
})
form.AddButton("Back [b]", func() {
se.pages.RemovePage("save-database")
se.pages.SwitchToPage("main")
})
// Keyboard shortcuts
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("save-database")
se.pages.SwitchToPage("main")
return nil
}
switch event.Rune() {
case 's':
se.saveDatabase(currentFormat, filePath)
return nil
case 'u':
// showUpdateExistingDatabaseConfirm prefers saveConfig over loadConfig
if se.saveConfig != nil || se.loadConfig != nil {
se.showUpdateExistingDatabaseConfirm()
} else {
se.showErrorDialog("Error", "No database source found. Use Save instead.")
}
return nil
case 'b':
se.pages.RemovePage("save-database")
se.pages.SwitchToPage("main")
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(form, 0, 1, true)
se.pages.AddAndSwitchToPage("save-database", flex, true)
}
// loadDatabase loads a database from the specified configuration
func (se *SchemaEditor) loadDatabase(format, filePath, connString string) {
// Validate input
if format == "pgsql" {
if connString == "" {
se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
return
}
} else {
if filePath == "" {
se.showErrorDialog("Error", "File path is required for "+format)
return
}
// Expand home directory
if len(filePath) > 0 && filePath[0] == '~' {
home, err := os.UserHomeDir()
if err == nil {
filePath = filepath.Join(home, filePath[1:])
}
}
}
// Create reader
var reader readers.Reader
switch format {
case "dbml":
reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "dctx":
reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drawdb":
reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "graphql":
reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "json":
reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "yaml":
reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "gorm":
reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "bun":
reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drizzle":
reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "prisma":
reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "typeorm":
reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "pgsql":
reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
default:
se.showErrorDialog("Error", "Unsupported format: "+format)
return
}
// Read database
db, err := reader.ReadDatabase()
if err != nil {
se.showErrorDialog("Load Error", fmt.Sprintf("Failed to load database: %v", err))
return
}
// Store load config
se.loadConfig = &LoadConfig{
SourceType: format,
FilePath: filePath,
ConnString: connString,
}
// Update database
se.db = db
// Show success and switch to main menu
se.showSuccessDialog("Load Complete", fmt.Sprintf("Successfully loaded database '%s'", db.Name), func() {
se.pages.RemovePage("load-database")
se.pages.RemovePage("main")
se.pages.AddPage("main", se.createMainMenu(), true, true)
})
}
// saveDatabase saves the database to the specified configuration
func (se *SchemaEditor) saveDatabase(format, filePath string) {
// Validate input; "pgsql" here means SQL export to a file, handled by the
// pgsql writer below (saving to a live database is not offered from the UI),
// which matches the help text and the format dropdown.
if filePath == "" {
se.showErrorDialog("Error", "File path is required")
return
}
// Expand home directory
if len(filePath) > 0 && filePath[0] == '~' {
home, err := os.UserHomeDir()
if err == nil {
filePath = filepath.Join(home, filePath[1:])
}
}
// Create writer
var writer writers.Writer
switch format {
case "dbml":
writer = wdbml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "dctx":
writer = wdctx.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "drawdb":
writer = wdrawdb.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "graphql":
writer = wgraphql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "json":
writer = wjson.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "yaml":
writer = wyaml.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "gorm":
writer = wgorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "bun":
writer = wbun.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "drizzle":
writer = wdrizzle.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "prisma":
writer = wprisma.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "typeorm":
writer = wtypeorm.NewWriter(&writers.WriterOptions{OutputPath: filePath})
case "pgsql":
writer = wpgsql.NewWriter(&writers.WriterOptions{OutputPath: filePath})
default:
se.showErrorDialog("Error", "Unsupported format: "+format)
return
}
// Write database
err := writer.WriteDatabase(se.db)
if err != nil {
se.showErrorDialog("Save Error", fmt.Sprintf("Failed to save database: %v", err))
return
}
// Store save config
se.saveConfig = &SaveConfig{
TargetType: format,
FilePath: filePath,
}
// Show success
se.showSuccessDialog("Save Complete", fmt.Sprintf("Successfully saved database to %s", filePath), func() {
se.pages.RemovePage("save-database")
se.pages.SwitchToPage("main")
})
}
// createNewDatabase creates a new empty database
func (se *SchemaEditor) createNewDatabase() {
// Create a new empty database
se.db = &models.Database{
Name: "New Database",
Schemas: []*models.Schema{},
}
// Clear load config
se.loadConfig = nil
// Show success and switch to main menu
se.showSuccessDialog("New Database", "Created new empty database", func() {
se.pages.RemovePage("load-database")
se.pages.AddPage("main", se.createMainMenu(), true, true)
})
}
// showErrorDialog displays an error dialog
func (se *SchemaEditor) showErrorDialog(_title, message string) {
modal := tview.NewModal().
SetText(message).
AddButtons([]string{"OK"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
se.pages.RemovePage("error-dialog")
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("error-dialog")
return nil
}
return event
})
se.pages.AddPage("error-dialog", modal, true, true)
}
// showSuccessDialog displays a success dialog
func (se *SchemaEditor) showSuccessDialog(_title, message string, onClose func()) {
modal := tview.NewModal().
SetText(message).
AddButtons([]string{"OK"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
se.pages.RemovePage("success-dialog")
if onClose != nil {
onClose()
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("success-dialog")
if onClose != nil {
onClose()
}
return nil
}
return event
})
se.pages.AddPage("success-dialog", modal, true, true)
}
// getLoadHelpText returns the help text for the load screen
func getLoadHelpText() string {
return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
Database formats: pgsql (requires connection string)
Examples:
- File path: ~/schemas/mydb.dbml or /path/to/schema.json
- Connection: postgres://user:pass@localhost/dbname`
}
// showUpdateExistingDatabaseConfirm displays a confirmation dialog before updating existing database
func (se *SchemaEditor) showUpdateExistingDatabaseConfirm() {
// Use saveConfig if available, otherwise use loadConfig
var targetType, targetPath string
if se.saveConfig != nil {
targetType = se.saveConfig.TargetType
targetPath = se.saveConfig.FilePath
} else if se.loadConfig != nil {
targetType = se.loadConfig.SourceType
targetPath = se.loadConfig.FilePath
} else {
return
}
confirmText := fmt.Sprintf("Update existing database?\n\nFormat: %s\nPath: %s\n\nThis will overwrite the source.",
targetType, targetPath)
modal := tview.NewModal().
SetText(confirmText).
AddButtons([]string{"Cancel", "Update"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Update" {
se.pages.RemovePage("update-confirm")
se.pages.RemovePage("save-database")
se.saveDatabase(targetType, targetPath)
se.pages.SwitchToPage("main")
} else {
se.pages.RemovePage("update-confirm")
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("update-confirm")
return nil
}
return event
})
se.pages.AddAndSwitchToPage("update-confirm", modal, true)
}
// getSaveHelpText returns the help text for the save screen
func getSaveHelpText() string {
return `File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm, pgsql (SQL export)
Examples:
- File: ~/schemas/mydb.dbml
- Directory (for code formats): ./models/`
}
// showImportScreen displays the import/merge database screen
func (se *SchemaEditor) showImportScreen() {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText("[::b]Import & Merge Database Schema").
SetTextAlign(tview.AlignCenter).
SetDynamicColors(true)
// Form
form := tview.NewForm()
form.SetBorder(true).SetTitle(" Import Configuration ").SetTitleAlign(tview.AlignLeft)
// Format selection
formatOptions := []string{
"dbml", "dctx", "drawdb", "graphql", "json", "yaml",
"gorm", "bun", "drizzle", "prisma", "typeorm", "pgsql",
}
selectedFormat := 0
currentFormat := formatOptions[selectedFormat]
// File path input
filePath := ""
connString := ""
skipDomains := false
skipRelations := false
skipEnums := false
skipViews := false
skipSequences := false
skipTables := ""
form.AddDropDown("Format", formatOptions, 0, func(option string, index int) {
selectedFormat = index
currentFormat = option
})
form.AddInputField("File Path", "", 50, nil, func(value string) {
filePath = value
})
form.AddInputField("Connection String", "", 50, nil, func(value string) {
connString = value
})
form.AddInputField("Skip Tables (comma-separated)", "", 50, nil, func(value string) {
skipTables = value
})
form.AddCheckbox("Skip Domains", false, func(checked bool) {
skipDomains = checked
})
form.AddCheckbox("Skip Relations", false, func(checked bool) {
skipRelations = checked
})
form.AddCheckbox("Skip Enums", false, func(checked bool) {
skipEnums = checked
})
form.AddCheckbox("Skip Views", false, func(checked bool) {
skipViews = checked
})
form.AddCheckbox("Skip Sequences", false, func(checked bool) {
skipSequences = checked
})
form.AddTextView("Help", getImportHelpText(), 0, 7, true, false)
// Buttons
form.AddButton("Import & Merge [i]", func() {
se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
})
form.AddButton("Back [b]", func() {
se.pages.RemovePage("import-database")
se.pages.SwitchToPage("main")
})
form.AddButton("Exit [q]", func() {
se.app.Stop()
})
// Keyboard shortcuts
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("import-database")
se.pages.SwitchToPage("main")
return nil
}
switch event.Rune() {
case 'i':
se.importAndMergeDatabase(currentFormat, filePath, connString, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
return nil
case 'b':
se.pages.RemovePage("import-database")
se.pages.SwitchToPage("main")
return nil
case 'q':
se.app.Stop()
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(form, 0, 1, true)
se.pages.AddAndSwitchToPage("import-database", flex, true)
}
// importAndMergeDatabase imports and merges a database from the specified configuration
func (se *SchemaEditor) importAndMergeDatabase(format, filePath, connString string, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
// Validate input
if format == "pgsql" {
if connString == "" {
se.showErrorDialog("Error", "Connection string is required for PostgreSQL")
return
}
} else {
if filePath == "" {
se.showErrorDialog("Error", "File path is required for "+format)
return
}
// Expand home directory
if len(filePath) > 0 && filePath[0] == '~' {
home, err := os.UserHomeDir()
if err == nil {
filePath = filepath.Join(home, filePath[1:])
}
}
}
// Create reader
var reader readers.Reader
switch format {
case "dbml":
reader = rdbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "dctx":
reader = rdctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drawdb":
reader = rdrawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "graphql":
reader = rgraphql.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "json":
reader = rjson.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "yaml":
reader = ryaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "gorm":
reader = rgorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "bun":
reader = rbun.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "drizzle":
reader = rdrizzle.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "prisma":
reader = rprisma.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "typeorm":
reader = rtypeorm.NewReader(&readers.ReaderOptions{FilePath: filePath})
case "pgsql":
reader = rpgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
default:
se.showErrorDialog("Error", "Unsupported format: "+format)
return
}
// Read the database to import
importDb, err := reader.ReadDatabase()
if err != nil {
se.showErrorDialog("Import Error", fmt.Sprintf("Failed to read database: %v", err))
return
}
// Show confirmation dialog
se.showImportConfirmation(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
}
// showImportConfirmation shows a confirmation dialog before merging
func (se *SchemaEditor) showImportConfirmation(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
confirmText := fmt.Sprintf("Import & Merge Database?\n\nSource: %s\nTarget: %s\n\nThis will add missing schemas, tables, columns, and other objects from the source to your database.\n\nExisting items will NOT be modified.",
importDb.Name, se.db.Name)
modal := tview.NewModal().
SetText(confirmText).
AddButtons([]string{"Cancel", "Merge"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
se.pages.RemovePage("import-confirm")
if buttonLabel == "Merge" {
se.performMerge(importDb, skipDomains, skipRelations, skipEnums, skipViews, skipSequences, skipTables)
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("import-confirm")
se.pages.SwitchToPage("import-database")
return nil
}
return event
})
se.pages.AddAndSwitchToPage("import-confirm", modal, true)
}
// performMerge performs the actual merge operation
func (se *SchemaEditor) performMerge(importDb *models.Database, skipDomains, skipRelations, skipEnums, skipViews, skipSequences bool, skipTables string) {
// Create merge options
opts := &merge.MergeOptions{
SkipDomains: skipDomains,
SkipRelations: skipRelations,
SkipEnums: skipEnums,
SkipViews: skipViews,
SkipSequences: skipSequences,
}
// Parse skip tables
if skipTables != "" {
opts.SkipTableNames = parseSkipTablesUI(skipTables)
}
// Perform the merge
result := merge.MergeDatabases(se.db, importDb, opts)
// Update the database timestamp
se.db.UpdateDate()
// Show success dialog with summary
summary := merge.GetMergeSummary(result)
se.showSuccessDialog("Import Complete", summary, func() {
se.pages.RemovePage("import-database")
se.pages.RemovePage("main")
se.pages.AddPage("main", se.createMainMenu(), true, true)
})
}
// getImportHelpText returns the help text for the import screen
func getImportHelpText() string {
return `Import & Merge: Adds missing schemas, tables, columns, and other objects to your existing database.
File-based formats: dbml, dctx, drawdb, graphql, json, yaml, gorm, bun, drizzle, prisma, typeorm
Database formats: pgsql (requires connection string)
Skip options: Check to exclude specific object types from the merge.`
}
func parseSkipTablesUI(skipTablesStr string) map[string]bool {
skipTables := make(map[string]bool)
if skipTablesStr == "" {
return skipTables
}
// Split by comma and trim whitespace
parts := strings.Split(skipTablesStr, ",")
for _, part := range parts {
trimmed := strings.TrimSpace(part)
if trimmed != "" {
// Store in lowercase for case-insensitive matching
skipTables[strings.ToLower(trimmed)] = true
}
}
return skipTables
}
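
The parser trims whitespace and lowercases names, so ragged user input maps cleanly onto the merge options. For example:

// parseSkipTablesUI("Users, ORDERS , audit_log") yields
// map[string]bool{"users": true, "orders": true, "audit_log": true}
opts := &merge.MergeOptions{SkipTableNames: parseSkipTablesUI("Users, ORDERS , audit_log")}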

pkg/ui/main_menu.go Normal file

@@ -0,0 +1,65 @@
package ui
import (
"fmt"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// createMainMenu creates the main menu screen
func (se *SchemaEditor) createMainMenu() tview.Primitive {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title with database name
dbName := se.db.Name
if dbName == "" {
dbName = "Untitled"
}
updateAtStr := ""
if se.db.UpdatedAt != "" {
updateAtStr = fmt.Sprintf("Updated @ %s", se.db.UpdatedAt)
}
titleText := fmt.Sprintf("[::b]RelSpec Schema Editor\n[::d]Database: %s %s\n[::d]Press arrow keys to navigate, Enter to select", dbName, updateAtStr)
title := tview.NewTextView().
SetText(titleText).
SetDynamicColors(true)
// Menu options
menu := tview.NewList().
AddItem("Edit Database", "Edit database name, description, and properties", 'e', func() {
se.showEditDatabaseForm()
}).
AddItem("Manage Schemas", "View, create, edit, and delete schemas", 's', func() {
se.showSchemaList()
}).
AddItem("Manage Tables", "View and manage tables in schemas", 't', func() {
se.showTableList()
}).
AddItem("Manage Domains", "View, create, edit, and delete domains", 'd', func() {
se.showDomainList()
}).
AddItem("Import & Merge", "Import and merge schema from another database", 'i', func() {
se.showImportScreen()
}).
AddItem("Save Database", "Save database to file or database", 'w', func() {
se.showSaveScreen()
}).
AddItem("Exit Editor", "Exit the editor", 'q', func() {
se.app.Stop()
})
menu.SetBorder(true).SetTitle(" Menu ").SetTitleAlign(tview.AlignLeft)
menu.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitEditorConfirm()
return nil
}
return event
})
flex.AddItem(title, 5, 0, false).
AddItem(menu, 0, 1, true)
return flex
}

pkg/ui/relation_dataops.go Normal file

@@ -0,0 +1,115 @@
package ui
import "git.warky.dev/wdevs/relspecgo/pkg/models"
// Relationship data operations - business logic for relationship management
// CreateRelationship creates a new relationship and adds it to a table
func (se *SchemaEditor) CreateRelationship(schemaIndex, tableIndex int, rel *models.Relationship) *models.Relationship {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return nil
}
table := schema.Tables[tableIndex]
if table.Relationships == nil {
table.Relationships = make(map[string]*models.Relationship)
}
table.Relationships[rel.Name] = rel
table.UpdateDate()
return rel
}
// UpdateRelationship updates an existing relationship
func (se *SchemaEditor) UpdateRelationship(schemaIndex, tableIndex int, oldName string, rel *models.Relationship) bool {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return false
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return false
}
table := schema.Tables[tableIndex]
if table.Relationships == nil {
return false
}
// Delete old entry if name changed
if oldName != rel.Name {
delete(table.Relationships, oldName)
}
table.Relationships[rel.Name] = rel
table.UpdateDate()
return true
}
// DeleteRelationship removes a relationship from a table
func (se *SchemaEditor) DeleteRelationship(schemaIndex, tableIndex int, relName string) bool {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return false
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return false
}
table := schema.Tables[tableIndex]
if table.Relationships == nil {
return false
}
delete(table.Relationships, relName)
table.UpdateDate()
return true
}
// GetRelationship returns a relationship by name
func (se *SchemaEditor) GetRelationship(schemaIndex, tableIndex int, relName string) *models.Relationship {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return nil
}
table := schema.Tables[tableIndex]
if table.Relationships == nil {
return nil
}
return table.Relationships[relName]
}
// GetRelationshipNames returns all relationship names for a table
func (se *SchemaEditor) GetRelationshipNames(schemaIndex, tableIndex int) []string {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return nil
}
table := schema.Tables[tableIndex]
if table.Relationships == nil {
return nil
}
names := make([]string, 0, len(table.Relationships))
for name := range table.Relationships {
names = append(names, name)
}
return names
}
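
A usage sketch of the relationship dataops, assuming the models.Relationship fields that appear in the screens below (Name, Type, FromColumns, ToTable, ToColumns) and hypothetical table and column names:

// A foreign-key style one-to-many from orders.user_id to users.id.
rel := &models.Relationship{
	Name:        "fk_orders_user",
	Type:        models.OneToMany,
	FromColumns: []string{"user_id"},
	ToTable:     "users",
	ToColumns:   []string{"id"},
}
se.CreateRelationship(0, 0, rel)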

pkg/ui/relation_screens.go Normal file

@@ -0,0 +1,486 @@
package ui
import (
"fmt"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// showRelationshipList displays all relationships for a table
func (se *SchemaEditor) showRelationshipList(schemaIndex, tableIndex int) {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return
}
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText(fmt.Sprintf("[::b]Relationships for Table: %s", table.Name)).
SetDynamicColors(true).
SetTextAlign(tview.AlignCenter)
// Create relationships table
relTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)
// Add header row
headers := []string{"Name", "Type", "From Columns", "To Table", "To Columns", "Description"}
headerWidths := []int{20, 15, 20, 20, 20}
for i, header := range headers {
padding := ""
if i < len(headerWidths) {
padding = strings.Repeat(" ", headerWidths[i]-len(header))
}
cell := tview.NewTableCell(header + padding).
SetTextColor(tcell.ColorYellow).
SetSelectable(false).
SetAlign(tview.AlignLeft)
relTable.SetCell(0, i, cell)
}
// Get relationship names
relNames := se.GetRelationshipNames(schemaIndex, tableIndex)
for row, relName := range relNames {
rel := table.Relationships[relName]
// Name
nameStr := fmt.Sprintf("%-20s", rel.Name)
nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
relTable.SetCell(row+1, 0, nameCell)
// Type
typeStr := fmt.Sprintf("%-15s", string(rel.Type))
typeCell := tview.NewTableCell(typeStr).SetSelectable(true)
relTable.SetCell(row+1, 1, typeCell)
// From Columns
fromColsStr := strings.Join(rel.FromColumns, ", ")
fromColsStr = fmt.Sprintf("%-20s", fromColsStr)
fromColsCell := tview.NewTableCell(fromColsStr).SetSelectable(true)
relTable.SetCell(row+1, 2, fromColsCell)
// To Table
toTableStr := rel.ToTable
if rel.ToSchema != "" && rel.ToSchema != table.Schema {
toTableStr = rel.ToSchema + "." + rel.ToTable
}
toTableStr = fmt.Sprintf("%-20s", toTableStr)
toTableCell := tview.NewTableCell(toTableStr).SetSelectable(true)
relTable.SetCell(row+1, 3, toTableCell)
// To Columns
toColsStr := strings.Join(rel.ToColumns, ", ")
toColsStr = fmt.Sprintf("%-20s", toColsStr)
toColsCell := tview.NewTableCell(toColsStr).SetSelectable(true)
relTable.SetCell(row+1, 4, toColsCell)
// Description
descCell := tview.NewTableCell(rel.Description).SetSelectable(true)
relTable.SetCell(row+1, 5, descCell)
}
relTable.SetTitle(" Relationships ").SetBorder(true).SetTitleAlign(tview.AlignLeft)
// Action buttons
btnFlex := tview.NewFlex()
btnNew := tview.NewButton("New Relationship [n]").SetSelectedFunc(func() {
se.showNewRelationshipDialog(schemaIndex, tableIndex)
})
btnEdit := tview.NewButton("Edit [e]").SetSelectedFunc(func() {
row, _ := relTable.GetSelection()
if row > 0 && row <= len(relNames) {
relName := relNames[row-1]
se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
}
})
btnDelete := tview.NewButton("Delete [d]").SetSelectedFunc(func() {
row, _ := relTable.GetSelection()
if row > 0 && row <= len(relNames) {
relName := relNames[row-1]
se.showDeleteRelationshipConfirm(schemaIndex, tableIndex, relName)
}
})
btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
se.pages.RemovePage("relationships")
se.pages.SwitchToPage("table-editor")
})
// Set up button navigation
btnNew.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(relTable)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnEdit)
return nil
}
return event
})
btnEdit.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnNew)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnDelete)
return nil
}
return event
})
btnDelete.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnEdit)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnBack)
return nil
}
return event
})
btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnDelete)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(relTable)
return nil
}
return event
})
btnFlex.AddItem(btnNew, 0, 1, true).
AddItem(btnEdit, 0, 1, false).
AddItem(btnDelete, 0, 1, false).
AddItem(btnBack, 0, 1, false)
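// Keyboard shortcuts on the table itself: Escape and 'b' return to the table editor, Enter and 'e' edit, 'n' creates, and 'd' deletes the selected relationship.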
relTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("relationships")
se.pages.SwitchToPage("table-editor")
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnNew)
return nil
}
if event.Key() == tcell.KeyEnter {
row, _ := relTable.GetSelection()
if row > 0 && row <= len(relNames) {
relName := relNames[row-1]
se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
}
return nil
}
if event.Rune() == 'n' {
se.showNewRelationshipDialog(schemaIndex, tableIndex)
return nil
}
if event.Rune() == 'e' {
row, _ := relTable.GetSelection()
if row > 0 && row <= len(relNames) {
relName := relNames[row-1]
se.showEditRelationshipDialog(schemaIndex, tableIndex, relName)
}
return nil
}
if event.Rune() == 'd' {
row, _ := relTable.GetSelection()
if row > 0 && row <= len(relNames) {
relName := relNames[row-1]
se.showDeleteRelationshipConfirm(schemaIndex, tableIndex, relName)
}
return nil
}
if event.Rune() == 'b' {
se.pages.RemovePage("relationships")
se.pages.SwitchToPage("table-editor")
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(relTable, 0, 1, true).
AddItem(btnFlex, 1, 0, false)
se.pages.AddPage("relationships", flex, true, true)
}
// showNewRelationshipDialog shows dialog to create a new relationship
func (se *SchemaEditor) showNewRelationshipDialog(schemaIndex, tableIndex int) {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return
}
form := tview.NewForm()
// Collect all tables for dropdown
var allTables []string
var tableMap []struct{ schemaIdx, tableIdx int }
for si, schema := range se.db.Schemas {
for ti, t := range schema.Tables {
tableName := t.Name
if schema.Name != table.Schema {
tableName = schema.Name + "." + t.Name
}
allTables = append(allTables, tableName)
tableMap = append(tableMap, struct{ schemaIdx, tableIdx int }{si, ti})
}
}
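// tableMap mirrors allTables entry for entry, so a dropdown option index maps directly back to its (schemaIdx, tableIdx) pair.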
relName := ""
relType := models.OneToMany
fromColumns := ""
toColumns := ""
description := ""
selectedTableIdx := 0
form.AddInputField("Name", "", 40, nil, func(value string) {
relName = value
})
form.AddDropDown("Type", []string{
string(models.OneToOne),
string(models.OneToMany),
string(models.ManyToMany),
}, 1, func(option string, optionIndex int) {
relType = models.RelationType(option)
})
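// The dropdown starts at index 1 (one-to-many), matching relType's initial value above.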
form.AddInputField("From Columns (comma-separated)", "", 40, nil, func(value string) {
fromColumns = value
})
form.AddDropDown("To Table", allTables, 0, func(option string, optionIndex int) {
selectedTableIdx = optionIndex
})
form.AddInputField("To Columns (comma-separated)", "", 40, nil, func(value string) {
toColumns = value
})
form.AddInputField("Description", "", 60, nil, func(value string) {
description = value
})
form.AddButton("Save", func() {
if relName == "" {
return
}
// Parse columns
fromCols := strings.Split(fromColumns, ",")
for i := range fromCols {
fromCols[i] = strings.TrimSpace(fromCols[i])
}
toCols := strings.Split(toColumns, ",")
for i := range toCols {
toCols[i] = strings.TrimSpace(toCols[i])
}
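// Note: strings.Split on an empty string still yields one empty entry, so blank column fields produce a single "" column; validation is assumed to happen downstream (e.g. in CreateRelationship).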
// Get target table
targetSchema := se.db.Schemas[tableMap[selectedTableIdx].schemaIdx]
targetTable := targetSchema.Tables[tableMap[selectedTableIdx].tableIdx]
rel := models.InitRelationship(relName, relType)
rel.FromTable = table.Name
rel.FromSchema = table.Schema
rel.FromColumns = fromCols
rel.ToTable = targetTable.Name
rel.ToSchema = targetTable.Schema
rel.ToColumns = toCols
rel.Description = description
se.CreateRelationship(schemaIndex, tableIndex, rel)
se.pages.RemovePage("new-relationship")
se.pages.RemovePage("relationships")
se.showRelationshipList(schemaIndex, tableIndex)
})
form.AddButton("Back", func() {
se.pages.RemovePage("new-relationship")
})
form.SetBorder(true).SetTitle(" New Relationship ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("new-relationship")
return nil
}
return event
})
se.pages.AddPage("new-relationship", form, true, true)
}
// showEditRelationshipDialog shows dialog to edit a relationship
func (se *SchemaEditor) showEditRelationshipDialog(schemaIndex, tableIndex int, relName string) {
table := se.GetTable(schemaIndex, tableIndex)
if table == nil {
return
}
rel := se.GetRelationship(schemaIndex, tableIndex, relName)
if rel == nil {
return
}
form := tview.NewForm()
// Collect all tables for dropdown
var allTables []string
var tableMap []struct{ schemaIdx, tableIdx int }
selectedTableIdx := 0
for si, schema := range se.db.Schemas {
for ti, t := range schema.Tables {
tableName := t.Name
if schema.Name != table.Schema {
tableName = schema.Name + "." + t.Name
}
allTables = append(allTables, tableName)
tableMap = append(tableMap, struct{ schemaIdx, tableIdx int }{si, ti})
// Check if this is the current target table
if t.Name == rel.ToTable && schema.Name == rel.ToSchema {
selectedTableIdx = len(allTables) - 1
}
}
}
newName := rel.Name
relType := rel.Type
fromColumns := strings.Join(rel.FromColumns, ", ")
toColumns := strings.Join(rel.ToColumns, ", ")
description := rel.Description
form.AddInputField("Name", rel.Name, 40, nil, func(value string) {
newName = value
})
// Find initial type index
typeIdx := 1 // OneToMany default
typeOptions := []string{
string(models.OneToOne),
string(models.OneToMany),
string(models.ManyToMany),
}
for i, opt := range typeOptions {
if opt == string(rel.Type) {
typeIdx = i
break
}
}
form.AddDropDown("Type", typeOptions, typeIdx, func(option string, optionIndex int) {
relType = models.RelationType(option)
})
form.AddInputField("From Columns (comma-separated)", fromColumns, 40, nil, func(value string) {
fromColumns = value
})
form.AddDropDown("To Table", allTables, selectedTableIdx, func(option string, optionIndex int) {
selectedTableIdx = optionIndex
})
form.AddInputField("To Columns (comma-separated)", toColumns, 40, nil, func(value string) {
toColumns = value
})
form.AddInputField("Description", rel.Description, 60, nil, func(value string) {
description = value
})
form.AddButton("Save", func() {
if newName == "" {
return
}
// Parse columns
fromCols := strings.Split(fromColumns, ",")
for i := range fromCols {
fromCols[i] = strings.TrimSpace(fromCols[i])
}
toCols := strings.Split(toColumns, ",")
for i := range toCols {
toCols[i] = strings.TrimSpace(toCols[i])
}
// Get target table
targetSchema := se.db.Schemas[tableMap[selectedTableIdx].schemaIdx]
targetTable := targetSchema.Tables[tableMap[selectedTableIdx].tableIdx]
updatedRel := models.InitRelationship(newName, relType)
updatedRel.FromTable = table.Name
updatedRel.FromSchema = table.Schema
updatedRel.FromColumns = fromCols
updatedRel.ToTable = targetTable.Name
updatedRel.ToSchema = targetTable.Schema
updatedRel.ToColumns = toCols
updatedRel.Description = description
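// Carry over the original GUID so the relationship keeps its identity even when renamed.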
updatedRel.GUID = rel.GUID
se.UpdateRelationship(schemaIndex, tableIndex, relName, updatedRel)
se.pages.RemovePage("edit-relationship")
se.pages.RemovePage("relationships")
se.showRelationshipList(schemaIndex, tableIndex)
})
form.AddButton("Back", func() {
se.pages.RemovePage("edit-relationship")
})
form.SetBorder(true).SetTitle(" Edit Relationship ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("edit-relationship")
return nil
}
return event
})
se.pages.AddPage("edit-relationship", form, true, true)
}
// showDeleteRelationshipConfirm shows confirmation dialog for deleting a relationship
func (se *SchemaEditor) showDeleteRelationshipConfirm(schemaIndex, tableIndex int, relName string) {
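// tview's Modal reports the pressed button to the done callback by index and label; any button other than "Delete" just dismisses the dialog.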
modal := tview.NewModal().
SetText(fmt.Sprintf("Delete relationship '%s'? This action cannot be undone.", relName)).
AddButtons([]string{"Cancel", "Delete"}).
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
if buttonLabel == "Delete" {
se.DeleteRelationship(schemaIndex, tableIndex, relName)
se.pages.RemovePage("delete-relationship-confirm")
se.pages.RemovePage("relationships")
se.showRelationshipList(schemaIndex, tableIndex)
} else {
se.pages.RemovePage("delete-relationship-confirm")
}
})
modal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("delete-relationship-confirm")
return nil
}
return event
})
se.pages.AddAndSwitchToPage("delete-relationship-confirm", modal, true)
}

pkg/ui/schema_dataops.go Normal file
@@ -0,0 +1,55 @@
package ui
import "git.warky.dev/wdevs/relspecgo/pkg/models"
// Schema data operations - business logic for schema management
// CreateSchema creates a new schema and adds it to the database
func (se *SchemaEditor) CreateSchema(name, description string) *models.Schema {
newSchema := &models.Schema{
Name: name,
Description: description,
Tables: make([]*models.Table, 0),
Sequences: make([]*models.Sequence, 0),
Enums: make([]*models.Enum, 0),
}
se.db.UpdateDate()
se.db.Schemas = append(se.db.Schemas, newSchema)
return newSchema
}
// UpdateSchema updates an existing schema's properties
func (se *SchemaEditor) UpdateSchema(schemaIndex int, name, owner, description string) {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return
}
se.db.UpdateDate()
schema := se.db.Schemas[schemaIndex]
schema.Name = name
schema.Owner = owner
schema.Description = description
schema.UpdateDate()
}
// DeleteSchema removes a schema from the database
func (se *SchemaEditor) DeleteSchema(schemaIndex int) bool {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return false
}
se.db.UpdateDate()
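// Splice the schema out of the slice; entries after it shift left, so any schema indexes held elsewhere become stale.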
se.db.Schemas = append(se.db.Schemas[:schemaIndex], se.db.Schemas[schemaIndex+1:]...)
return true
}
// GetSchema returns a schema by index
func (se *SchemaEditor) GetSchema(schemaIndex int) *models.Schema {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
return se.db.Schemas[schemaIndex]
}
// GetAllSchemas returns all schemas
func (se *SchemaEditor) GetAllSchemas() []*models.Schema {
return se.db.Schemas
}
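// Usage sketch (illustrative only; the schema name, owner, and index below are hypothetical):
//
//	s := se.CreateSchema("reporting", "read-side models") // append a new schema
//	se.UpdateSchema(0, s.Name, "app_owner", "v2 models")  // edit it in place by index
//	_ = se.DeleteSchema(0)                                // splice it back out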

pkg/ui/schema_screens.go Normal file
@@ -0,0 +1,362 @@
package ui
import (
"fmt"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
// showSchemaList displays the schema management screen
func (se *SchemaEditor) showSchemaList() {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText("[::b]Manage Schemas").
SetDynamicColors(true).
SetTextAlign(tview.AlignCenter)
// Create schemas table
schemaTable := tview.NewTable().SetBorders(true).SetSelectable(true, false).SetFixed(1, 0)
// Add header row with padding for full width
headers := []string{"Name", "Sequence", "Total Tables", "Total Sequences", "Total Views", "GUID", "Description"}
headerWidths := []int{20, 15, 20, 20, 15, 36} // Last column takes remaining space
for i, header := range headers {
padding := ""
if i < len(headerWidths) {
padding = strings.Repeat(" ", headerWidths[i]-len(header))
}
cell := tview.NewTableCell(header + padding).
SetTextColor(tcell.ColorYellow).
SetSelectable(false).
SetAlign(tview.AlignLeft)
schemaTable.SetCell(0, i, cell)
}
// Add existing schemas
for row, schema := range se.db.Schemas {
schema := schema // copy the loop variable (defensive; nothing in this loop closes over it)
// Name - pad to 20 chars
nameStr := fmt.Sprintf("%-20s", schema.Name)
nameCell := tview.NewTableCell(nameStr).SetSelectable(true)
schemaTable.SetCell(row+1, 0, nameCell)
// Sequence - pad to 15 chars
seqStr := fmt.Sprintf("%-15d", schema.Sequence)
seqCell := tview.NewTableCell(seqStr).SetSelectable(true)
schemaTable.SetCell(row+1, 1, seqCell)
// Total Tables - pad to 20 chars
tablesStr := fmt.Sprintf("%-20d", len(schema.Tables))
tablesCell := tview.NewTableCell(tablesStr).SetSelectable(true)
schemaTable.SetCell(row+1, 2, tablesCell)
// Total Sequences - pad to 20 chars
sequencesStr := fmt.Sprintf("%-20d", len(schema.Sequences))
sequencesCell := tview.NewTableCell(sequencesStr).SetSelectable(true)
schemaTable.SetCell(row+1, 3, sequencesCell)
// Total Views - pad to 15 chars
viewsStr := fmt.Sprintf("%-15d", len(schema.Views))
viewsCell := tview.NewTableCell(viewsStr).SetSelectable(true)
schemaTable.SetCell(row+1, 4, viewsCell)
// GUID - pad to 36 chars
guidStr := fmt.Sprintf("%-36s", schema.GUID)
guidCell := tview.NewTableCell(guidStr).SetSelectable(true)
schemaTable.SetCell(row+1, 5, guidCell)
// Description - no padding, takes remaining space
descCell := tview.NewTableCell(schema.Description).SetSelectable(true)
schemaTable.SetCell(row+1, 6, descCell)
}
schemaTable.SetTitle(" Schemas ").SetBorder(true).SetTitleAlign(tview.AlignLeft)
// Action buttons flex (define before input capture)
btnFlex := tview.NewFlex()
btnNewSchema := tview.NewButton("New Schema [n]").SetSelectedFunc(func() {
se.showNewSchemaDialog()
})
btnBack := tview.NewButton("Back [b]").SetSelectedFunc(func() {
se.pages.SwitchToPage("main")
se.pages.RemovePage("schemas")
})
// Set up button input captures for Tab/Shift+Tab navigation
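// Focus ring on this screen: schemaTable → New Schema → Back → schemaTable.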
btnNewSchema.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(schemaTable)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnBack)
return nil
}
return event
})
btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnNewSchema)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(schemaTable)
return nil
}
return event
})
btnFlex.AddItem(btnNewSchema, 0, 1, true).
AddItem(btnBack, 0, 1, false)
schemaTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.SwitchToPage("main")
se.pages.RemovePage("schemas")
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnNewSchema)
return nil
}
if event.Key() == tcell.KeyEnter {
row, _ := schemaTable.GetSelection()
if row > 0 && row <= len(se.db.Schemas) { // Skip header row
schemaIndex := row - 1
se.showSchemaEditor(schemaIndex, se.db.Schemas[schemaIndex])
return nil
}
}
if event.Rune() == 'n' {
se.showNewSchemaDialog()
return nil
}
if event.Rune() == 'b' {
se.pages.SwitchToPage("main")
se.pages.RemovePage("schemas")
return nil
}
return event
})
flex.AddItem(title, 1, 0, false).
AddItem(schemaTable, 0, 1, true).
AddItem(btnFlex, 1, 0, false)
se.pages.AddPage("schemas", flex, true, true)
}
// showSchemaEditor shows the editor for a specific schema
func (se *SchemaEditor) showSchemaEditor(index int, schema *models.Schema) {
flex := tview.NewFlex().SetDirection(tview.FlexRow)
// Title
title := tview.NewTextView().
SetText(fmt.Sprintf("[::b]Schema: %s", schema.Name)).
SetDynamicColors(true).
SetTextAlign(tview.AlignCenter)
// Schema info display
info := tview.NewTextView().SetDynamicColors(true)
info.SetText(fmt.Sprintf("Tables: %d | Description: %s",
len(schema.Tables), schema.Description))
// Table list
tableList := tview.NewList().ShowSecondaryText(true)
for i, table := range schema.Tables {
tableIndex := i
table := table
colCount := len(table.Columns)
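// Shortcut runes are computed as '0'+i, so they are only meaningful digits for the first ten tables; past that the arithmetic walks into punctuation (':' and beyond).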
tableList.AddItem(table.Name, fmt.Sprintf("%d columns", colCount), rune('0'+i), func() {
se.showTableEditor(index, tableIndex, table)
})
}
tableList.AddItem("[New Table]", "Add a new table to this schema", 'n', func() {
se.showNewTableDialog(index)
})
tableList.AddItem("[Edit Schema Info]", "Edit schema properties", 'e', func() {
se.showEditSchemaDialog(index)
})
tableList.AddItem("[Delete Schema]", "Delete this schema", 'd', func() {
se.showDeleteSchemaConfirm(index)
})
tableList.SetBorder(true).SetTitle(" Tables ").SetTitleAlign(tview.AlignLeft)
// Action buttons (define before input capture)
btnFlex := tview.NewFlex()
btnNewTable := tview.NewButton("New Table [n]").SetSelectedFunc(func() {
se.showNewTableDialog(index)
})
btnBack := tview.NewButton("Back to Schemas [b]").SetSelectedFunc(func() {
se.pages.RemovePage("schema-editor")
se.pages.SwitchToPage("schemas")
})
// Set up button input captures for Tab/Shift+Tab navigation
btnNewTable.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(tableList)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnBack)
return nil
}
return event
})
btnBack.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyBacktab {
se.app.SetFocus(btnNewTable)
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(tableList)
return nil
}
return event
})
tableList.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.pages.RemovePage("schema-editor")
se.pages.SwitchToPage("schemas")
return nil
}
if event.Key() == tcell.KeyTab {
se.app.SetFocus(btnNewTable)
return nil
}
if event.Rune() == 'b' {
se.pages.RemovePage("schema-editor")
se.pages.SwitchToPage("schemas")
return nil
}
return event
})
btnFlex.AddItem(btnNewTable, 0, 1, true).
AddItem(btnBack, 0, 1, false)
flex.AddItem(title, 1, 0, false).
AddItem(info, 2, 0, false).
AddItem(tableList, 0, 1, true).
AddItem(btnFlex, 1, 0, false)
se.pages.AddPage("schema-editor", flex, true, true)
}
// showNewSchemaDialog shows dialog to create a new schema
func (se *SchemaEditor) showNewSchemaDialog() {
form := tview.NewForm()
schemaName := ""
description := ""
form.AddInputField("Schema Name", "", 40, nil, func(value string) {
schemaName = value
})
form.AddInputField("Description", "", 40, nil, func(value string) {
description = value
})
form.AddButton("Save", func() {
if schemaName == "" {
return
}
se.CreateSchema(schemaName, description)
se.pages.RemovePage("new-schema")
se.pages.RemovePage("schemas")
se.showSchemaList()
})
form.AddButton("Back", func() {
se.pages.RemovePage("new-schema")
se.pages.RemovePage("schemas")
se.showSchemaList()
})
form.SetBorder(true).SetTitle(" New Schema ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("new-schema", "schemas")
return nil
}
return event
})
se.pages.AddPage("new-schema", form, true, true)
}
// showEditSchemaDialog shows dialog to edit schema properties
func (se *SchemaEditor) showEditSchemaDialog(schemaIndex int) {
schema := se.db.Schemas[schemaIndex]
form := tview.NewForm()
// Local variables to collect changes
newName := schema.Name
newOwner := schema.Owner
newDescription := schema.Description
newGUID := schema.GUID
form.AddInputField("Schema Name", schema.Name, 40, nil, func(value string) {
newName = value
})
form.AddInputField("Owner", schema.Owner, 40, nil, func(value string) {
newOwner = value
})
form.AddTextArea("Description", schema.Description, 40, 5, 0, func(value string) {
newDescription = value
})
form.AddInputField("GUID", schema.GUID, 40, nil, func(value string) {
newGUID = value
})
form.AddButton("Save", func() {
// Apply changes using dataops
se.UpdateSchema(schemaIndex, newName, newOwner, newDescription)
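// UpdateSchema does not cover the GUID, so apply that field directly.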
se.db.Schemas[schemaIndex].GUID = newGUID
schema := se.db.Schemas[schemaIndex]
se.pages.RemovePage("edit-schema")
se.pages.RemovePage("schema-editor")
se.showSchemaEditor(schemaIndex, schema)
})
form.AddButton("Back", func() {
// Discard changes - don't apply them
schema := se.db.Schemas[schemaIndex]
se.pages.RemovePage("edit-schema")
se.pages.RemovePage("schema-editor")
se.showSchemaEditor(schemaIndex, schema)
})
form.SetBorder(true).SetTitle(" Edit Schema ").SetTitleAlign(tview.AlignLeft)
form.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if event.Key() == tcell.KeyEscape {
se.showExitConfirmation("edit-schema", "schema-editor")
return nil
}
return event
})
se.pages.AddPage("edit-schema", form, true, true)
}

pkg/ui/table_dataops.go Normal file
@@ -0,0 +1,88 @@
package ui
import "git.warky.dev/wdevs/relspecgo/pkg/models"
// Table data operations - business logic for table management
// CreateTable creates a new table and adds it to a schema
func (se *SchemaEditor) CreateTable(schemaIndex int, name, description string) *models.Table {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
schema := se.db.Schemas[schemaIndex]
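// Columns, constraints, and indexes are keyed by name, so initialise their maps up front.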
newTable := &models.Table{
Name: name,
Schema: schema.Name,
Description: description,
Columns: make(map[string]*models.Column),
Constraints: make(map[string]*models.Constraint),
Indexes: make(map[string]*models.Index),
}
schema.UpdateDate()
schema.Tables = append(schema.Tables, newTable)
return newTable
}
// UpdateTable updates an existing table's properties
func (se *SchemaEditor) UpdateTable(schemaIndex, tableIndex int, name, description string) {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return
}
schema.UpdateDate()
table := schema.Tables[tableIndex]
table.Name = name
table.Description = description
table.UpdateDate()
}
// DeleteTable removes a table from a schema
func (se *SchemaEditor) DeleteTable(schemaIndex, tableIndex int) bool {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return false
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return false
}
schema.UpdateDate()
schema.Tables = append(schema.Tables[:tableIndex], schema.Tables[tableIndex+1:]...)
return true
}
// GetTable returns a table by schema and table index
func (se *SchemaEditor) GetTable(schemaIndex, tableIndex int) *models.Table {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
schema := se.db.Schemas[schemaIndex]
if tableIndex < 0 || tableIndex >= len(schema.Tables) {
return nil
}
return schema.Tables[tableIndex]
}
// GetAllTables returns all tables across all schemas
func (se *SchemaEditor) GetAllTables() []*models.Table {
var tables []*models.Table
for _, schema := range se.db.Schemas {
tables = append(tables, schema.Tables...)
}
return tables
}
// GetTablesInSchema returns all tables in a specific schema
func (se *SchemaEditor) GetTablesInSchema(schemaIndex int) []*models.Table {
if schemaIndex < 0 || schemaIndex >= len(se.db.Schemas) {
return nil
}
return se.db.Schemas[schemaIndex].Tables
}

Some files were not shown because too many files have changed in this diff.