Fixed .gitignore bug
Some checks are pending
CI / Test (1.23) (push) Waiting to run
CI / Test (1.24) (push) Waiting to run
CI / Test (1.25) (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Build (push) Waiting to run

This commit is contained in:
Hein
2025-12-18 14:37:34 +02:00
parent 1e38e9e9ce
commit b7950057eb
5 changed files with 686 additions and 2 deletions

2
.gitignore vendored
View File

@@ -22,7 +22,6 @@
go.work
# RelSpec specific
relspec
*.test
coverage.out
coverage.html
@@ -32,7 +31,6 @@ coverage.html
.idea/
*.swp
*.swo
*~
# OS
.DS_Store

363
cmd/relspec/convert.go Normal file
View File

@@ -0,0 +1,363 @@
package main
import (
"fmt"
"os"
"strings"
"time"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"git.warky.dev/wdevs/relspecgo/pkg/writers"
wbun "git.warky.dev/wdevs/relspecgo/pkg/writers/bun"
wdbml "git.warky.dev/wdevs/relspecgo/pkg/writers/dbml"
wdctx "git.warky.dev/wdevs/relspecgo/pkg/writers/dctx"
wdrawdb "git.warky.dev/wdevs/relspecgo/pkg/writers/drawdb"
wgorm "git.warky.dev/wdevs/relspecgo/pkg/writers/gorm"
wjson "git.warky.dev/wdevs/relspecgo/pkg/writers/json"
wpgsql "git.warky.dev/wdevs/relspecgo/pkg/writers/pgsql"
wyaml "git.warky.dev/wdevs/relspecgo/pkg/writers/yaml"
"github.com/spf13/cobra"
)
// Flag storage for the convert command; values are bound by cobra in init().
var (
	convertSourceType   string // --from: source format (dbml, dctx, drawdb, json, yaml, pgsql)
	convertSourcePath   string // --from-path: source file for file-based formats
	convertSourceConn   string // --from-conn: connection string for database-backed formats
	convertTargetType   string // --to: target format
	convertTargetPath   string // --to-path: output file or directory
	convertPackageName  string // --package: Go package name for gorm/bun code generation
	convertSchemaFilter string // --schema: restrict output to a single schema by name
)
// convertCmd implements "relspec convert": it reads a database schema from
// the source format and writes it out in the target format. Flag values live
// in the package-level convert* variables; format-specific validation happens
// in runConvert / writeDatabase.
// NOTE: the raw-string help text below is user-visible --help output and is
// kept verbatim.
var convertCmd = &cobra.Command{
	Use:   "convert",
	Short: "Convert database schemas between different formats",
	Long: `Convert database schemas from one format to another.
Supports reading from multiple sources (live databases, DBML, DCTX, DrawDB,
JSON, YAML, etc.) and writing to various formats (GORM, Bun, DBML, JSON,
YAML, SQL, etc.).
Input formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- json: JSON database schema
- yaml: YAML database schema
- pgsql: PostgreSQL database (live connection)
Output formats:
- dbml: DBML schema files
- dctx: DCTX schema files
- drawdb: DrawDB JSON files
- json: JSON database schema
- yaml: YAML database schema
- gorm: GORM model files (Go)
- bun: Bun model files (Go)
- pgsql: PostgreSQL SQL schema
PostgreSQL Connection String Examples:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
Examples:
# Convert DBML to GORM models
relspec convert --from dbml --from-path schema.dbml \
--to gorm --to-path models/ --package models
# Convert PostgreSQL database to DBML
relspec convert --from pgsql \
--from-conn "postgres://myuser:mypass@localhost:5432/mydb" \
--to dbml --to-path schema.dbml
# Convert PostgreSQL to JSON (with SSL disabled)
relspec convert --from pgsql \
--from-conn "postgresql://user:pass@localhost/db?sslmode=disable" \
--to json --to-path schema.json
# Convert DBML to JSON
relspec convert --from dbml --from-path schema.dbml \
--to json --to-path schema.json
# Convert DrawDB to PostgreSQL SQL
relspec convert --from drawdb --from-path design.ddb \
--to pgsql --to-path schema.sql
# Convert local PostgreSQL database to YAML
relspec convert --from pgsql \
--from-conn "host=localhost port=5432 user=admin password=secret dbname=prod" \
--to yaml --to-path schema.yaml
# Convert PostgreSQL to DCTX (single schema required)
relspec convert --from pgsql \
--from-conn "postgres://user:pass@localhost:5432/mydb" \
--to dctx --to-path schema.dctx --schema public
# Convert DBML to DCTX with specific schema
relspec convert --from dbml --from-path schema.dbml \
--to dctx --to-path output.dctx --schema public`,
	RunE: runConvert,
}
// init registers the convert command's flags. --from, --to, and --to-path are
// mandatory; the remaining flags are format-dependent and validated later in
// runConvert / writeDatabase.
func init() {
	convertCmd.Flags().StringVar(&convertSourceType, "from", "", "Source format (dbml, dctx, drawdb, json, yaml, pgsql)")
	convertCmd.Flags().StringVar(&convertSourcePath, "from-path", "", "Source file path (for file-based formats)")
	convertCmd.Flags().StringVar(&convertSourceConn, "from-conn", "", "Source connection string (for database formats)")
	convertCmd.Flags().StringVar(&convertTargetType, "to", "", "Target format (dbml, dctx, drawdb, json, yaml, gorm, bun, pgsql)")
	convertCmd.Flags().StringVar(&convertTargetPath, "to-path", "", "Target output path (file or directory)")
	convertCmd.Flags().StringVar(&convertPackageName, "package", "", "Package name (for code generation formats like gorm/bun)")
	convertCmd.Flags().StringVar(&convertSchemaFilter, "schema", "", "Filter to a specific schema by name (required for formats like dctx that only support single schemas)")
	// MarkFlagRequired only errors for unknown flag names; the flags are
	// registered immediately above, so the returned errors are ignored.
	convertCmd.MarkFlagRequired("from")
	convertCmd.MarkFlagRequired("to")
	convertCmd.MarkFlagRequired("to-path")
}
// runConvert is the RunE handler for convertCmd. It reads the source schema,
// reports progress on stderr, and writes the converted result to the target.
func runConvert(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Converter ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", getCurrentTimestamp())

	// Phase 1: load the source schema. Connection strings are masked before
	// being echoed so credentials never reach the terminal.
	fmt.Fprintf(os.Stderr, "[1/2] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", convertSourceType)
	if convertSourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", convertSourcePath)
	}
	if convertSourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPassword(convertSourceConn))
	}

	db, err := readDatabaseForConvert(convertSourceType, convertSourcePath, convertSourceConn)
	if err != nil {
		return fmt.Errorf("failed to read source: %w", err)
	}

	tableCount := 0
	for i := range db.Schemas {
		tableCount += len(db.Schemas[i].Tables)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", db.Name)
	fmt.Fprintf(os.Stderr, " Found: %d schema(s)\n", len(db.Schemas))
	fmt.Fprintf(os.Stderr, " Found: %d table(s)\n\n", tableCount)

	// Phase 2: emit the schema in the target format.
	fmt.Fprintf(os.Stderr, "[2/2] Writing to target format...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", convertTargetType)
	fmt.Fprintf(os.Stderr, " Output: %s\n", convertTargetPath)
	if convertPackageName != "" {
		fmt.Fprintf(os.Stderr, " Package: %s\n", convertPackageName)
	}
	if convertSchemaFilter != "" {
		fmt.Fprintf(os.Stderr, " Schema: %s\n", convertSchemaFilter)
	}
	if err := writeDatabase(db, convertTargetType, convertTargetPath, convertPackageName, convertSchemaFilter); err != nil {
		return fmt.Errorf("failed to write target: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully wrote output\n\n")

	fmt.Fprintf(os.Stderr, "=== Conversion Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", getCurrentTimestamp())
	return nil
}
// readDatabaseForConvert constructs the reader matching dbType and loads the
// full database model from it. File-based formats require filePath; the
// PostgreSQL reader requires connString instead.
func readDatabaseForConvert(dbType, filePath, connString string) (*models.Database, error) {
	format := strings.ToLower(dbType)
	// All file-based readers share the same options value.
	fileOpts := &readers.ReaderOptions{FilePath: filePath}

	var reader readers.Reader
	switch format {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DBML format")
		}
		reader = dbml.NewReader(fileOpts)
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DCTX format")
		}
		reader = dctx.NewReader(fileOpts)
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for DrawDB format")
		}
		reader = drawdb.NewReader(fileOpts)
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for JSON format")
		}
		reader = json.NewReader(fileOpts)
	case "yaml", "yml":
		if filePath == "" {
			return nil, fmt.Errorf("file path is required for YAML format")
		}
		reader = yaml.NewReader(fileOpts)
	case "pgsql", "postgres", "postgresql":
		if connString == "" {
			return nil, fmt.Errorf("connection string is required for PostgreSQL format")
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		return nil, fmt.Errorf("unsupported source format: %s", dbType)
	}

	database, err := reader.ReadDatabase()
	if err != nil {
		return nil, fmt.Errorf("failed to read database: %w", err)
	}
	return database, nil
}
// writeDatabase writes db to outputPath in the format named by dbType.
// packageName is required for the Go code generators (gorm, bun).
// schemaFilter, when non-empty, selects exactly one schema to write; the
// single-schema dctx format additionally auto-selects when the database
// contains only one schema and errors otherwise.
func writeDatabase(db *models.Database, dbType, outputPath, packageName, schemaFilter string) error {
	var writer writers.Writer
	writerOpts := &writers.WriterOptions{
		OutputPath:  outputPath,
		PackageName: packageName,
	}
	switch strings.ToLower(dbType) {
	case "dbml":
		writer = wdbml.NewWriter(writerOpts)
	case "dctx":
		writer = wdctx.NewWriter(writerOpts)
	case "drawdb":
		writer = wdrawdb.NewWriter(writerOpts)
	case "json":
		writer = wjson.NewWriter(writerOpts)
	case "yaml", "yml":
		writer = wyaml.NewWriter(writerOpts)
	case "gorm":
		// Code generators emit Go files and need a package name up front.
		if packageName == "" {
			return fmt.Errorf("package name is required for GORM format (use --package flag)")
		}
		writer = wgorm.NewWriter(writerOpts)
	case "bun":
		if packageName == "" {
			return fmt.Errorf("package name is required for Bun format (use --package flag)")
		}
		writer = wbun.NewWriter(writerOpts)
	case "pgsql", "postgres", "postgresql", "sql":
		writer = wpgsql.NewWriter(writerOpts)
	default:
		return fmt.Errorf("unsupported target format: %s", dbType)
	}
	// Handle schema filtering for formats that only support single schemas.
	// This path takes precedence over the dctx auto-select logic below.
	if schemaFilter != "" {
		// Find the specified schema by exact (case-sensitive) name match.
		var targetSchema *models.Schema
		for _, schema := range db.Schemas {
			if schema.Name == schemaFilter {
				targetSchema = schema
				break
			}
		}
		if targetSchema == nil {
			return fmt.Errorf("schema '%s' not found in database. Available schemas: %v",
				schemaFilter, getSchemaNames(db))
		}
		// Write just the filtered schema.
		if err := writer.WriteSchema(targetSchema); err != nil {
			return fmt.Errorf("failed to write schema: %w", err)
		}
		return nil
	}
	// For formats like DCTX that don't support full database writes, require
	// a schema filter — unless there is exactly one schema to choose from.
	if strings.ToLower(dbType) == "dctx" {
		if len(db.Schemas) == 0 {
			return fmt.Errorf("no schemas found in database")
		}
		if len(db.Schemas) == 1 {
			// Auto-select the only schema.
			if err := writer.WriteSchema(db.Schemas[0]); err != nil {
				return fmt.Errorf("failed to write schema: %w", err)
			}
			return nil
		}
		// Multiple schemas: the user must pick one with --schema.
		return fmt.Errorf("multiple schemas found, please specify which schema to export using --schema flag. Available schemas: %v",
			getSchemaNames(db))
	}
	// Write the full database for formats that support it.
	if err := writer.WriteDatabase(db); err != nil {
		return fmt.Errorf("failed to write database: %w", err)
	}
	return nil
}
// getSchemaNames collects the names of every schema in db, in order.
// Used to list the available choices in error messages.
func getSchemaNames(db *models.Database) []string {
	names := make([]string, 0, len(db.Schemas))
	for _, s := range db.Schemas {
		names = append(names, s.Name)
	}
	return names
}
// getCurrentTimestamp returns the current local time formatted as
// "YYYY-MM-DD HH:MM:SS", used in the progress banners.
func getCurrentTimestamp() string {
	const layout = "2006-01-02 15:04:05"
	return time.Now().Format(layout)
}
// maskPassword redacts the password portion of a connection string so it can
// be echoed to logs or the terminal.
//
// Two syntaxes are handled:
//   - URL form:       scheme://user:password@host...  ->  scheme://user:***@host...
//   - key=value form: "... password=secret ..."       ->  "... password=*** ..."
//
// Strings matching neither form are returned unchanged.
//
// BUG FIX: the previous implementation split on every '@', so a password
// containing '@' (e.g. "p@ss") leaked its tail into the output
// ("user:***@ss@host"). We now split credentials from the host at the LAST
// '@', which is where the host part begins in a URL.
func maskPassword(connStr string) string {
	// URL form: everything between "://" and the final '@' is "user[:password]".
	if schemeEnd := strings.Index(connStr, "://"); schemeEnd >= 0 {
		if at := strings.LastIndex(connStr, "@"); at > schemeEnd {
			creds := connStr[schemeEnd+3 : at]
			if colon := strings.Index(creds, ":"); colon >= 0 {
				return connStr[:schemeEnd+3] + creds[:colon] + ":***" + connStr[at:]
			}
			// No password present (user-only credentials): nothing to mask.
		}
	}
	// key=value form: replace any space-separated "password=..." token.
	if strings.Contains(connStr, "password=") {
		parts := strings.Split(connStr, " ")
		for i, part := range parts {
			if strings.HasPrefix(part, "password=") {
				parts[i] = "password=***"
			}
		}
		return strings.Join(parts, " ")
	}
	return connStr
}

289
cmd/relspec/diff.go Normal file
View File

@@ -0,0 +1,289 @@
package main
import (
"fmt"
"os"
"strings"
"time"
"git.warky.dev/wdevs/relspecgo/pkg/diff"
"git.warky.dev/wdevs/relspecgo/pkg/models"
"git.warky.dev/wdevs/relspecgo/pkg/readers"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dbml"
"git.warky.dev/wdevs/relspecgo/pkg/readers/dctx"
"git.warky.dev/wdevs/relspecgo/pkg/readers/drawdb"
"git.warky.dev/wdevs/relspecgo/pkg/readers/json"
"git.warky.dev/wdevs/relspecgo/pkg/readers/pgsql"
"git.warky.dev/wdevs/relspecgo/pkg/readers/yaml"
"github.com/spf13/cobra"
)
// Flag storage for the diff command; values are bound by cobra in init().
var (
	sourceType   string // --from: source schema format
	sourcePath   string // --from-path: source file for file-based formats
	sourceConn   string // --from-conn: source connection string for database formats
	targetType   string // --to: target schema format
	targetPath   string // --to-path: target file for file-based formats
	targetConn   string // --to-conn: target connection string for database formats
	outputFormat string // --format: report format (summary, json, html)
	outputPath   string // --output: report file (stdout allowed only for summary)
)
// diffCmd implements "relspec diff": it reads two schemas (source and
// target), compares them, and writes a differences report in the format
// selected by --format. Flag values live in the package-level variables
// declared above.
// NOTE: the raw-string help text below is user-visible --help output and is
// kept verbatim.
var diffCmd = &cobra.Command{
	Use:   "diff",
	Short: "Compare two database schemas and report differences",
	Long: `Compare two database schemas from different sources and generate a
differences report.
The command reads schemas from two sources (source and target) and
analyzes differences including missing tables, schemas, columns,
indexes, constraints, and sequences.
Output formats:
- summary: Human-readable summary to stdout (default)
- json: Detailed JSON report
- html: HTML report with visual formatting
PostgreSQL Connection String Examples:
postgres://username:password@localhost:5432/database_name
postgres://username:password@localhost/database_name
postgresql://user:pass@host:5432/dbname?sslmode=disable
postgresql://user:pass@host/dbname?sslmode=require
host=localhost port=5432 user=username password=pass dbname=mydb sslmode=disable
Examples:
# Compare two DBML files with summary output
relspec diff --from dbml --from-path schema1.dbml \
--to dbml --to-path schema2.dbml
# Compare PostgreSQL database with DBML file, output as JSON
relspec diff --from pgsql \
--from-conn "postgres://myuser:mypass@localhost:5432/prod_db" \
--to dbml --to-path schema.dbml \
--format json --output diff.json
# Compare two PostgreSQL databases, output as HTML
relspec diff --from pgsql \
--from-conn "postgres://user:pass@localhost:5432/db1" \
--to pgsql \
--to-conn "postgres://user:pass@localhost:5432/db2" \
--format html --output report.html
# Compare local and remote PostgreSQL databases
relspec diff --from pgsql \
--from-conn "postgresql://admin:secret@localhost/staging?sslmode=disable" \
--to pgsql \
--to-conn "postgresql://admin:secret@prod.example.com/production?sslmode=require" \
--format summary
# Compare DBML files with detailed JSON output
relspec diff --from dbml --from-path v1.dbml \
--to dbml --to-path v2.dbml \
--format json --output changes.json`,
	RunE: runDiff,
}
// init registers the diff command's flags. --from and --to are mandatory;
// path/conn requirements depend on the chosen formats and are validated in
// readDatabase, while --output is validated in runDiff.
func init() {
	diffCmd.Flags().StringVar(&sourceType, "from", "", "Source database format (dbml, dctx, drawdb, json, yaml, pgsql)")
	diffCmd.Flags().StringVar(&sourcePath, "from-path", "", "Source file path (for file-based formats)")
	diffCmd.Flags().StringVar(&sourceConn, "from-conn", "", "Source connection string (for database formats)")
	diffCmd.Flags().StringVar(&targetType, "to", "", "Target database format (dbml, dctx, drawdb, json, yaml, pgsql)")
	diffCmd.Flags().StringVar(&targetPath, "to-path", "", "Target file path (for file-based formats)")
	diffCmd.Flags().StringVar(&targetConn, "to-conn", "", "Target connection string (for database formats)")
	diffCmd.Flags().StringVar(&outputFormat, "format", "summary", "Output format (summary, json, html)")
	diffCmd.Flags().StringVar(&outputPath, "output", "", "Output file path (default: stdout for summary, required for json/html)")
	// MarkFlagRequired only errors for unknown flag names; the flags are
	// registered immediately above, so the returned errors are ignored.
	diffCmd.MarkFlagRequired("from")
	diffCmd.MarkFlagRequired("to")
}
// runDiff is the RunE handler for diffCmd. It reads the source and target
// schemas, compares them with the diff package, and writes a report in the
// requested format (summary to stdout by default; json/html require --output).
// Progress messages go to stderr so stdout stays clean for the summary report.
func runDiff(cmd *cobra.Command, args []string) error {
	fmt.Fprintf(os.Stderr, "\n=== RelSpec Schema Diff ===\n")
	fmt.Fprintf(os.Stderr, "Started at: %s\n\n", time.Now().Format("2006-01-02 15:04:05"))
	// [1/3] Read source database. Connection strings are masked before being
	// echoed so credentials never reach the terminal or logs.
	fmt.Fprintf(os.Stderr, "[1/3] Reading source schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", sourceType)
	if sourcePath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", sourcePath)
	}
	if sourceConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPasswordInDiff(sourceConn))
	}
	sourceDB, err := readDatabase(sourceType, sourcePath, sourceConn, "source")
	if err != nil {
		return fmt.Errorf("failed to read source database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", sourceDB.Name)
	sourceTables := 0
	for _, schema := range sourceDB.Schemas {
		sourceTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d schema(s), %d table(s)\n\n", len(sourceDB.Schemas), sourceTables)
	// [2/3] Read target database, mirroring the source handling above.
	fmt.Fprintf(os.Stderr, "[2/3] Reading target schema...\n")
	fmt.Fprintf(os.Stderr, " Format: %s\n", targetType)
	if targetPath != "" {
		fmt.Fprintf(os.Stderr, " Path: %s\n", targetPath)
	}
	if targetConn != "" {
		fmt.Fprintf(os.Stderr, " Conn: %s\n", maskPasswordInDiff(targetConn))
	}
	targetDB, err := readDatabase(targetType, targetPath, targetConn, "target")
	if err != nil {
		return fmt.Errorf("failed to read target database: %w", err)
	}
	fmt.Fprintf(os.Stderr, " ✓ Successfully read database '%s'\n", targetDB.Name)
	targetTables := 0
	for _, schema := range targetDB.Schemas {
		targetTables += len(schema.Tables)
	}
	fmt.Fprintf(os.Stderr, " Found: %d schema(s), %d table(s)\n\n", len(targetDB.Schemas), targetTables)
	// [3/3] Compare the two databases and total up the differences.
	fmt.Fprintf(os.Stderr, "[3/3] Comparing schemas...\n")
	result := diff.CompareDatabases(sourceDB, targetDB)
	summary := diff.ComputeSummary(result)
	// NOTE(review): sequence differences are not folded into this total even
	// though the help text mentions sequences — confirm that is intentional.
	totalDiffs := summary.Schemas.Missing + summary.Schemas.Extra + summary.Schemas.Modified +
		summary.Tables.Missing + summary.Tables.Extra + summary.Tables.Modified +
		summary.Columns.Missing + summary.Columns.Extra + summary.Columns.Modified +
		summary.Indexes.Missing + summary.Indexes.Extra + summary.Indexes.Modified +
		summary.Constraints.Missing + summary.Constraints.Extra + summary.Constraints.Modified
	fmt.Fprintf(os.Stderr, " ✓ Comparison complete\n")
	fmt.Fprintf(os.Stderr, " Found: %d difference(s)\n\n", totalDiffs)
	// Map the --format flag onto the diff package's output format enum.
	var format diff.OutputFormat
	switch strings.ToLower(outputFormat) {
	case "summary":
		format = diff.FormatSummary
	case "json":
		format = diff.FormatJSON
	case "html":
		format = diff.FormatHTML
	default:
		return fmt.Errorf("unsupported output format: %s", outputFormat)
	}
	// Choose the output destination: summary may go to stdout, but json and
	// html reports require an explicit --output file.
	var writer *os.File
	if outputPath == "" {
		if format != diff.FormatSummary {
			return fmt.Errorf("output path is required for %s format", outputFormat)
		}
		writer = os.Stdout
	} else {
		fmt.Fprintf(os.Stderr, "Writing %s report to: %s\n", outputFormat, outputPath)
		f, err := os.Create(outputPath)
		if err != nil {
			return fmt.Errorf("failed to create output file: %w", err)
		}
		defer f.Close()
		writer = f
	}
	// Format the comparison result and write the report.
	if err := diff.FormatDiff(result, format, writer); err != nil {
		return fmt.Errorf("failed to format diff output: %w", err)
	}
	if outputPath != "" {
		fmt.Fprintf(os.Stderr, "✓ Report written successfully\n\n")
	}
	fmt.Fprintf(os.Stderr, "=== Diff Complete ===\n")
	fmt.Fprintf(os.Stderr, "Completed at: %s\n\n", time.Now().Format("2006-01-02 15:04:05"))
	return nil
}
// readDatabase constructs the reader matching dbType and loads the full
// database model from it. label ("source" or "target") prefixes every error
// so the user can tell which side of the diff failed. File-based formats
// require filePath; the PostgreSQL reader requires connString instead.
//
// CONSISTENCY FIX: accept "yml" as an alias for "yaml", matching the
// convert command's reader, so the same format flag works for both commands.
func readDatabase(dbType, filePath, connString, label string) (*models.Database, error) {
	var reader readers.Reader
	switch strings.ToLower(dbType) {
	case "dbml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DBML format", label)
		}
		reader = dbml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "dctx":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DCTX format", label)
		}
		reader = dctx.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "drawdb":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for DrawDB format", label)
		}
		reader = drawdb.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "json":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for JSON format", label)
		}
		reader = json.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "yaml", "yml":
		if filePath == "" {
			return nil, fmt.Errorf("%s: file path is required for YAML format", label)
		}
		reader = yaml.NewReader(&readers.ReaderOptions{FilePath: filePath})
	case "pgsql", "postgres", "postgresql":
		if connString == "" {
			return nil, fmt.Errorf("%s: connection string is required for PostgreSQL format", label)
		}
		reader = pgsql.NewReader(&readers.ReaderOptions{ConnectionString: connString})
	default:
		return nil, fmt.Errorf("%s: unsupported database format: %s", label, dbType)
	}
	db, err := reader.ReadDatabase()
	if err != nil {
		return nil, fmt.Errorf("%s: failed to read database: %w", label, err)
	}
	return db, nil
}
// maskPasswordInDiff redacts the password portion of a connection string so
// it can be echoed to logs or the terminal.
// NOTE: duplicated as maskPassword in convert.go — keep the two in sync.
//
// Two syntaxes are handled:
//   - URL form:       scheme://user:password@host...  ->  scheme://user:***@host...
//   - key=value form: "... password=secret ..."       ->  "... password=*** ..."
//
// Strings matching neither form are returned unchanged.
//
// BUG FIX: the previous implementation split on every '@', so a password
// containing '@' (e.g. "p@ss") leaked its tail into the output
// ("user:***@ss@host"). We now split credentials from the host at the LAST
// '@', which is where the host part begins in a URL.
func maskPasswordInDiff(connStr string) string {
	// URL form: everything between "://" and the final '@' is "user[:password]".
	if schemeEnd := strings.Index(connStr, "://"); schemeEnd >= 0 {
		if at := strings.LastIndex(connStr, "@"); at > schemeEnd {
			creds := connStr[schemeEnd+3 : at]
			if colon := strings.Index(creds, ":"); colon >= 0 {
				return connStr[:schemeEnd+3] + creds[:colon] + ":***" + connStr[at:]
			}
			// No password present (user-only credentials): nothing to mask.
		}
	}
	// key=value form: replace any space-separated "password=..." token.
	if strings.Contains(connStr, "password=") {
		parts := strings.Split(connStr, " ")
		for i, part := range parts {
			if strings.HasPrefix(part, "password=") {
				parts[i] = "password=***"
			}
		}
		return strings.Join(parts, " ")
	}
	return connStr
}

13
cmd/relspec/main.go Normal file
View File

@@ -0,0 +1,13 @@
package main
import (
"fmt"
"os"
)
// main is the CLI entry point: it executes the root command and exits with
// status 1 after printing any error to stderr.
func main() {
	err := rootCmd.Execute()
	if err == nil {
		return
	}
	fmt.Fprintln(os.Stderr, err)
	os.Exit(1)
}

21
cmd/relspec/root.go Normal file
View File

@@ -0,0 +1,21 @@
package main
import (
"github.com/spf13/cobra"
)
// rootCmd is the top-level "relspec" command. It has no run function of its
// own; subcommands (convert, diff) are attached via AddCommand in this
// package's init functions.
// NOTE: the raw-string description below is user-visible --help output and
// is kept verbatim.
var rootCmd = &cobra.Command{
	Use:   "relspec",
	Short: "RelSpec - Database schema conversion and analysis tool",
	Long: `RelSpec is a database relations specification tool that provides
bidirectional conversion between various database schema formats.
It reads database schemas from multiple sources (live databases, DBML,
DCTX, DrawDB, etc.) and writes them to various formats (GORM, Bun,
JSON, YAML, SQL, etc.).`,
}
// init wires the subcommands into the root command.
func init() {
	rootCmd.AddCommand(convertCmd)
	rootCmd.AddCommand(diffCmd)
}