Compare commits

...

13 Commits

Author SHA1 Message Date
Hein
aa99e8e4bc Added WrapHTTPRequest 2025-11-24 10:13:48 +02:00
Hein
163593901f Huge preload chains causing errors; workaround to do separate selects.
2025-11-21 17:09:11 +02:00
Hein
1261960e97 Ability to handle multiple x-custom- headers
2025-11-21 12:15:07 +02:00
Hein
76bbf33db2 Fixed SingleRecordAsObject=true when handleRead is called with no id 2025-11-21 11:49:08 +02:00
Hein
02c9b96b0c Better SanitizeWhereClause 2025-11-21 11:42:01 +02:00
Hein
9a3564f05f SanitizeWhereClause with tablename on handlers. 2025-11-21 11:00:44 +02:00
Hein
a931b8cdd2 Better preloads 2025-11-21 10:41:58 +02:00
Hein
7e76977dcc Lots of refactoring, Fixes to preloads 2025-11-21 10:17:20 +02:00
Hein
7853a3f56a cql_columns parsing and recursive preloading. Also added legacy header support for limt(s,e), sort(x,y,-z) 2025-11-21 09:15:40 +02:00
Hein
c2e0c36c79 Restheadspec now takes parameters from query parameters and headers. Allows for backward compatibility with our old dojo clients 2025-11-21 08:56:58 +02:00
Hein
59bd709460 More reflection functions to handle SQL columns and get default SQL column lists. 2025-11-21 08:35:46 +02:00
Hein
05962035b6 When you specify computed columns without explicitly listing base columns, you'll get all base model columns
2025-11-20 17:34:46 +02:00
Hein
1cd04b7083 Better where clause handling for preloads 2025-11-20 17:02:27 +02:00
19 changed files with 2307 additions and 537 deletions

View File

@@ -86,7 +86,6 @@
"emptyFallthrough",
"equalFold",
"flagName",
"ifElseChain",
"indexAlloc",
"initClause",
"methodExprCall",
@@ -106,6 +105,9 @@
"unnecessaryBlock",
"weakCond",
"yodaStyleExpr"
],
"disabled-checks": [
"ifElseChain"
]
},
"revive": {

View File

@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"fmt"
"reflect"
"strings"
"github.com/uptrace/bun"
@@ -99,12 +100,20 @@ func (b *BunAdapter) RunInTransaction(ctx context.Context, fn func(common.Databa
// BunSelectQuery implements SelectQuery for Bun
type BunSelectQuery struct {
query *bun.SelectQuery
db bun.IDB // Store DB connection for count queries
hasModel bool // Track if Model() was called
schema string // Separated schema name
tableName string // Just the table name, without schema
tableAlias string
query *bun.SelectQuery
db bun.IDB // Store DB connection for count queries
hasModel bool // Track if Model() was called
schema string // Separated schema name
tableName string // Just the table name, without schema
tableAlias string
deferredPreloads []deferredPreload // Preloads to execute as separate queries
}
// deferredPreload represents a preload that will be executed as a separate query
// to avoid PostgreSQL identifier length limits
type deferredPreload struct {
relation string
apply []func(common.SelectQuery) common.SelectQuery
}
func (b *BunSelectQuery) Model(model interface{}) common.SelectQuery {
@@ -233,11 +242,99 @@ func (b *BunSelectQuery) Preload(relation string, conditions ...interface{}) com
return b
}
// // shortenAliasForPostgres shortens a table/relation alias if it would exceed PostgreSQL's 63-char limit
// // when combined with typical column names
// func shortenAliasForPostgres(relationPath string) (string, bool) {
// // Convert relation path to the alias format Bun uses: dots become double underscores
// // Also convert to lowercase and use snake_case as Bun does
// parts := strings.Split(relationPath, ".")
// alias := strings.ToLower(strings.Join(parts, "__"))
// // PostgreSQL truncates identifiers to 63 chars
// // If the alias + typical column name would exceed this, we need to shorten
// // Reserve at least 30 chars for column names (e.g., "__rid_mastertype_hubtype")
// const maxAliasLength = 30
// if len(alias) > maxAliasLength {
// // Create a shortened alias using a hash of the original
// hash := md5.Sum([]byte(alias))
// hashStr := hex.EncodeToString(hash[:])[:8]
// // Keep first few chars of original for readability + hash
// prefixLen := maxAliasLength - 9 // 9 = 1 underscore + 8 hash chars
// if prefixLen > len(alias) {
// prefixLen = len(alias)
// }
// shortened := alias[:prefixLen] + "_" + hashStr
// logger.Debug("Shortened alias '%s' (%d chars) to '%s' (%d chars) to avoid PostgreSQL 63-char limit",
// alias, len(alias), shortened, len(shortened))
// return shortened, true
// }
// return alias, false
// }
// // estimateColumnAliasLength estimates the length of a column alias in a nested preload
// // Bun creates aliases like: relationChain__columnName
// func estimateColumnAliasLength(relationPath string, columnName string) int {
// relationParts := strings.Split(relationPath, ".")
// aliasChain := strings.ToLower(strings.Join(relationParts, "__"))
// // Bun adds "__" between alias and column name
// return len(aliasChain) + 2 + len(columnName)
// }
func (b *BunSelectQuery) PreloadRelation(relation string, apply ...func(common.SelectQuery) common.SelectQuery) common.SelectQuery {
// Check if this relation chain would create problematic long aliases
relationParts := strings.Split(relation, ".")
aliasChain := strings.ToLower(strings.Join(relationParts, "__"))
// PostgreSQL's identifier limit is 63 characters
const postgresIdentifierLimit = 63
const safeAliasLimit = 35 // Leave room for column names
// If the alias chain is too long, defer this preload to be executed as a separate query
if len(aliasChain) > safeAliasLimit {
logger.Info("Preload relation '%s' creates long alias chain '%s' (%d chars). "+
"Using separate query to avoid PostgreSQL %d-char identifier limit.",
relation, aliasChain, len(aliasChain), postgresIdentifierLimit)
// For nested preloads (e.g., "Parent.Child"), split into separate preloads
// This avoids the long concatenated alias
if len(relationParts) > 1 {
// Load first level normally: "Parent"
firstLevel := relationParts[0]
remainingPath := strings.Join(relationParts[1:], ".")
logger.Info("Splitting nested preload: loading '%s' first, then '%s' separately",
firstLevel, remainingPath)
// Apply the first level preload normally
b.query = b.query.Relation(firstLevel)
// Store the remaining nested preload to be executed after the main query
b.deferredPreloads = append(b.deferredPreloads, deferredPreload{
relation: relation,
apply: apply,
})
return b
}
// Single level but still too long - just warn and continue
logger.Warn("Single-level preload '%s' has a very long name (%d chars). "+
"Consider renaming the field to avoid potential issues.",
relation, len(aliasChain))
}
// Normal preload handling
b.query = b.query.Relation(relation, func(sq *bun.SelectQuery) *bun.SelectQuery {
defer func() {
if r := recover(); r != nil {
logger.HandlePanic("BunSelectQuery.PreloadRelation", r)
err := logger.HandlePanic("BunSelectQuery.PreloadRelation", r)
if err != nil {
return
}
}
}()
if len(apply) == 0 {
@@ -306,7 +403,23 @@ func (b *BunSelectQuery) Scan(ctx context.Context, dest interface{}) (err error)
if dest == nil {
return fmt.Errorf("destination cannot be nil")
}
return b.query.Scan(ctx, dest)
// Execute the main query first
err = b.query.Scan(ctx, dest)
if err != nil {
return err
}
// Execute any deferred preloads
if len(b.deferredPreloads) > 0 {
err = b.executeDeferredPreloads(ctx, dest)
if err != nil {
logger.Warn("Failed to execute deferred preloads: %v", err)
// Don't fail the whole query, just log the warning
}
}
return nil
}
func (b *BunSelectQuery) ScanModel(ctx context.Context) (err error) {
@@ -319,7 +432,132 @@ func (b *BunSelectQuery) ScanModel(ctx context.Context) (err error) {
return fmt.Errorf("model is nil")
}
return b.query.Scan(ctx)
// Execute the main query first
err = b.query.Scan(ctx)
if err != nil {
return err
}
// Execute any deferred preloads
if len(b.deferredPreloads) > 0 {
model := b.query.GetModel()
err = b.executeDeferredPreloads(ctx, model.Value())
if err != nil {
logger.Warn("Failed to execute deferred preloads: %v", err)
// Don't fail the whole query, just log the warning
}
}
return nil
}
// executeDeferredPreloads executes preloads that were deferred to avoid PostgreSQL identifier length limits
func (b *BunSelectQuery) executeDeferredPreloads(ctx context.Context, dest interface{}) error {
if len(b.deferredPreloads) == 0 {
return nil
}
for _, dp := range b.deferredPreloads {
err := b.executeSingleDeferredPreload(ctx, dest, dp)
if err != nil {
return fmt.Errorf("failed to execute deferred preload '%s': %w", dp.relation, err)
}
}
return nil
}
// executeSingleDeferredPreload executes a single deferred preload
// For a relation like "Parent.Child", it:
// 1. Finds all loaded Parent records in dest
// 2. Loads Child records for those Parents using a separate query (loading only "Child", not "Parent.Child")
// 3. Bun automatically assigns the Child records to the appropriate Parent.Child field
func (b *BunSelectQuery) executeSingleDeferredPreload(ctx context.Context, dest interface{}, dp deferredPreload) error {
relationParts := strings.Split(dp.relation, ".")
if len(relationParts) < 2 {
return fmt.Errorf("deferred preload must be nested (e.g., 'Parent.Child'), got: %s", dp.relation)
}
// The parent relation that was already loaded
parentRelation := relationParts[0]
// The child relation we need to load
childRelation := strings.Join(relationParts[1:], ".")
logger.Debug("Executing deferred preload: loading '%s' on already-loaded '%s'", childRelation, parentRelation)
// Use reflection to access the parent relation field(s) in the loaded records
// Then load the child relation for those parent records
destValue := reflect.ValueOf(dest)
if destValue.Kind() == reflect.Ptr {
destValue = destValue.Elem()
}
// Handle both slice and single record
if destValue.Kind() == reflect.Slice {
// Iterate through each record in the slice
for i := 0; i < destValue.Len(); i++ {
record := destValue.Index(i)
if err := b.loadChildRelationForRecord(ctx, record, parentRelation, childRelation, dp.apply); err != nil {
logger.Warn("Failed to load child relation '%s' for record %d: %v", childRelation, i, err)
// Continue with other records
}
}
} else {
// Single record
if err := b.loadChildRelationForRecord(ctx, destValue, parentRelation, childRelation, dp.apply); err != nil {
return fmt.Errorf("failed to load child relation '%s': %w", childRelation, err)
}
}
return nil
}
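For illustration (not part of this diff), a minimal sketch of the deferred path; the query variable, context, and model/relation names below are hypothetical:

// Sketch only: q is assumed to be a common.SelectQuery backed by BunSelectQuery,
// and MasterTask / the relation names are hypothetical.
var rows []MasterTask
q = q.Model(&rows).
	// "ParentRecordType.ChildRecordType.HistoryEntries" lower-cases to the alias
	// "parentrecordtype__childrecordtype__historyentries" (49 chars > safeAliasLimit),
	// so only "ParentRecordType" is joined here and the rest is deferred.
	PreloadRelation("ParentRecordType.ChildRecordType.HistoryEntries")
// Scan runs the main query, then executeDeferredPreloads loads
// "ChildRecordType.HistoryEntries" onto each loaded ParentRecordType with a separate query.
if err := q.Scan(ctx, &rows); err != nil {
	// handle error
}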
// loadChildRelationForRecord loads a child relation for a single parent record
func (b *BunSelectQuery) loadChildRelationForRecord(ctx context.Context, record reflect.Value, parentRelation, childRelation string, apply []func(common.SelectQuery) common.SelectQuery) error {
// Ensure we're working with the actual struct value, not a pointer
if record.Kind() == reflect.Ptr {
record = record.Elem()
}
// Get the parent relation field
parentField := record.FieldByName(parentRelation)
if !parentField.IsValid() {
// Parent relation field doesn't exist
logger.Debug("Parent relation field '%s' not found in record", parentRelation)
return nil
}
// Check if the parent field is nil (for pointer fields)
if parentField.Kind() == reflect.Ptr && parentField.IsNil() {
// Parent relation not loaded or nil, skip
logger.Debug("Parent relation field '%s' is nil, skipping child preload", parentRelation)
return nil
}
// Get the interface value to pass to Bun
parentValue := parentField.Interface()
// Load the child relation on the parent record
// This uses a shorter alias since we're only loading "Child", not "Parent.Child"
return b.db.NewSelect().
Model(parentValue).
Relation(childRelation, func(sq *bun.SelectQuery) *bun.SelectQuery {
// Apply any custom query modifications
if len(apply) > 0 {
wrapper := &BunSelectQuery{query: sq, db: b.db}
current := common.SelectQuery(wrapper)
for _, fn := range apply {
if fn != nil {
current = fn(current)
}
}
if finalBun, ok := current.(*BunSelectQuery); ok {
return finalBun.query
}
}
return sq
}).
Scan(ctx)
}
func (b *BunSelectQuery) Count(ctx context.Context) (count int, err error) {
@@ -401,7 +639,7 @@ func (b *BunInsertQuery) Exec(ctx context.Context) (res common.Result, err error
err = logger.HandlePanic("BunInsertQuery.Exec", r)
}
}()
if b.values != nil && len(b.values) > 0 {
if len(b.values) > 0 {
if !b.hasModel {
// If no model was set, use the values map as the model
// Bun can insert map[string]interface{} directly

View File

@@ -121,6 +121,16 @@ func (b *BunRouterRequest) QueryParam(key string) string {
return b.req.URL.Query().Get(key)
}
func (b *BunRouterRequest) AllQueryParams() map[string]string {
params := make(map[string]string)
for key, values := range b.req.URL.Query() {
if len(values) > 0 {
params[key] = values[0]
}
}
return params
}
func (b *BunRouterRequest) AllHeaders() map[string]string {
headers := make(map[string]string)
for key, values := range b.req.Header {

View File

@@ -117,6 +117,16 @@ func (h *HTTPRequest) QueryParam(key string) string {
return h.req.URL.Query().Get(key)
}
func (h *HTTPRequest) AllQueryParams() map[string]string {
params := make(map[string]string)
for key, values := range h.req.URL.Query() {
if len(values) > 0 {
params[key] = values[0]
}
}
return params
}
func (h *HTTPRequest) AllHeaders() map[string]string {
headers := make(map[string]string)
for key, values := range h.req.Header {

View File

@@ -1,6 +1,11 @@
package common
import "context"
import (
"context"
"encoding/json"
"io"
"net/http"
)
// Database interface designed to work with both GORM and Bun
type Database interface {
@@ -116,6 +121,7 @@ type Request interface {
Body() ([]byte, error)
PathParam(key string) string
QueryParam(key string) string
AllQueryParams() map[string]string // Get all query parameters as a map
}
// ResponseWriter interface abstracts HTTP response
@@ -129,6 +135,99 @@ type ResponseWriter interface {
// HTTPHandlerFunc type for HTTP handlers
type HTTPHandlerFunc func(ResponseWriter, Request)
// WrapHTTPRequest wraps standard http.ResponseWriter and *http.Request into common interfaces
func WrapHTTPRequest(w http.ResponseWriter, r *http.Request) (ResponseWriter, Request) {
return &StandardResponseWriter{w: w}, &StandardRequest{r: r}
}
// StandardResponseWriter adapts http.ResponseWriter to ResponseWriter interface
type StandardResponseWriter struct {
w http.ResponseWriter
status int
}
func (s *StandardResponseWriter) SetHeader(key, value string) {
s.w.Header().Set(key, value)
}
func (s *StandardResponseWriter) WriteHeader(statusCode int) {
s.status = statusCode
s.w.WriteHeader(statusCode)
}
func (s *StandardResponseWriter) Write(data []byte) (int, error) {
return s.w.Write(data)
}
func (s *StandardResponseWriter) WriteJSON(data interface{}) error {
s.SetHeader("Content-Type", "application/json")
return json.NewEncoder(s.w).Encode(data)
}
// StandardRequest adapts *http.Request to Request interface
type StandardRequest struct {
r *http.Request
body []byte
}
func (s *StandardRequest) Method() string {
return s.r.Method
}
func (s *StandardRequest) URL() string {
return s.r.URL.String()
}
func (s *StandardRequest) Header(key string) string {
return s.r.Header.Get(key)
}
func (s *StandardRequest) AllHeaders() map[string]string {
headers := make(map[string]string)
for key, values := range s.r.Header {
if len(values) > 0 {
headers[key] = values[0]
}
}
return headers
}
func (s *StandardRequest) Body() ([]byte, error) {
if s.body != nil {
return s.body, nil
}
if s.r.Body == nil {
return nil, nil
}
defer s.r.Body.Close()
body, err := io.ReadAll(s.r.Body)
if err != nil {
return nil, err
}
s.body = body
return body, nil
}
func (s *StandardRequest) PathParam(key string) string {
// Standard http.Request doesn't have path params
// This should be set by the router
return ""
}
func (s *StandardRequest) QueryParam(key string) string {
return s.r.URL.Query().Get(key)
}
func (s *StandardRequest) AllQueryParams() map[string]string {
params := make(map[string]string)
for key, values := range s.r.URL.Query() {
if len(values) > 0 {
params[key] = values[0]
}
}
return params
}
// TableNameProvider interface for models that provide table names
type TableNameProvider interface {
TableName() string
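A usage sketch for the new WrapHTTPRequest helper (not part of the diff; the route, port, and payload are illustrative, and the import path follows the module path used elsewhere in this diff):

package main

import (
	"net/http"

	"github.com/bitechdev/ResolveSpec/pkg/common"
)

// handler adapts a plain net/http handler to the common.ResponseWriter / common.Request interfaces.
func handler(w http.ResponseWriter, r *http.Request) {
	rw, req := common.WrapHTTPRequest(w, r)
	_ = req.AllQueryParams() // all query parameters as a map[string]string
	_ = rw.WriteJSON(map[string]string{"method": req.Method()})
}

func main() {
	http.HandleFunc("/ping", handler)
	_ = http.ListenAndServe(":8080", nil)
}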

pkg/common/sql_helpers.go (new file, 340 lines)
View File

@@ -0,0 +1,340 @@
package common
import (
"fmt"
"strings"
"github.com/bitechdev/ResolveSpec/pkg/logger"
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
"github.com/bitechdev/ResolveSpec/pkg/reflection"
)
// ValidateAndFixPreloadWhere validates that the WHERE clause for a preload contains
// the relation prefix (alias). If not present, it attempts to add it to column references.
// Returns the fixed WHERE clause and an error if it cannot be safely fixed.
func ValidateAndFixPreloadWhere(where string, relationName string) (string, error) {
if where == "" {
return where, nil
}
// Check if the relation name is already present in the WHERE clause
lowerWhere := strings.ToLower(where)
lowerRelation := strings.ToLower(relationName)
// Check for patterns like "relation.", "relation ", or just "relation" followed by a dot
if strings.Contains(lowerWhere, lowerRelation+".") ||
strings.Contains(lowerWhere, "`"+lowerRelation+"`.") ||
strings.Contains(lowerWhere, "\""+lowerRelation+"\".") {
// Relation prefix is already present
return where, nil
}
// If the WHERE clause is complex (contains OR, parentheses, subqueries, etc.),
// we can't safely auto-fix it - require explicit prefix
if strings.Contains(lowerWhere, " or ") ||
strings.Contains(where, "(") ||
strings.Contains(where, ")") {
return "", fmt.Errorf("preload WHERE condition must reference the relation '%s' (e.g., '%s.column_name'). Complex WHERE clauses with OR/parentheses must explicitly use the relation prefix", relationName, relationName)
}
// Try to add the relation prefix to simple column references
// This handles basic cases like "column = value" or "column = value AND other_column = value"
// Split by AND to handle multiple conditions (case-insensitive)
originalConditions := strings.Split(where, " AND ")
// If uppercase split didn't work, try lowercase
if len(originalConditions) == 1 {
originalConditions = strings.Split(where, " and ")
}
fixedConditions := make([]string, 0, len(originalConditions))
for _, cond := range originalConditions {
cond = strings.TrimSpace(cond)
if cond == "" {
continue
}
// Check if this condition already has a table prefix (contains a dot)
if strings.Contains(cond, ".") {
fixedConditions = append(fixedConditions, cond)
continue
}
// Check if this is a SQL expression/literal that shouldn't be prefixed
lowerCond := strings.ToLower(strings.TrimSpace(cond))
if IsSQLExpression(lowerCond) {
// Don't prefix SQL expressions like "true", "false", "1=1", etc.
fixedConditions = append(fixedConditions, cond)
continue
}
// Extract the column name (first identifier before operator)
columnName := ExtractColumnName(cond)
if columnName == "" {
// Can't identify column name, require explicit prefix
return "", fmt.Errorf("preload WHERE condition must reference the relation '%s' (e.g., '%s.column_name'). Cannot auto-fix condition: %s", relationName, relationName, cond)
}
// Add relation prefix to the column name only
fixedCond := strings.Replace(cond, columnName, relationName+"."+columnName, 1)
fixedConditions = append(fixedConditions, fixedCond)
}
fixedWhere := strings.Join(fixedConditions, " AND ")
logger.Debug("Auto-fixed preload WHERE clause: '%s' -> '%s'", where, fixedWhere)
return fixedWhere, nil
}
// IsSQLExpression checks if a condition is a SQL expression that shouldn't be prefixed
func IsSQLExpression(cond string) bool {
// Common SQL literals and expressions
sqlLiterals := []string{"true", "false", "null", "1=1", "1 = 1", "0=0", "0 = 0"}
for _, literal := range sqlLiterals {
if cond == literal {
return true
}
}
return false
}
// IsTrivialCondition checks if a condition is trivial and always evaluates to true
// These conditions should be removed from WHERE clauses as they have no filtering effect
func IsTrivialCondition(cond string) bool {
cond = strings.TrimSpace(cond)
lowerCond := strings.ToLower(cond)
// Conditions that always evaluate to true
trivialConditions := []string{
"1=1", "1 = 1", "1= 1", "1 =1",
"true", "true = true", "true=true", "true= true", "true =true",
"0=0", "0 = 0", "0= 0", "0 =0",
}
for _, trivial := range trivialConditions {
if lowerCond == trivial {
return true
}
}
return false
}
// SanitizeWhereClause removes trivial conditions and optionally prefixes table/relation names to columns
// This function should be used everywhere a WHERE statement is sent to ensure clean, efficient SQL
//
// Parameters:
// - where: The WHERE clause string to sanitize
// - tableName: Optional table/relation name to prefix to column references (empty string to skip prefixing)
//
// Returns:
// - The sanitized WHERE clause with trivial conditions removed and columns optionally prefixed
// - An empty string if all conditions were trivial or the input was empty
func SanitizeWhereClause(where string, tableName string) string {
if where == "" {
return ""
}
where = strings.TrimSpace(where)
// Strip outer parentheses and re-trim
where = stripOuterParentheses(where)
// Get valid columns from the model if tableName is provided
var validColumns map[string]bool
if tableName != "" {
validColumns = getValidColumnsForTable(tableName)
}
// Split by AND to handle multiple conditions
conditions := splitByAND(where)
validConditions := make([]string, 0, len(conditions))
for _, cond := range conditions {
cond = strings.TrimSpace(cond)
if cond == "" {
continue
}
// Strip parentheses from the condition before checking
condToCheck := stripOuterParentheses(cond)
// Skip trivial conditions that always evaluate to true
if IsTrivialCondition(condToCheck) {
logger.Debug("Removing trivial condition: '%s'", cond)
continue
}
// If tableName is provided and the condition doesn't already have a table prefix,
// attempt to add it
if tableName != "" && !hasTablePrefix(condToCheck) {
// Check if this is a SQL expression/literal that shouldn't be prefixed
if !IsSQLExpression(strings.ToLower(condToCheck)) {
// Extract the column name and prefix it
columnName := ExtractColumnName(condToCheck)
if columnName != "" {
// Only prefix if this is a valid column in the model
// If we don't have model info (validColumns is nil), prefix anyway for backward compatibility
if validColumns == nil || isValidColumn(columnName, validColumns) {
// Replace in the original condition (without stripped parens)
cond = strings.Replace(cond, columnName, tableName+"."+columnName, 1)
logger.Debug("Prefixed column in condition: '%s'", cond)
} else {
logger.Debug("Skipping prefix for '%s' - not a valid column in model", columnName)
}
}
}
}
validConditions = append(validConditions, cond)
}
if len(validConditions) == 0 {
return ""
}
result := strings.Join(validConditions, " AND ")
if result != where {
logger.Debug("Sanitized WHERE clause: '%s' -> '%s'", where, result)
}
return result
}
// stripOuterParentheses removes matching outer parentheses from a string
// It handles nested parentheses correctly
func stripOuterParentheses(s string) string {
s = strings.TrimSpace(s)
for {
if len(s) < 2 || s[0] != '(' || s[len(s)-1] != ')' {
return s
}
// Check if these parentheses match (i.e., they're the outermost pair)
depth := 0
matched := false
for i := 0; i < len(s); i++ {
switch s[i] {
case '(':
depth++
case ')':
depth--
if depth == 0 && i == len(s)-1 {
matched = true
} else if depth == 0 {
// Found a closing paren before the end, so outer parens don't match
return s
}
}
}
if !matched {
return s
}
// Strip the outer parentheses and continue
s = strings.TrimSpace(s[1 : len(s)-1])
}
}
// splitByAND splits a WHERE clause by AND operators (case-insensitive)
// This is a simple split that doesn't handle nested parentheses or complex expressions
func splitByAND(where string) []string {
// First try uppercase AND
conditions := strings.Split(where, " AND ")
// If we didn't split on uppercase, try lowercase
if len(conditions) == 1 {
conditions = strings.Split(where, " and ")
}
// If we still didn't split, try mixed case
if len(conditions) == 1 {
conditions = strings.Split(where, " And ")
}
return conditions
}
// hasTablePrefix checks if a condition already has a table/relation prefix (contains a dot)
func hasTablePrefix(cond string) bool {
// Look for patterns like "table.column" or "`table`.`column`" or "\"table\".\"column\""
return strings.Contains(cond, ".")
}
// ExtractColumnName extracts the column name from a WHERE condition
// For example: "status = 'active'" returns "status"
func ExtractColumnName(cond string) string {
// Common SQL operators
operators := []string{" = ", " != ", " <> ", " > ", " >= ", " < ", " <= ", " LIKE ", " like ", " IN ", " in ", " IS ", " is "}
for _, op := range operators {
if idx := strings.Index(cond, op); idx > 0 {
columnName := strings.TrimSpace(cond[:idx])
// Remove quotes if present
columnName = strings.Trim(columnName, "`\"'")
return columnName
}
}
// If no operator found, check if it's a simple identifier (for boolean columns)
parts := strings.Fields(cond)
if len(parts) > 0 {
columnName := strings.Trim(parts[0], "`\"'")
// Check if it's a valid identifier (not a SQL keyword)
if !IsSQLKeyword(strings.ToLower(columnName)) {
return columnName
}
}
return ""
}
// IsSQLKeyword checks if a string is a SQL keyword that shouldn't be treated as a column name
func IsSQLKeyword(word string) bool {
keywords := []string{"select", "from", "where", "and", "or", "not", "in", "is", "null", "true", "false", "like", "between", "exists"}
for _, kw := range keywords {
if word == kw {
return true
}
}
return false
}
// getValidColumnsForTable retrieves the valid SQL columns for a table from the model registry
// Returns a map of column names for fast lookup, or nil if the model is not found
func getValidColumnsForTable(tableName string) map[string]bool {
// Try to get the model from the registry
model, err := modelregistry.GetModelByName(tableName)
if err != nil {
// Model not found, return nil to indicate we should use fallback behavior
return nil
}
// Get SQL columns from the model
columns := reflection.GetSQLModelColumns(model)
if len(columns) == 0 {
// No columns found, return nil
return nil
}
// Build a map for fast lookup
columnMap := make(map[string]bool, len(columns))
for _, col := range columns {
columnMap[strings.ToLower(col)] = true
}
return columnMap
}
// isValidColumn checks if a column name exists in the valid columns map
// Handles case-insensitive comparison
func isValidColumn(columnName string, validColumns map[string]bool) bool {
if validColumns == nil {
return true // No model info, assume valid
}
return validColumns[strings.ToLower(columnName)]
}
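A quick sketch (not part of the diff) of the two main helpers above; the table and relation names are illustrative:

package main

import (
	"fmt"

	"github.com/bitechdev/ResolveSpec/pkg/common"
)

func main() {
	// Trivial conditions are dropped and bare columns are prefixed with the table name
	// (when no model is registered for the table, columns are prefixed unconditionally).
	fmt.Println(common.SanitizeWhereClause("(true AND status = 'active')", "users"))
	// -> "users.status = 'active'"

	// Simple preload filters gain the relation prefix; clauses with OR or parentheses
	// must already carry it and return an error otherwise.
	fixed, err := common.ValidateAndFixPreloadWhere("status = 'active'", "Profile")
	fmt.Println(fixed, err) // -> "Profile.status = 'active'" <nil>
}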

View File

@@ -0,0 +1,224 @@
package common
import (
"testing"
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
)
func TestSanitizeWhereClause(t *testing.T) {
tests := []struct {
name string
where string
tableName string
expected string
}{
{
name: "trivial conditions in parentheses",
where: "(true AND true AND true)",
tableName: "mastertask",
expected: "",
},
{
name: "trivial conditions without parentheses",
where: "true AND true AND true",
tableName: "mastertask",
expected: "",
},
{
name: "single trivial condition",
where: "true",
tableName: "mastertask",
expected: "",
},
{
name: "valid condition with parentheses",
where: "(status = 'active')",
tableName: "users",
expected: "users.status = 'active'",
},
{
name: "mixed trivial and valid conditions",
where: "true AND status = 'active' AND 1=1",
tableName: "users",
expected: "users.status = 'active'",
},
{
name: "condition already with table prefix",
where: "users.status = 'active'",
tableName: "users",
expected: "users.status = 'active'",
},
{
name: "multiple valid conditions",
where: "status = 'active' AND age > 18",
tableName: "users",
expected: "users.status = 'active' AND users.age > 18",
},
{
name: "no table name provided",
where: "status = 'active'",
tableName: "",
expected: "status = 'active'",
},
{
name: "empty where clause",
where: "",
tableName: "users",
expected: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := SanitizeWhereClause(tt.where, tt.tableName)
if result != tt.expected {
t.Errorf("SanitizeWhereClause(%q, %q) = %q; want %q", tt.where, tt.tableName, result, tt.expected)
}
})
}
}
func TestStripOuterParentheses(t *testing.T) {
tests := []struct {
name string
input string
expected string
}{
{
name: "single level parentheses",
input: "(true)",
expected: "true",
},
{
name: "multiple levels",
input: "((true))",
expected: "true",
},
{
name: "no parentheses",
input: "true",
expected: "true",
},
{
name: "mismatched parentheses",
input: "(true",
expected: "(true",
},
{
name: "complex expression",
input: "(a AND b)",
expected: "a AND b",
},
{
name: "nested but not outer",
input: "(a AND (b OR c)) AND d",
expected: "(a AND (b OR c)) AND d",
},
{
name: "with spaces",
input: " ( true ) ",
expected: "true",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := stripOuterParentheses(tt.input)
if result != tt.expected {
t.Errorf("stripOuterParentheses(%q) = %q; want %q", tt.input, result, tt.expected)
}
})
}
}
func TestIsTrivialCondition(t *testing.T) {
tests := []struct {
name string
input string
expected bool
}{
{"true", "true", true},
{"true with spaces", " true ", true},
{"TRUE uppercase", "TRUE", true},
{"1=1", "1=1", true},
{"1 = 1", "1 = 1", true},
{"true = true", "true = true", true},
{"valid condition", "status = 'active'", false},
{"false", "false", false},
{"column name", "is_active", false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := IsTrivialCondition(tt.input)
if result != tt.expected {
t.Errorf("IsTrivialCondition(%q) = %v; want %v", tt.input, result, tt.expected)
}
})
}
}
// Test model for model-aware sanitization tests
type MasterTask struct {
ID int `bun:"id,pk"`
Name string `bun:"name"`
Status string `bun:"status"`
UserID int `bun:"user_id"`
}
func TestSanitizeWhereClauseWithModel(t *testing.T) {
// Register the test model
err := modelregistry.RegisterModel(MasterTask{}, "mastertask")
if err != nil {
// Model might already be registered, ignore error
t.Logf("Model registration returned: %v", err)
}
tests := []struct {
name string
where string
tableName string
expected string
}{
{
name: "valid column gets prefixed",
where: "status = 'active'",
tableName: "mastertask",
expected: "mastertask.status = 'active'",
},
{
name: "multiple valid columns get prefixed",
where: "status = 'active' AND user_id = 123",
tableName: "mastertask",
expected: "mastertask.status = 'active' AND mastertask.user_id = 123",
},
{
name: "invalid column does not get prefixed",
where: "invalid_column = 'value'",
tableName: "mastertask",
expected: "invalid_column = 'value'",
},
{
name: "mix of valid and trivial conditions",
where: "true AND status = 'active' AND 1=1",
tableName: "mastertask",
expected: "mastertask.status = 'active'",
},
{
name: "parentheses with valid column",
where: "(status = 'active')",
tableName: "mastertask",
expected: "mastertask.status = 'active'",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := SanitizeWhereClause(tt.where, tt.tableName)
if result != tt.expected {
t.Errorf("SanitizeWhereClause(%q, %q) = %q; want %q", tt.where, tt.tableName, result, tt.expected)
}
})
}
}

View File

@@ -238,13 +238,13 @@ func (t *SqlTimeStamp) UnmarshalJSON(b []byte) error {
var err error
if b == nil {
t = &SqlTimeStamp{}
return nil
}
s := strings.Trim(strings.Trim(string(b), " "), "\"")
if s == "null" || s == "" || s == "0" ||
s == "0001-01-01T00:00:00" || s == "0001-01-01" {
t = &SqlTimeStamp{}
return nil
}
@@ -293,7 +293,7 @@ func (t *SqlTimeStamp) Scan(value interface{}) error {
// String - Override String format of time
func (t SqlTimeStamp) String() string {
return fmt.Sprintf("%s", time.Time(t).Format("2006-01-02T15:04:05"))
return time.Time(t).Format("2006-01-02T15:04:05")
}
// GetTime - Returns Time
@@ -308,7 +308,7 @@ func (t *SqlTimeStamp) SetTime(pTime time.Time) {
// Format - Formats the time
func (t SqlTimeStamp) Format(layout string) string {
return fmt.Sprintf("%s", time.Time(t).Format(layout))
return time.Time(t).Format(layout)
}
func SqlTimeStampNow() SqlTimeStamp {
@@ -420,7 +420,6 @@ func (t *SqlDate) UnmarshalJSON(b []byte) error {
if s == "null" || s == "" || s == "0" ||
strings.HasPrefix(s, "0001-01-01T00:00:00") ||
s == "0001-01-01" {
t = &SqlDate{}
return nil
}
@@ -434,7 +433,7 @@ func (t *SqlDate) UnmarshalJSON(b []byte) error {
// MarshalJSON - Override JSON format of time
func (t SqlDate) MarshalJSON() ([]byte, error) {
tmstr := time.Time(t).Format("2006-01-02") //time.RFC3339
tmstr := time.Time(t).Format("2006-01-02") // time.RFC3339
if strings.HasPrefix(tmstr, "0001-01-01") {
return []byte("null"), nil
}
@@ -482,7 +481,7 @@ func (t SqlDate) Int64() int64 {
// String - Override String format of time
func (t SqlDate) String() string {
tmstr := time.Time(t).Format("2006-01-02") //time.RFC3339
tmstr := time.Time(t).Format("2006-01-02") // time.RFC3339
if strings.HasPrefix(tmstr, "0001-01-01") || strings.HasPrefix(tmstr, "1800-12-31") {
return "0"
}
@@ -517,8 +516,8 @@ func (t *SqlTime) UnmarshalJSON(b []byte) error {
*t = SqlTime{}
return nil
}
tx := time.Time{}
tx, err = tryParseDT(s)
tx, err := tryParseDT(s)
*t = SqlTime(tx)
return err
@@ -642,9 +641,8 @@ func (n SqlJSONB) AsSlice() ([]any, error) {
func (n *SqlJSONB) UnmarshalJSON(b []byte) error {
s := strings.Trim(strings.Trim(string(b), " "), "\"")
invalid := (s == "null" || s == "" || len(s) < 2) || !(strings.Contains(s, "{") || strings.Contains(s, "["))
invalid := (s == "null" || s == "" || len(s) < 2) || (!strings.Contains(s, "{") && !strings.Contains(s, "["))
if invalid {
s = ""
return nil
}
@@ -661,7 +659,7 @@ func (n SqlJSONB) MarshalJSON() ([]byte, error) {
var obj interface{}
err := json.Unmarshal(n, &obj)
if err != nil {
//fmt.Printf("Invalid JSON %v", err)
// fmt.Printf("Invalid JSON %v", err)
return []byte("null"), nil
}
@@ -725,7 +723,6 @@ func (n *SqlUUID) UnmarshalJSON(b []byte) error {
s := strings.Trim(strings.Trim(string(b), " "), "\"")
invalid := (s == "null" || s == "" || len(s) < 30)
if invalid {
s = ""
return nil
}
*n = SqlUUID(sql.NullString{String: s, Valid: !invalid})

View File

@@ -32,15 +32,22 @@ type Parameter struct {
}
type PreloadOption struct {
Relation string `json:"relation"`
Columns []string `json:"columns"`
OmitColumns []string `json:"omit_columns"`
Sort []SortOption `json:"sort"`
Filters []FilterOption `json:"filters"`
Where string `json:"where"`
Limit *int `json:"limit"`
Offset *int `json:"offset"`
Updatable *bool `json:"updateable"` // if true, the relation can be updated
Relation string `json:"relation"`
Columns []string `json:"columns"`
OmitColumns []string `json:"omit_columns"`
Sort []SortOption `json:"sort"`
Filters []FilterOption `json:"filters"`
Where string `json:"where"`
Limit *int `json:"limit"`
Offset *int `json:"offset"`
Updatable *bool `json:"updateable"` // if true, the relation can be updated
ComputedQL map[string]string `json:"computed_ql"` // Computed columns as SQL expressions
Recursive bool `json:"recursive"` // if true, preload recursively up to 5 levels
// Relationship keys from XFiles - used to build proper foreign key filters
PrimaryKey string `json:"primary_key"` // Primary key of the related table
RelatedKey string `json:"related_key"` // For child tables: column in child that references parent
ForeignKey string `json:"foreign_key"` // For parent tables: column in current table that references parent
}
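For reference, a hedged sketch of how the extended PreloadOption might be populated; every value below (including the computed SQL expression and the key columns) is illustrative:

package main

import "github.com/bitechdev/ResolveSpec/pkg/common"

func main() {
	limit := 50
	_ = common.PreloadOption{
		Relation:   "Tasks",
		Columns:    []string{"id", "status"},
		Where:      "status = 'active'",
		Limit:      &limit,
		ComputedQL: map[string]string{"cql_task_count": "count(*) over ()"},
		Recursive:  true,        // preload the same relation up to 5 levels deep
		PrimaryKey: "id",        // primary key of the related table
		RelatedKey: "parent_id", // column in the child that references the parent
	}
}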
type FilterOption struct {

View File

@@ -6,6 +6,7 @@ import (
"strings"
"github.com/bitechdev/ResolveSpec/pkg/logger"
"github.com/bitechdev/ResolveSpec/pkg/reflection"
)
// ColumnValidator validates column names against a model's fields
@@ -92,23 +93,6 @@ func (v *ColumnValidator) getColumnName(field reflect.StructField) string {
return strings.ToLower(field.Name)
}
// extractSourceColumn extracts the base column name from PostgreSQL JSON operators
// Examples:
// - "columna->>'val'" returns "columna"
// - "columna->'key'" returns "columna"
// - "columna" returns "columna"
// - "table.columna->>'val'" returns "table.columna"
func extractSourceColumn(colName string) string {
// Check for PostgreSQL JSON operators: -> and ->>
if idx := strings.Index(colName, "->>"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
if idx := strings.Index(colName, "->"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
return colName
}
// ValidateColumn validates a single column name
// Returns nil if valid, error if invalid
// Columns prefixed with "cql" (case insensitive) are always valid
@@ -125,7 +109,7 @@ func (v *ColumnValidator) ValidateColumn(column string) error {
}
// Extract source column name (remove JSON operators like ->> or ->)
sourceColumn := extractSourceColumn(column)
sourceColumn := reflection.ExtractSourceColumn(column)
// Check if column exists in model
if _, exists := v.validColumns[strings.ToLower(sourceColumn)]; !exists {

View File

@@ -2,6 +2,8 @@ package common
import (
"testing"
"github.com/bitechdev/ResolveSpec/pkg/reflection"
)
func TestExtractSourceColumn(t *testing.T) {
@@ -49,9 +51,9 @@ func TestExtractSourceColumn(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result := extractSourceColumn(tc.input)
result := reflection.ExtractSourceColumn(tc.input)
if result != tc.expected {
t.Errorf("extractSourceColumn(%q) = %q; want %q", tc.input, result, tc.expected)
t.Errorf("reflection.ExtractSourceColumn(%q) = %q; want %q", tc.input, result, tc.expected)
}
})
}

View File

@@ -26,8 +26,7 @@ func GetModelColumnDetail(record reflect.Value) []ModelFieldDetail {
}
}()
var lst []ModelFieldDetail
lst = make([]ModelFieldDetail, 0)
lst := make([]ModelFieldDetail, 0)
if !record.IsValid() {
return lst

View File

@@ -17,3 +17,33 @@ func Len(v any) int {
return 0
}
}
// ExtractTableNameOnly extracts the table name from a fully qualified table reference.
// It removes any schema prefix (e.g., "schema.table" -> "table") and truncates at
// the first delimiter (comma, space, tab, or newline). If the input contains multiple
// dots, it returns everything after the last dot up to the first delimiter.
func ExtractTableNameOnly(fullName string) string {
// First, split by dot to remove schema prefix if present
lastDotIndex := -1
for i, char := range fullName {
if char == '.' {
lastDotIndex = i
}
}
// Start from after the last dot (or from beginning if no dot)
startIndex := 0
if lastDotIndex != -1 {
startIndex = lastDotIndex + 1
}
// Now find the end (first delimiter after the table name)
for i := startIndex; i < len(fullName); i++ {
char := rune(fullName[i])
if char == ',' || char == ' ' || char == '\t' || char == '\n' {
return fullName[startIndex:i]
}
}
return fullName[startIndex:]
}
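A short sketch (not part of the diff) of what ExtractTableNameOnly returns for typical inputs:

package main

import (
	"fmt"

	"github.com/bitechdev/ResolveSpec/pkg/reflection"
)

func main() {
	fmt.Println(reflection.ExtractTableNameOnly("public.users"))        // "users"
	fmt.Println(reflection.ExtractTableNameOnly("users"))               // "users"
	fmt.Println(reflection.ExtractTableNameOnly("core.users u, posts")) // "users" (stops at the first delimiter)
}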

View File

@@ -1,7 +1,9 @@
package reflection
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
@@ -132,7 +134,7 @@ func findFieldByName(val reflect.Value, name string) any {
}
// Check if field name matches
if strings.ToLower(field.Name) == name && fieldValue.CanInterface() {
if strings.EqualFold(field.Name, name) && fieldValue.CanInterface() {
return fieldValue.Interface()
}
}
@@ -323,6 +325,127 @@ func ExtractColumnFromBunTag(tag string) string {
return ""
}
// GetSQLModelColumns extracts column names that have valid SQL field mappings
// This function only returns columns that:
// 1. Have bun or gorm tags (not just json tags)
// 2. Are not relations (no rel:, join:, foreignKey, references, many2many tags)
// 3. Are not scan-only embedded fields
func GetSQLModelColumns(model any) []string {
var columns []string
modelType := reflect.TypeOf(model)
// Unwrap pointers, slices, and arrays to get to the base struct type
for modelType != nil && (modelType.Kind() == reflect.Pointer || modelType.Kind() == reflect.Slice || modelType.Kind() == reflect.Array) {
modelType = modelType.Elem()
}
// Validate that we have a struct type
if modelType == nil || modelType.Kind() != reflect.Struct {
return columns
}
collectSQLColumnsFromType(modelType, &columns, false)
return columns
}
// collectSQLColumnsFromType recursively collects SQL column names from a struct type
// scanOnlyEmbedded indicates if we're inside a scan-only embedded struct
func collectSQLColumnsFromType(typ reflect.Type, columns *[]string, scanOnlyEmbedded bool) {
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
// Check if this is an embedded struct
if field.Anonymous {
// Unwrap pointer type if necessary
fieldType := field.Type
if fieldType.Kind() == reflect.Pointer {
fieldType = fieldType.Elem()
}
// Check if the embedded struct itself is scan-only
isScanOnly := scanOnlyEmbedded
bunTag := field.Tag.Get("bun")
if bunTag != "" && isBunFieldScanOnly(bunTag) {
isScanOnly = true
}
// Recursively process embedded struct
if fieldType.Kind() == reflect.Struct {
collectSQLColumnsFromType(fieldType, columns, isScanOnly)
continue
}
}
// Skip fields in scan-only embedded structs
if scanOnlyEmbedded {
continue
}
// Get bun and gorm tags
bunTag := field.Tag.Get("bun")
gormTag := field.Tag.Get("gorm")
// Skip if neither bun nor gorm tag exists
if bunTag == "" && gormTag == "" {
continue
}
// Skip if explicitly marked with "-"
if bunTag == "-" || gormTag == "-" {
continue
}
// Skip if field itself is scan-only (bun)
if bunTag != "" && isBunFieldScanOnly(bunTag) {
continue
}
// Skip if field itself is read-only (gorm)
if gormTag != "" && isGormFieldReadOnly(gormTag) {
continue
}
// Skip relation fields (bun)
if bunTag != "" {
// Skip if it's a bun relation (rel:, join:, or m2m:)
if strings.Contains(bunTag, "rel:") ||
strings.Contains(bunTag, "join:") ||
strings.Contains(bunTag, "m2m:") {
continue
}
}
// Skip relation fields (gorm)
if gormTag != "" {
// Skip if it has gorm relationship tags
if strings.Contains(gormTag, "foreignKey:") ||
strings.Contains(gormTag, "references:") ||
strings.Contains(gormTag, "many2many:") ||
strings.Contains(gormTag, "constraint:") {
continue
}
}
// Get column name
columnName := ""
if bunTag != "" {
columnName = ExtractColumnFromBunTag(bunTag)
}
if columnName == "" && gormTag != "" {
columnName = ExtractColumnFromGormTag(gormTag)
}
// Skip if we couldn't extract a column name
if columnName == "" {
continue
}
*columns = append(*columns, columnName)
}
}
// IsColumnWritable checks if a column can be written to in the database
// For bun: returns false if the field has "scanonly" tag
// For gorm: returns false if the field has "<-:false" or "->" (read-only) tag
@@ -351,7 +474,7 @@ func IsColumnWritable(model any, columnName string) bool {
// isColumnWritableInType recursively searches for a column and checks if it's writable
// Returns (found, writable) where found indicates if the column was found
func isColumnWritableInType(typ reflect.Type, columnName string) (bool, bool) {
func isColumnWritableInType(typ reflect.Type, columnName string) (found bool, writable bool) {
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
@@ -440,3 +563,321 @@ func isGormFieldReadOnly(tag string) bool {
}
return false
}
// ExtractSourceColumn extracts the base column name from PostgreSQL JSON operators
// Examples:
// - "columna->>'val'" returns "columna"
// - "columna->'key'" returns "columna"
// - "columna" returns "columna"
// - "table.columna->>'val'" returns "table.columna"
func ExtractSourceColumn(colName string) string {
// Check for PostgreSQL JSON operators: -> and ->>
if idx := strings.Index(colName, "->>"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
if idx := strings.Index(colName, "->"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
return colName
}
// ToSnakeCase converts a string from CamelCase to snake_case
func ToSnakeCase(s string) string {
var result strings.Builder
for i, r := range s {
if i > 0 && r >= 'A' && r <= 'Z' {
result.WriteRune('_')
}
result.WriteRune(r)
}
return strings.ToLower(result.String())
}
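A small sketch (not part of the diff) showing ToSnakeCase output, including its behavior on consecutive capitals:

package main

import (
	"fmt"

	"github.com/bitechdev/ResolveSpec/pkg/reflection"
)

func main() {
	fmt.Println(reflection.ToSnakeCase("UserName")) // "user_name"
	fmt.Println(reflection.ToSnakeCase("UserID"))   // "user_i_d" (every capital after the first rune gets an underscore)
}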
// GetColumnTypeFromModel uses reflection to determine the Go type of a column in a model
func GetColumnTypeFromModel(model interface{}, colName string) reflect.Kind {
if model == nil {
return reflect.Invalid
}
// Extract the source column name (remove JSON operators like ->> or ->)
sourceColName := ExtractSourceColumn(colName)
modelType := reflect.TypeOf(model)
// Dereference pointer if needed
if modelType.Kind() == reflect.Ptr {
modelType = modelType.Elem()
}
// Ensure it's a struct
if modelType.Kind() != reflect.Struct {
return reflect.Invalid
}
// Find the field by JSON tag or field name
for i := 0; i < modelType.NumField(); i++ {
field := modelType.Field(i)
// Check JSON tag
jsonTag := field.Tag.Get("json")
if jsonTag != "" {
// Parse JSON tag (format: "name,omitempty")
parts := strings.Split(jsonTag, ",")
if parts[0] == sourceColName {
return field.Type.Kind()
}
}
// Check field name (case-insensitive)
if strings.EqualFold(field.Name, sourceColName) {
return field.Type.Kind()
}
// Check snake_case conversion
snakeCaseName := ToSnakeCase(field.Name)
if snakeCaseName == sourceColName {
return field.Type.Kind()
}
}
return reflect.Invalid
}
// IsNumericType checks if a reflect.Kind is a numeric type
func IsNumericType(kind reflect.Kind) bool {
return kind == reflect.Int || kind == reflect.Int8 || kind == reflect.Int16 ||
kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.Uint ||
kind == reflect.Uint8 || kind == reflect.Uint16 || kind == reflect.Uint32 ||
kind == reflect.Uint64 || kind == reflect.Float32 || kind == reflect.Float64
}
// IsStringType checks if a reflect.Kind is a string type
func IsStringType(kind reflect.Kind) bool {
return kind == reflect.String
}
// IsNumericValue checks if a string value can be parsed as a number
func IsNumericValue(value string) bool {
value = strings.TrimSpace(value)
_, err := strconv.ParseFloat(value, 64)
return err == nil
}
// ConvertToNumericType converts a string value to the appropriate numeric type
func ConvertToNumericType(value string, kind reflect.Kind) (interface{}, error) {
value = strings.TrimSpace(value)
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
// Parse as integer
bitSize := 64
switch kind {
case reflect.Int8:
bitSize = 8
case reflect.Int16:
bitSize = 16
case reflect.Int32:
bitSize = 32
}
intVal, err := strconv.ParseInt(value, 10, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid integer value: %w", err)
}
// Return the appropriate type
switch kind {
case reflect.Int:
return int(intVal), nil
case reflect.Int8:
return int8(intVal), nil
case reflect.Int16:
return int16(intVal), nil
case reflect.Int32:
return int32(intVal), nil
case reflect.Int64:
return intVal, nil
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
// Parse as unsigned integer
bitSize := 64
switch kind {
case reflect.Uint8:
bitSize = 8
case reflect.Uint16:
bitSize = 16
case reflect.Uint32:
bitSize = 32
}
uintVal, err := strconv.ParseUint(value, 10, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid unsigned integer value: %w", err)
}
// Return the appropriate type
switch kind {
case reflect.Uint:
return uint(uintVal), nil
case reflect.Uint8:
return uint8(uintVal), nil
case reflect.Uint16:
return uint16(uintVal), nil
case reflect.Uint32:
return uint32(uintVal), nil
case reflect.Uint64:
return uintVal, nil
}
case reflect.Float32, reflect.Float64:
// Parse as float
bitSize := 64
if kind == reflect.Float32 {
bitSize = 32
}
floatVal, err := strconv.ParseFloat(value, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid float value: %w", err)
}
if kind == reflect.Float32 {
return float32(floatVal), nil
}
return floatVal, nil
}
return nil, fmt.Errorf("unsupported numeric type: %v", kind)
}
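A minimal sketch (not part of the diff) of ConvertToNumericType resolving the target kind:

package main

import (
	"fmt"
	"reflect"

	"github.com/bitechdev/ResolveSpec/pkg/reflection"
)

func main() {
	v, err := reflection.ConvertToNumericType("42", reflect.Int32)
	fmt.Printf("%T %v %v\n", v, v, err) // int32 42 <nil>
}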
// GetRelationModel gets the model type for a relation field
// It searches for the field by name in the following order (case-insensitive):
// 1. Actual field name
// 2. Bun tag name (if exists)
// 3. Gorm tag name (if exists)
// 4. JSON tag name (if exists)
//
// Supports recursive field paths using dot notation (e.g., "MAL.MAL.DEF")
// For nested fields, it traverses through each level of the struct hierarchy
func GetRelationModel(model interface{}, fieldName string) interface{} {
if model == nil || fieldName == "" {
return nil
}
// Split the field name by "." to handle nested/recursive relations
fieldParts := strings.Split(fieldName, ".")
// Start with the current model
currentModel := model
// Traverse through each level of the field path
for _, part := range fieldParts {
if part == "" {
continue
}
currentModel = getRelationModelSingleLevel(currentModel, part)
if currentModel == nil {
return nil
}
}
return currentModel
}
// getRelationModelSingleLevel gets the model type for a single level field (non-recursive)
// This is a helper function used by GetRelationModel to handle one level at a time
func getRelationModelSingleLevel(model interface{}, fieldName string) interface{} {
if model == nil || fieldName == "" {
return nil
}
modelType := reflect.TypeOf(model)
if modelType == nil {
return nil
}
if modelType.Kind() == reflect.Ptr {
modelType = modelType.Elem()
}
if modelType == nil || modelType.Kind() != reflect.Struct {
return nil
}
// Find the field by checking in priority order (case-insensitive)
var field *reflect.StructField
normalizedFieldName := strings.ToLower(fieldName)
for i := 0; i < modelType.NumField(); i++ {
f := modelType.Field(i)
// 1. Check actual field name (case-insensitive)
if strings.EqualFold(f.Name, fieldName) {
field = &f
break
}
// 2. Check bun tag name
bunTag := f.Tag.Get("bun")
if bunTag != "" {
bunColName := ExtractColumnFromBunTag(bunTag)
if bunColName != "" && strings.EqualFold(bunColName, normalizedFieldName) {
field = &f
break
}
}
// 3. Check gorm tag name
gormTag := f.Tag.Get("gorm")
if gormTag != "" {
gormColName := ExtractColumnFromGormTag(gormTag)
if gormColName != "" && strings.EqualFold(gormColName, normalizedFieldName) {
field = &f
break
}
}
// 4. Check JSON tag name
jsonTag := f.Tag.Get("json")
if jsonTag != "" {
parts := strings.Split(jsonTag, ",")
if len(parts) > 0 && parts[0] != "" && parts[0] != "-" {
if strings.EqualFold(parts[0], normalizedFieldName) {
field = &f
break
}
}
}
}
if field == nil {
return nil
}
// Get the target type
targetType := field.Type
if targetType == nil {
return nil
}
if targetType.Kind() == reflect.Slice {
targetType = targetType.Elem()
if targetType == nil {
return nil
}
}
if targetType.Kind() == reflect.Ptr {
targetType = targetType.Elem()
if targetType == nil {
return nil
}
}
if targetType.Kind() != reflect.Struct {
return nil
}
// Create a zero value of the target type
return reflect.New(targetType).Elem().Interface()
}
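A hedged sketch (not part of the diff) of GetRelationModel resolving a dotted relation path; the models are illustrative:

package main

import (
	"fmt"

	"github.com/bitechdev/ResolveSpec/pkg/reflection"
)

// Illustrative models; lookup also matches bun, gorm, and json tag names.
type Author struct {
	ID    int     `bun:"id,pk"`
	Posts []*Post `bun:"rel:has-many,join:id=author_id" json:"posts"`
}

type Post struct {
	ID     int     `bun:"id,pk"`
	Author *Author `bun:"rel:belongs-to,join:author_id=id" json:"author"`
}

func main() {
	// Dotted paths are traversed one relation level at a time: Author -> Posts -> Author.
	m := reflection.GetRelationModel(Author{}, "Posts.Author")
	fmt.Printf("%T\n", m) // main.Author (zero value of the related struct type)
}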

View File

@@ -474,3 +474,143 @@ func TestIsColumnWritableWithEmbedded(t *testing.T) {
})
}
}
// Test models with relations for GetSQLModelColumns
type User struct {
ID int `bun:"id,pk" json:"id"`
Name string `bun:"name" json:"name"`
Email string `bun:"email" json:"email"`
ProfileData string `json:"profile_data"` // No bun/gorm tag
Posts []Post `bun:"rel:has-many,join:id=user_id" json:"posts"`
Profile *Profile `bun:"rel:has-one,join:id=user_id" json:"profile"`
RowNumber int64 `bun:",scanonly" json:"_rownumber"`
}
type Post struct {
ID int `gorm:"column:id;primaryKey" json:"id"`
Title string `gorm:"column:title" json:"title"`
UserID int `gorm:"column:user_id;foreignKey" json:"user_id"`
User *User `gorm:"foreignKey:UserID;references:ID" json:"user"`
Tags []Tag `gorm:"many2many:post_tags" json:"tags"`
Content string `json:"content"` // No bun/gorm tag
}
type Profile struct {
ID int `bun:"id,pk" json:"id"`
Bio string `bun:"bio" json:"bio"`
UserID int `bun:"user_id" json:"user_id"`
}
type Tag struct {
ID int `gorm:"column:id;primaryKey" json:"id"`
Name string `gorm:"column:name" json:"name"`
}
// Model with scan-only embedded struct
type EntityWithScanOnlyEmbedded struct {
ID int `bun:"id,pk" json:"id"`
Name string `bun:"name" json:"name"`
AdhocBuffer `bun:",scanonly"` // Entire embedded struct is scan-only
}
func TestGetSQLModelColumns(t *testing.T) {
tests := []struct {
name string
model any
expected []string
}{
{
name: "Bun model with relations - excludes relations and non-SQL fields",
model: User{},
// Should include: id, name, email (has bun tags)
// Should exclude: profile_data (no bun tag), Posts/Profile (relations), RowNumber (scan-only)
expected: []string{"id", "name", "email"},
},
{
name: "GORM model with relations - excludes relations and non-SQL fields",
model: Post{},
// Should include: id, title, user_id (has gorm tags)
// Should exclude: content (no gorm tag), User/Tags (relations)
expected: []string{"id", "title", "user_id"},
},
{
name: "Model with embedded base and scan-only embedded",
model: EntityWithScanOnlyEmbedded{},
// Should include: id, name from main struct
// Should exclude: all fields from AdhocBuffer (scan-only embedded struct)
expected: []string{"id", "name"},
},
{
name: "Model with embedded - includes SQL fields, excludes scan-only",
model: ModelWithEmbedded{},
// Should include: rid_base, created_at (from BaseModel), name, description (from main)
// Should exclude: cql1, cql2, _rownumber (from AdhocBuffer - scan-only fields)
expected: []string{"rid_base", "created_at", "name", "description"},
},
{
name: "GORM model with embedded - includes SQL fields, excludes scan-only",
model: GormModelWithEmbedded{},
// Should include: rid_base, created_at (from GormBaseModel), name, description (from main)
// Should exclude: cql1, cql2 (scan-only), _rownumber (no gorm column tag, marked as -)
expected: []string{"rid_base", "created_at", "name", "description"},
},
{
name: "Simple Profile model",
model: Profile{},
// Should include all fields with bun tags
expected: []string{"id", "bio", "user_id"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := GetSQLModelColumns(tt.model)
if len(result) != len(tt.expected) {
t.Errorf("GetSQLModelColumns() returned %d columns, want %d.\nGot: %v\nWant: %v",
len(result), len(tt.expected), result, tt.expected)
return
}
for i, col := range result {
if col != tt.expected[i] {
t.Errorf("GetSQLModelColumns()[%d] = %v, want %v.\nFull result: %v",
i, col, tt.expected[i], result)
}
}
})
}
}
func TestGetSQLModelColumnsVsGetModelColumns(t *testing.T) {
// Demonstrate the difference between GetModelColumns and GetSQLModelColumns
user := User{}
allColumns := GetModelColumns(user)
sqlColumns := GetSQLModelColumns(user)
t.Logf("GetModelColumns(User): %v", allColumns)
t.Logf("GetSQLModelColumns(User): %v", sqlColumns)
// GetModelColumns should return more columns (includes fields with only json tags)
if len(allColumns) <= len(sqlColumns) {
t.Errorf("Expected GetModelColumns to return more columns than GetSQLModelColumns")
}
// GetSQLModelColumns should not include 'profile_data' (no bun tag)
for _, col := range sqlColumns {
if col == "profile_data" {
t.Errorf("GetSQLModelColumns should not include 'profile_data' (no bun/gorm tag)")
}
}
// GetModelColumns should include 'profile_data' (has json tag)
hasProfileData := false
for _, col := range allColumns {
if col == "profile_data" {
hasProfileData = true
break
}
}
if !hasProfileData {
t.Errorf("GetModelColumns should include 'profile_data' (has json tag)")
}
}

View File

@@ -191,10 +191,17 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
query = query.Table(tableName)
}
if len(options.Columns) == 0 && (len(options.ComputedColumns) > 0) {
logger.Debug("Populating options.Columns with all model columns since computed columns are additions")
options.Columns = reflection.GetSQLModelColumns(model)
}
// Apply column selection
if len(options.Columns) > 0 {
logger.Debug("Selecting columns: %v", options.Columns)
query = query.Column(options.Columns...)
for _, col := range options.Columns {
query = query.Column(reflection.ExtractSourceColumn(col))
}
}
if len(options.ComputedColumns) > 0 {
@@ -1105,69 +1112,6 @@ type relationshipInfo struct {
relatedModel interface{}
}
// validateAndFixPreloadWhere validates that the WHERE clause for a preload contains
// the relation prefix (alias). If not present, it attempts to add it to column references.
// Returns the fixed WHERE clause and an error if it cannot be safely fixed.
func (h *Handler) validateAndFixPreloadWhere(where string, relationName string) (string, error) {
if where == "" {
return where, nil
}
// Check if the relation name is already present in the WHERE clause
lowerWhere := strings.ToLower(where)
lowerRelation := strings.ToLower(relationName)
// Check for patterns like "relation.", "relation ", or just "relation" followed by a dot
if strings.Contains(lowerWhere, lowerRelation+".") ||
strings.Contains(lowerWhere, "`"+lowerRelation+"`.") ||
strings.Contains(lowerWhere, "\""+lowerRelation+"\".") {
// Relation prefix is already present
return where, nil
}
// If the WHERE clause is complex (contains OR, parentheses, subqueries, etc.),
// we can't safely auto-fix it - require explicit prefix
if strings.Contains(lowerWhere, " or ") ||
strings.Contains(where, "(") ||
strings.Contains(where, ")") {
return "", fmt.Errorf("preload WHERE condition must reference the relation '%s' (e.g., '%s.column_name'). Complex WHERE clauses with OR/parentheses must explicitly use the relation prefix", relationName, relationName)
}
// Try to add the relation prefix to simple column references
// This handles basic cases like "column = value" or "column = value AND other_column = value"
// Split by AND to handle multiple conditions (case-insensitive)
originalConditions := strings.Split(where, " AND ")
// If uppercase split didn't work, try lowercase
if len(originalConditions) == 1 {
originalConditions = strings.Split(where, " and ")
}
fixedConditions := make([]string, 0, len(originalConditions))
for _, cond := range originalConditions {
cond = strings.TrimSpace(cond)
if cond == "" {
continue
}
// Check if this condition already has a table prefix (contains a dot)
if strings.Contains(cond, ".") {
fixedConditions = append(fixedConditions, cond)
continue
}
// Add relation prefix to the column name
// This prefixes the entire condition with "relationName."
fixedCond := fmt.Sprintf("%s.%s", relationName, cond)
fixedConditions = append(fixedConditions, fixedCond)
}
fixedWhere := strings.Join(fixedConditions, " AND ")
logger.Debug("Auto-fixed preload WHERE clause: '%s' -> '%s'", where, fixedWhere)
return fixedWhere, nil
}
func (h *Handler) applyPreloads(model interface{}, query common.SelectQuery, preloads []common.PreloadOption) common.SelectQuery {
modelType := reflect.TypeOf(model)
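Both handler packages previously carried an identical copy of this validator; the hunks above replace those private methods with the shared common.ValidateAndFixPreloadWhere. A minimal usage sketch, assuming the shared helper keeps the signature and prefixing rules of the removed method:

package example

import (
    "fmt"

    "github.com/bitechdev/ResolveSpec/pkg/common"
)

// Sketch only: assumes common.ValidateAndFixPreloadWhere mirrors the removed
// handler method above (returns the clause unchanged when the relation prefix
// is present, rejects OR/parenthesised clauses without it, and prefixes simple
// AND-joined conditions otherwise).
func preloadWhereExample() error {
    fixedWhere, err := common.ValidateAndFixPreloadWhere("status = 1 AND deleted_at IS NULL", "Comments")
    if err != nil {
        // Complex clauses must already reference "Comments." explicitly.
        return err
    }
    fmt.Println(fixedWhere) // Comments.status = 1 AND Comments.deleted_at IS NULL
    return nil
}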
@@ -1197,7 +1141,7 @@ func (h *Handler) applyPreloads(model interface{}, query common.SelectQuery, pre
// Validate and fix WHERE clause to ensure it contains the relation prefix
if len(preload.Where) > 0 {
fixedWhere, err := h.validateAndFixPreloadWhere(preload.Where, relationFieldName)
fixedWhere, err := common.ValidateAndFixPreloadWhere(preload.Where, relationFieldName)
if err != nil {
logger.Error("Invalid preload WHERE clause for relation '%s': %v", relationFieldName, err)
panic(fmt.Errorf("invalid preload WHERE clause for relation '%s': %w", relationFieldName, err))
@@ -1207,8 +1151,13 @@ func (h *Handler) applyPreloads(model interface{}, query common.SelectQuery, pre
logger.Debug("Applying preload: %s", relationFieldName)
query = query.PreloadRelation(relationFieldName, func(sq common.SelectQuery) common.SelectQuery {
if len(preload.Columns) == 0 && (len(preload.ComputedQL) > 0 || len(preload.OmitColumns) > 0) {
preload.Columns = reflection.GetSQLModelColumns(model)
}
// Handle column selection and omission
if len(preload.OmitColumns) > 0 {
allCols := reflection.GetModelColumns(model)
allCols := reflection.GetSQLModelColumns(model)
// Remove omitted columns
preload.Columns = []string{}
for _, col := range allCols {
@@ -1262,7 +1211,10 @@ func (h *Handler) applyPreloads(model interface{}, query common.SelectQuery, pre
}
if len(preload.Where) > 0 {
sq = sq.Where(preload.Where)
sanitizedWhere := common.SanitizeWhereClause(preload.Where, reflection.ExtractTableNameOnly(preload.Relation))
if len(sanitizedWhere) > 0 {
sq = sq.Where(sanitizedWhere)
}
}
if preload.Limit != nil && *preload.Limit > 0 {

View File

@@ -200,69 +200,6 @@ func (h *Handler) HandleGet(w common.ResponseWriter, r common.Request, params ma
// parseOptionsFromHeaders is now implemented in headers.go
// validateAndFixPreloadWhere validates that the WHERE clause for a preload contains
// the relation prefix (alias). If not present, it attempts to add it to column references.
// Returns the fixed WHERE clause and an error if it cannot be safely fixed.
func (h *Handler) validateAndFixPreloadWhere(where string, relationName string) (string, error) {
if where == "" {
return where, nil
}
// Check if the relation name is already present in the WHERE clause
lowerWhere := strings.ToLower(where)
lowerRelation := strings.ToLower(relationName)
// Check for patterns like "relation.", "relation ", or just "relation" followed by a dot
if strings.Contains(lowerWhere, lowerRelation+".") ||
strings.Contains(lowerWhere, "`"+lowerRelation+"`.") ||
strings.Contains(lowerWhere, "\""+lowerRelation+"\".") {
// Relation prefix is already present
return where, nil
}
// If the WHERE clause is complex (contains OR, parentheses, subqueries, etc.),
// we can't safely auto-fix it - require explicit prefix
if strings.Contains(lowerWhere, " or ") ||
strings.Contains(where, "(") ||
strings.Contains(where, ")") {
return "", fmt.Errorf("preload WHERE condition must reference the relation '%s' (e.g., '%s.column_name'). Complex WHERE clauses with OR/parentheses must explicitly use the relation prefix", relationName, relationName)
}
// Try to add the relation prefix to simple column references
// This handles basic cases like "column = value" or "column = value AND other_column = value"
// Split by AND to handle multiple conditions (case-insensitive)
originalConditions := strings.Split(where, " AND ")
// If uppercase split didn't work, try lowercase
if len(originalConditions) == 1 {
originalConditions = strings.Split(where, " and ")
}
fixedConditions := make([]string, 0, len(originalConditions))
for _, cond := range originalConditions {
cond = strings.TrimSpace(cond)
if cond == "" {
continue
}
// Check if this condition already has a table prefix (contains a dot)
if strings.Contains(cond, ".") {
fixedConditions = append(fixedConditions, cond)
continue
}
// Add relation prefix to the column name
// This prefixes the entire condition with "relationName."
fixedCond := fmt.Sprintf("%s.%s", relationName, cond)
fixedConditions = append(fixedConditions, fixedCond)
}
fixedWhere := strings.Join(fixedConditions, " AND ")
logger.Debug("Auto-fixed preload WHERE clause: '%s' -> '%s'", where, fixedWhere)
return fixedWhere, nil
}
func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id string, options ExtendedRequestOptions) {
// Capture panics and return error response
defer func() {
@@ -276,6 +213,10 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
tableName := GetTableName(ctx)
model := GetModel(ctx)
if id == "" {
options.SingleRecordAsObject = false
}
// Execute BeforeRead hooks
hookCtx := &HookContext{
Context: ctx,
@@ -323,9 +264,12 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
query = query.Table(tableName)
}
// Note: X-Files configuration is now applied via parseXFiles which populates
// ExtendedRequestOptions fields (columns, filters, sort, preload, etc.)
// These are applied below in the normal query building process
// If we have computed columns/expressions but options.Columns is empty,
// populate it with all model columns first since computed columns are additions
if len(options.Columns) == 0 && (len(options.ComputedQL) > 0 || len(options.ComputedColumns) > 0) {
logger.Debug("Populating options.Columns with all model columns since computed columns are additions")
options.Columns = reflection.GetSQLModelColumns(model)
}
// Apply ComputedQL fields if any
if len(options.ComputedQL) > 0 {
@@ -359,7 +303,10 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
// Apply column selection
if len(options.Columns) > 0 {
logger.Debug("Selecting columns: %v", options.Columns)
query = query.Column(options.Columns...)
for _, col := range options.Columns {
query = query.Column(reflection.ExtractSourceColumn(col))
}
}
// Apply expand (Just expand to Preload for now)
@@ -410,7 +357,7 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
// Validate and fix WHERE clause to ensure it contains the relation prefix
if len(preload.Where) > 0 {
fixedWhere, err := h.validateAndFixPreloadWhere(preload.Where, preload.Relation)
fixedWhere, err := common.ValidateAndFixPreloadWhere(preload.Where, preload.Relation)
if err != nil {
logger.Error("Invalid preload WHERE clause for relation '%s': %v", preload.Relation, err)
h.sendError(w, http.StatusBadRequest, "invalid_preload_where",
@@ -420,50 +367,8 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
preload.Where = fixedWhere
}
query = query.PreloadRelation(preload.Relation, func(sq common.SelectQuery) common.SelectQuery {
if len(preload.OmitColumns) > 0 {
allCols := reflection.GetModelColumns(model)
// Remove omitted columns
preload.Columns = []string{}
for _, col := range allCols {
addCols := true
for _, omitCol := range preload.OmitColumns {
if col == omitCol {
addCols = false
break
}
}
if addCols {
preload.Columns = append(preload.Columns, col)
}
}
}
if len(preload.Columns) > 0 {
sq = sq.Column(preload.Columns...)
}
if len(preload.Filters) > 0 {
for _, filter := range preload.Filters {
sq = h.applyFilter(sq, filter, "", false, "AND")
}
}
if len(preload.Sort) > 0 {
for _, sort := range preload.Sort {
sq = sq.Order(fmt.Sprintf("%s %s", sort.Column, sort.Direction))
}
}
if len(preload.Where) > 0 {
sq = sq.Where(preload.Where)
}
if preload.Limit != nil && *preload.Limit > 0 {
sq = sq.Limit(*preload.Limit)
}
return sq
})
// Apply the preload with recursive support
query = h.applyPreloadWithRecursion(query, preload, model, 0)
}
// Apply DISTINCT if requested
@@ -493,13 +398,21 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
// Apply custom SQL WHERE clause (AND condition)
if options.CustomSQLWhere != "" {
logger.Debug("Applying custom SQL WHERE: %s", options.CustomSQLWhere)
query = query.Where(options.CustomSQLWhere)
// Sanitize without auto-prefixing since custom SQL may reference multiple tables
sanitizedWhere := common.SanitizeWhereClause(options.CustomSQLWhere, reflection.ExtractTableNameOnly(tableName))
if sanitizedWhere != "" {
query = query.Where(sanitizedWhere)
}
}
// Apply custom SQL WHERE clause (OR condition)
if options.CustomSQLOr != "" {
logger.Debug("Applying custom SQL OR: %s", options.CustomSQLOr)
query = query.WhereOr(options.CustomSQLOr)
// Sanitize without auto-prefixing since custom SQL may reference multiple tables
sanitizedOr := common.SanitizeWhereClause(options.CustomSQLOr, reflection.ExtractTableNameOnly(tableName))
if sanitizedOr != "" {
query = query.WhereOr(sanitizedOr)
}
}
// If ID is provided, filter by ID
@@ -575,7 +488,10 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
// Apply cursor filter to query
if cursorFilter != "" {
logger.Debug("Applying cursor filter: %s", cursorFilter)
query = query.Where(cursorFilter)
sanitizedCursor := common.SanitizeWhereClause(cursorFilter, reflection.ExtractTableNameOnly(tableName))
if sanitizedCursor != "" {
query = query.Where(sanitizedCursor)
}
}
}
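Every free-form clause now goes through the same guard: sanitize first, apply only when a non-empty string comes back. A minimal sketch of that pattern, assuming only what the call sites above show (the second argument is the bare table name and an empty result means the clause is skipped):

package example

import (
    "github.com/bitechdev/ResolveSpec/pkg/common"
    "github.com/bitechdev/ResolveSpec/pkg/reflection"
)

// applyCustomWhere mirrors the sanitize-then-apply guard used for the custom
// WHERE/OR clauses and the cursor filter above. The internal behaviour of
// SanitizeWhereClause is an assumption based on these call sites, not a
// documented contract.
func applyCustomWhere(query common.SelectQuery, rawWhere, tableName string) common.SelectQuery {
    sanitized := common.SanitizeWhereClause(rawWhere, reflection.ExtractTableNameOnly(tableName))
    if sanitized == "" {
        return query // nothing safe to apply
    }
    return query.Where(sanitized)
}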
@@ -649,6 +565,142 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
h.sendFormattedResponse(w, modelPtr, metadata, options)
}
// applyPreloadWithRecursion applies a preload with support for ComputedQL and recursive preloading
func (h *Handler) applyPreloadWithRecursion(query common.SelectQuery, preload common.PreloadOption, model interface{}, depth int) common.SelectQuery {
// Log relationship keys if they're specified (from XFiles)
if preload.RelatedKey != "" || preload.ForeignKey != "" || preload.PrimaryKey != "" {
logger.Debug("Preload %s has relationship keys - PK: %s, RelatedKey: %s, ForeignKey: %s",
preload.Relation, preload.PrimaryKey, preload.RelatedKey, preload.ForeignKey)
// Build a WHERE clause using the relationship keys if needed
// Note: Bun's PreloadRelation typically handles the relationship join automatically via struct tags
// However, if the relationship keys are explicitly provided from XFiles, we can use them
// to add additional filtering or validation
if preload.RelatedKey != "" && preload.Where == "" {
// For child tables: ensure the child's relatedkey column will be matched
// The actual parent value is dynamic and handled by Bun's preload mechanism
// We just log this for visibility
logger.Debug("Child table %s will be filtered by %s matching parent's primary key",
preload.Relation, preload.RelatedKey)
}
if preload.ForeignKey != "" && preload.Where == "" {
// For parent tables: ensure the parent's primary key matches the current table's foreign key
logger.Debug("Parent table %s will be filtered by primary key matching current table's %s",
preload.Relation, preload.ForeignKey)
}
}
// Apply the preload
query = query.PreloadRelation(preload.Relation, func(sq common.SelectQuery) common.SelectQuery {
// Get the related model for column operations
relatedModel := reflection.GetRelationModel(model, preload.Relation)
if relatedModel == nil {
logger.Warn("Could not get related model for preload: %s", preload.Relation)
// relatedModel = model // fallback to parent model
} else {
// If we have computed columns but no explicit columns, populate with all model columns first
// since computed columns are additions
if len(preload.Columns) == 0 && (len(preload.ComputedQL) > 0 || len(preload.OmitColumns) > 0) {
logger.Debug("Populating preload columns with all model columns since computed columns are additions")
preload.Columns = reflection.GetSQLModelColumns(relatedModel)
}
// Apply ComputedQL fields if any
if len(preload.ComputedQL) > 0 {
for colName, colExpr := range preload.ComputedQL {
logger.Debug("Applying computed column to preload %s: %s", preload.Relation, colName)
sq = sq.ColumnExpr(fmt.Sprintf("(%s) AS %s", colExpr, colName))
// Remove the computed column from selected columns to avoid duplication
for colIndex := range preload.Columns {
if preload.Columns[colIndex] == colName {
preload.Columns = append(preload.Columns[:colIndex], preload.Columns[colIndex+1:]...)
break
}
}
}
}
// Handle OmitColumns
if len(preload.OmitColumns) > 0 {
allCols := preload.Columns
// Remove omitted columns
preload.Columns = []string{}
for _, col := range allCols {
addCols := true
for _, omitCol := range preload.OmitColumns {
if col == omitCol {
addCols = false
break
}
}
if addCols {
preload.Columns = append(preload.Columns, col)
}
}
}
// Apply column selection
if len(preload.Columns) > 0 {
sq = sq.Column(preload.Columns...)
}
}
// Apply filters
if len(preload.Filters) > 0 {
for _, filter := range preload.Filters {
sq = h.applyFilter(sq, filter, "", false, "AND")
}
}
// Apply sorting
if len(preload.Sort) > 0 {
for _, sort := range preload.Sort {
sq = sq.Order(fmt.Sprintf("%s %s", sort.Column, sort.Direction))
}
}
// Apply WHERE clause
if len(preload.Where) > 0 {
sanitizedWhere := common.SanitizeWhereClause(preload.Where, reflection.ExtractTableNameOnly(preload.Relation))
if len(sanitizedWhere) > 0 {
sq = sq.Where(sanitizedWhere)
}
}
// Apply limit
if preload.Limit != nil && *preload.Limit > 0 {
sq = sq.Limit(*preload.Limit)
}
if preload.Offset != nil && *preload.Offset > 0 {
sq = sq.Offset(*preload.Offset)
}
return sq
})
// Handle recursive preloading
if preload.Recursive && depth < 5 {
logger.Debug("Applying recursive preload for %s at depth %d", preload.Relation, depth+1)
// For recursive relationships, we need to get the last part of the relation path
// e.g., "MastertaskItems" -> "MastertaskItems.MastertaskItems"
relationParts := strings.Split(preload.Relation, ".")
lastRelationName := relationParts[len(relationParts)-1]
// Create a recursive preload with the same configuration
// but with the relation path extended
recursivePreload := preload
recursivePreload.Relation = preload.Relation + "." + lastRelationName
// Recursively apply preload until we reach depth 5
query = h.applyPreloadWithRecursion(query, recursivePreload, model, depth+1)
}
return query
}
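Because the recursion only rewrites the relation path, each level re-registers the same preload configuration with the last path segment repeated, stopping after depth 5. A small sketch of the paths that end up being preloaded for a self-referencing relation such as "MastertaskItems":

package example

import "fmt"

// Sketch of the relation paths produced by the depth-limited recursion above:
// six levels in total, the original relation plus five recursive extensions.
func recursivePreloadPaths() {
    relation := "MastertaskItems"
    path := relation
    for depth := 0; depth <= 5; depth++ {
        fmt.Println(path) // MastertaskItems, MastertaskItems.MastertaskItems, ...
        path = path + "." + relation
    }
}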
func (h *Handler) handleCreate(ctx context.Context, w common.ResponseWriter, data interface{}, options ExtendedRequestOptions) {
// Capture panics and return error response
defer func() {
@@ -1309,7 +1361,7 @@ func (h *Handler) normalizeToSlice(data interface{}) []interface{} {
func (h *Handler) extractNestedRelations(
data map[string]interface{},
model interface{},
) (map[string]interface{}, map[string]interface{}, error) {
) (_cleanedData map[string]interface{}, _relations map[string]interface{}, _err error) {
// Get model type for reflection
modelType := reflect.TypeOf(model)
for modelType != nil && (modelType.Kind() == reflect.Ptr || modelType.Kind() == reflect.Slice || modelType.Kind() == reflect.Array) {
@@ -1738,7 +1790,7 @@ func (h *Handler) sendResponseWithOptions(w common.ResponseWriter, data interfac
// Returns the single element if data is a slice/array with exactly one element, otherwise returns data unchanged
func (h *Handler) normalizeResultArray(data interface{}) interface{} {
if data == nil {
return data
return nil
}
// Use reflection to check if data is a slice or array

View File

@@ -10,6 +10,7 @@ import (
"github.com/bitechdev/ResolveSpec/pkg/common"
"github.com/bitechdev/ResolveSpec/pkg/logger"
"github.com/bitechdev/ResolveSpec/pkg/reflection"
)
// ExtendedRequestOptions extends common.RequestOptions with additional features
@@ -110,105 +111,149 @@ func (h *Handler) parseOptionsFromHeaders(r common.Request, model interface{}) E
AdvancedSQL: make(map[string]string),
ComputedQL: make(map[string]string),
Expand: make([]ExpandOption, 0),
ResponseFormat: "simple", // Default response format
SingleRecordAsObject: true, // Default: normalize single-element arrays to objects
ResponseFormat: "simple", // Default response format
SingleRecordAsObject: true, // Default: normalize single-element arrays to objects
}
// Get all headers
headers := r.AllHeaders()
// Process each header
for key, value := range headers {
// Normalize header key to lowercase for consistent matching
normalizedKey := strings.ToLower(key)
// Get all query parameters
queryParams := r.AllQueryParams()
// Merge headers and query parameters - query parameters take precedence
// This allows the same parameters to be specified in either headers or query string
// Normalize keys to lowercase to ensure query params properly override headers
combinedParams := make(map[string]string)
for key, value := range headers {
combinedParams[strings.ToLower(key)] = value
}
for key, value := range queryParams {
combinedParams[strings.ToLower(key)] = value
}
// Process each parameter (from both headers and query params)
// Note: keys are already normalized to lowercase in combinedParams
for key, value := range combinedParams {
// Decode value if it's base64 encoded
decodedValue := decodeHeaderValue(value)
// Parse based on header prefix/name
// Parse based on parameter prefix/name
switch {
// Field Selection
case strings.HasPrefix(normalizedKey, "x-select-fields"):
case strings.HasPrefix(key, "x-select-fields"):
h.parseSelectFields(&options, decodedValue)
case strings.HasPrefix(normalizedKey, "x-not-select-fields"):
case strings.HasPrefix(key, "x-not-select-fields"):
h.parseNotSelectFields(&options, decodedValue)
case strings.HasPrefix(normalizedKey, "x-clean-json"):
case strings.HasPrefix(key, "x-clean-json"):
options.CleanJSON = strings.EqualFold(decodedValue, "true")
// Filtering & Search
case strings.HasPrefix(normalizedKey, "x-fieldfilter-"):
h.parseFieldFilter(&options, normalizedKey, decodedValue)
case strings.HasPrefix(normalizedKey, "x-searchfilter-"):
h.parseSearchFilter(&options, normalizedKey, decodedValue)
case strings.HasPrefix(normalizedKey, "x-searchop-"):
h.parseSearchOp(&options, normalizedKey, decodedValue, "AND")
case strings.HasPrefix(normalizedKey, "x-searchor-"):
h.parseSearchOp(&options, normalizedKey, decodedValue, "OR")
case strings.HasPrefix(normalizedKey, "x-searchand-"):
h.parseSearchOp(&options, normalizedKey, decodedValue, "AND")
case strings.HasPrefix(normalizedKey, "x-searchcols"):
case strings.HasPrefix(key, "x-fieldfilter-"):
h.parseFieldFilter(&options, key, decodedValue)
case strings.HasPrefix(key, "x-searchfilter-"):
h.parseSearchFilter(&options, key, decodedValue)
case strings.HasPrefix(key, "x-searchop-"):
h.parseSearchOp(&options, key, decodedValue, "AND")
case strings.HasPrefix(key, "x-searchor-"):
h.parseSearchOp(&options, key, decodedValue, "OR")
case strings.HasPrefix(key, "x-searchand-"):
h.parseSearchOp(&options, key, decodedValue, "AND")
case strings.HasPrefix(key, "x-searchcols"):
options.SearchColumns = h.parseCommaSeparated(decodedValue)
case strings.HasPrefix(normalizedKey, "x-custom-sql-w"):
options.CustomSQLWhere = decodedValue
case strings.HasPrefix(normalizedKey, "x-custom-sql-or"):
options.CustomSQLOr = decodedValue
case strings.HasPrefix(key, "x-custom-sql-w"):
if options.CustomSQLWhere != "" {
options.CustomSQLWhere = fmt.Sprintf("%s AND (%s)", options.CustomSQLWhere, decodedValue)
} else {
options.CustomSQLWhere = decodedValue
}
case strings.HasPrefix(key, "x-custom-sql-or"):
if options.CustomSQLOr != "" {
options.CustomSQLOr = fmt.Sprintf("%s OR (%s)", options.CustomSQLOr, decodedValue)
} else {
options.CustomSQLOr = decodedValue
}
// Joins & Relations
case strings.HasPrefix(normalizedKey, "x-preload"):
if strings.HasSuffix(normalizedKey, "-where") {
case strings.HasPrefix(key, "x-preload"):
if strings.HasSuffix(key, "-where") {
continue
}
whereClaude := headers[fmt.Sprintf("%s-where", key)]
whereClaude := combinedParams[fmt.Sprintf("%s-where", key)]
h.parsePreload(&options, decodedValue, decodeHeaderValue(whereClaude))
case strings.HasPrefix(normalizedKey, "x-expand"):
case strings.HasPrefix(key, "x-expand"):
h.parseExpand(&options, decodedValue)
case strings.HasPrefix(normalizedKey, "x-custom-sql-join"):
case strings.HasPrefix(key, "x-custom-sql-join"):
// TODO: Implement custom SQL join
logger.Debug("Custom SQL join not yet implemented: %s", decodedValue)
// Sorting & Pagination
case strings.HasPrefix(normalizedKey, "x-sort"):
case strings.HasPrefix(key, "x-sort"):
h.parseSorting(&options, decodedValue)
case strings.HasPrefix(normalizedKey, "x-limit"):
// Special cases for older clients using sort(a,b,-c) syntax
case strings.HasPrefix(key, "sort(") && strings.Contains(key, ")"):
sortValue := key[strings.Index(key, "(")+1 : strings.Index(key, ")")]
h.parseSorting(&options, sortValue)
case strings.HasPrefix(key, "x-limit"):
if limit, err := strconv.Atoi(decodedValue); err == nil {
options.Limit = &limit
}
case strings.HasPrefix(normalizedKey, "x-offset"):
// Special cases for older clients using limit(n) syntax
case strings.HasPrefix(key, "limit(") && strings.Contains(key, ")"):
limitValue := key[strings.Index(key, "(")+1 : strings.Index(key, ")")]
limitValueParts := strings.Split(limitValue, ",")
if len(limitValueParts) > 1 {
if offset, err := strconv.Atoi(limitValueParts[0]); err == nil {
options.Offset = &offset
}
if limit, err := strconv.Atoi(limitValueParts[1]); err == nil {
options.Limit = &limit
}
} else {
if limit, err := strconv.Atoi(limitValueParts[0]); err == nil {
options.Limit = &limit
}
}
case strings.HasPrefix(key, "x-offset"):
if offset, err := strconv.Atoi(decodedValue); err == nil {
options.Offset = &offset
}
case strings.HasPrefix(normalizedKey, "x-cursor-forward"):
case strings.HasPrefix(key, "x-cursor-forward"):
options.CursorForward = decodedValue
case strings.HasPrefix(normalizedKey, "x-cursor-backward"):
case strings.HasPrefix(key, "x-cursor-backward"):
options.CursorBackward = decodedValue
// Advanced Features
case strings.HasPrefix(normalizedKey, "x-advsql-"):
colName := strings.TrimPrefix(normalizedKey, "x-advsql-")
case strings.HasPrefix(key, "x-advsql-"):
colName := strings.TrimPrefix(key, "x-advsql-")
options.AdvancedSQL[colName] = decodedValue
case strings.HasPrefix(normalizedKey, "x-cql-sel-"):
colName := strings.TrimPrefix(normalizedKey, "x-cql-sel-")
case strings.HasPrefix(key, "x-cql-sel-"):
colName := strings.TrimPrefix(key, "x-cql-sel-")
options.ComputedQL[colName] = decodedValue
case strings.HasPrefix(normalizedKey, "x-distinct"):
case strings.HasPrefix(key, "x-distinct"):
options.Distinct = strings.EqualFold(decodedValue, "true")
case strings.HasPrefix(normalizedKey, "x-skipcount"):
case strings.HasPrefix(key, "x-skipcount"):
options.SkipCount = strings.EqualFold(decodedValue, "true")
case strings.HasPrefix(normalizedKey, "x-skipcache"):
case strings.HasPrefix(key, "x-skipcache"):
options.SkipCache = strings.EqualFold(decodedValue, "true")
case strings.HasPrefix(normalizedKey, "x-fetch-rownumber"):
case strings.HasPrefix(key, "x-fetch-rownumber"):
options.FetchRowNumber = &decodedValue
case strings.HasPrefix(normalizedKey, "x-pkrow"):
case strings.HasPrefix(key, "x-pkrow"):
options.PKRow = &decodedValue
// Response Format
case strings.HasPrefix(normalizedKey, "x-simpleapi"):
case strings.HasPrefix(key, "x-simpleapi"):
options.ResponseFormat = "simple"
case strings.HasPrefix(normalizedKey, "x-detailapi"):
case strings.HasPrefix(key, "x-detailapi"):
options.ResponseFormat = "detail"
case strings.HasPrefix(normalizedKey, "x-syncfusion"):
case strings.HasPrefix(key, "x-syncfusion"):
options.ResponseFormat = "syncfusion"
case strings.HasPrefix(normalizedKey, "x-single-record-as-object"):
case strings.HasPrefix(key, "x-single-record-as-object"):
// Parse as boolean - "false" disables, "true" enables (default is true)
if strings.EqualFold(decodedValue, "false") {
options.SingleRecordAsObject = false
@@ -217,11 +262,11 @@ func (h *Handler) parseOptionsFromHeaders(r common.Request, model interface{}) E
}
// Transaction Control
case strings.HasPrefix(normalizedKey, "x-transaction-atomic"):
case strings.HasPrefix(key, "x-transaction-atomic"):
options.AtomicTransaction = strings.EqualFold(decodedValue, "true")
// X-Files - comprehensive JSON configuration
case strings.HasPrefix(normalizedKey, "x-files"):
case strings.HasPrefix(key, "x-files"):
h.parseXFiles(&options, decodedValue)
}
}
@@ -231,6 +276,12 @@ func (h *Handler) parseOptionsFromHeaders(r common.Request, model interface{}) E
h.resolveRelationNamesInOptions(&options, model)
}
// Always sort according to the primary key if no sorting is specified
if len(options.Sort) == 0 {
pkName := reflection.GetPrimaryKeyName(model)
options.Sort = []common.SortOption{{Column: pkName, Direction: "ASC"}}
}
return options
}
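A short sketch tying the pieces above together: legacy dojo-style limit(s,e)/sort(...) keys, merged headers and query parameters, and multiple x-custom-sql-w-* values combined with AND. It reuses the MockRequest helper from the new query-parameter test file; the concrete keys and values are illustrative only.

package restheadspec

import (
    "strings"
    "testing"
)

func TestLegacyDojoParamsSketch(t *testing.T) {
    handler := NewHandler(nil, nil)
    req := &MockRequest{
        headers: map[string]string{
            "X-Select-Fields": "id,name",
        },
        queryParams: map[string]string{
            "limit(10,50)":           "", // legacy form: offset 10, limit 50
            "sort(-created_at,name)": "", // legacy form: created_at DESC, name ASC
            "x-custom-sql-w-1":       `clientstatus = 0`,
            "x-custom-sql-w-2":       `inactive = 0`, // combined with the other clause via AND
        },
    }
    options := handler.parseOptionsFromHeaders(req, nil)
    if options.Offset == nil || *options.Offset != 10 || options.Limit == nil || *options.Limit != 50 {
        t.Errorf("expected offset=10, limit=50 from limit(10,50), got %v/%v", options.Offset, options.Limit)
    }
    if len(options.Sort) != 2 || options.Sort[0].Direction != "DESC" {
        t.Errorf("expected created_at DESC then name ASC, got %v", options.Sort)
    }
    if len(options.Columns) != 2 {
        t.Errorf("expected 2 columns from the X-Select-Fields header, got %d", len(options.Columns))
    }
    if !strings.Contains(options.CustomSQLWhere, "clientstatus") || !strings.Contains(options.CustomSQLWhere, "inactive") {
        t.Errorf("expected both custom WHERE clauses to be combined, got %q", options.CustomSQLWhere)
    }
}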
@@ -684,7 +735,7 @@ func (h *Handler) resolveRelationNamesInOptions(options *ExtendedRequestOptions,
// Try to get the model type for the next level
// This allows nested resolution
if nextModel := h.getRelationModel(currentModel, resolvedPart); nextModel != nil {
if nextModel := reflection.GetRelationModel(currentModel, resolvedPart); nextModel != nil {
currentModel = nextModel
}
}
@@ -708,58 +759,6 @@ func (h *Handler) resolveRelationNamesInOptions(options *ExtendedRequestOptions,
}
}
// getRelationModel gets the model type for a relation field
func (h *Handler) getRelationModel(model interface{}, fieldName string) interface{} {
if model == nil || fieldName == "" {
return nil
}
modelType := reflect.TypeOf(model)
if modelType == nil {
return nil
}
if modelType.Kind() == reflect.Ptr {
modelType = modelType.Elem()
}
if modelType == nil || modelType.Kind() != reflect.Struct {
return nil
}
// Find the field
field, found := modelType.FieldByName(fieldName)
if !found {
return nil
}
// Get the target type
targetType := field.Type
if targetType == nil {
return nil
}
if targetType.Kind() == reflect.Slice {
targetType = targetType.Elem()
if targetType == nil {
return nil
}
}
if targetType.Kind() == reflect.Ptr {
targetType = targetType.Elem()
if targetType == nil {
return nil
}
}
if targetType.Kind() != reflect.Struct {
return nil
}
// Create a zero value of the target type
return reflect.New(targetType).Elem().Interface()
}
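GetRelationModel now lives in the reflection package (see the new call sites above). A minimal sketch, assuming the exported helper behaves like the removed method (returns a zero value of the related struct, or nil when the field does not resolve); the User/Post types here are hypothetical:

package example

import "github.com/bitechdev/ResolveSpec/pkg/reflection"

// Hypothetical models for illustration only.
type Post struct{ ID int }

type User struct {
    ID    int
    Posts []Post
}

// Sketch only: assumes reflection.GetRelationModel mirrors the removed method above.
func relatedModelExample() interface{} {
    related := reflection.GetRelationModel(User{}, "Posts")
    if related == nil {
        return nil // field missing or not a struct/slice of structs
    }
    return related // zero Post value, usable for column reflection
}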
// resolveRelationName resolves a relation name or table name to the actual field name in the model
// If the input is already a field name, it returns it as-is
// If the input is a table name, it looks up the corresponding relation field
@@ -793,7 +792,7 @@ func (h *Handler) resolveRelationName(model interface{}, nameOrTable string) str
field := modelType.Field(i)
if field.Name == nameOrTable {
// It's already a field name
logger.Debug("Input '%s' is a field name", nameOrTable)
// logger.Debug("Input '%s' is a field name", nameOrTable)
return nameOrTable
}
}
@@ -922,6 +921,33 @@ func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOption
}
}
// Add computed columns (CQL) -> ComputedQL
if len(xfile.CQLColumns) > 0 {
preloadOpt.ComputedQL = make(map[string]string)
for i, cqlExpr := range xfile.CQLColumns {
colName := fmt.Sprintf("cql%d", i+1)
preloadOpt.ComputedQL[colName] = cqlExpr
logger.Debug("X-Files: Added computed column %s to preload %s: %s", colName, relationPath, cqlExpr)
}
}
// Set recursive flag
preloadOpt.Recursive = xfile.Recursive
// Extract relationship keys for proper foreign key filtering
if xfile.PrimaryKey != "" {
preloadOpt.PrimaryKey = xfile.PrimaryKey
logger.Debug("X-Files: Set primary key for %s: %s", relationPath, xfile.PrimaryKey)
}
if xfile.RelatedKey != "" {
preloadOpt.RelatedKey = xfile.RelatedKey
logger.Debug("X-Files: Set related key for %s: %s", relationPath, xfile.RelatedKey)
}
if xfile.ForeignKey != "" {
preloadOpt.ForeignKey = xfile.ForeignKey
logger.Debug("X-Files: Set foreign key for %s: %s", relationPath, xfile.ForeignKey)
}
// Add the preload option
options.Preload = append(options.Preload, preloadOpt)
@@ -934,192 +960,6 @@ func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOption
}
}
// extractSourceColumn extracts the base column name from PostgreSQL JSON operators
// Examples:
// - "columna->>'val'" returns "columna"
// - "columna->'key'" returns "columna"
// - "columna" returns "columna"
// - "table.columna->>'val'" returns "table.columna"
func extractSourceColumn(colName string) string {
// Check for PostgreSQL JSON operators: -> and ->>
if idx := strings.Index(colName, "->>"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
if idx := strings.Index(colName, "->"); idx != -1 {
return strings.TrimSpace(colName[:idx])
}
return colName
}
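The removed helper is now exported as reflection.ExtractSourceColumn and used by both handleRead implementations above when applying column selection. A small sketch, assuming the exported version keeps the behaviour documented on the removed local helper:

package example

import (
    "fmt"

    "github.com/bitechdev/ResolveSpec/pkg/reflection"
)

// Sketch only: assumes reflection.ExtractSourceColumn strips PostgreSQL JSON
// operators (-> and ->>) exactly like the removed local helper above.
func extractSourceColumnExample() {
    fmt.Println(reflection.ExtractSourceColumn("columna->>'val'"))      // columna
    fmt.Println(reflection.ExtractSourceColumn("table.columna->'key'")) // table.columna
    fmt.Println(reflection.ExtractSourceColumn("columna"))              // columna
}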
// getColumnTypeFromModel uses reflection to determine the Go type of a column in a model
func (h *Handler) getColumnTypeFromModel(model interface{}, colName string) reflect.Kind {
if model == nil {
return reflect.Invalid
}
// Extract the source column name (remove JSON operators like ->> or ->)
sourceColName := extractSourceColumn(colName)
modelType := reflect.TypeOf(model)
// Dereference pointer if needed
if modelType.Kind() == reflect.Ptr {
modelType = modelType.Elem()
}
// Ensure it's a struct
if modelType.Kind() != reflect.Struct {
return reflect.Invalid
}
// Find the field by JSON tag or field name
for i := 0; i < modelType.NumField(); i++ {
field := modelType.Field(i)
// Check JSON tag
jsonTag := field.Tag.Get("json")
if jsonTag != "" {
// Parse JSON tag (format: "name,omitempty")
parts := strings.Split(jsonTag, ",")
if parts[0] == sourceColName {
return field.Type.Kind()
}
}
// Check field name (case-insensitive)
if strings.EqualFold(field.Name, sourceColName) {
return field.Type.Kind()
}
// Check snake_case conversion
snakeCaseName := toSnakeCase(field.Name)
if snakeCaseName == sourceColName {
return field.Type.Kind()
}
}
return reflect.Invalid
}
// toSnakeCase converts a string from CamelCase to snake_case
func toSnakeCase(s string) string {
var result strings.Builder
for i, r := range s {
if i > 0 && r >= 'A' && r <= 'Z' {
result.WriteRune('_')
}
result.WriteRune(r)
}
return strings.ToLower(result.String())
}
// isNumericType checks if a reflect.Kind is a numeric type
func isNumericType(kind reflect.Kind) bool {
return kind == reflect.Int || kind == reflect.Int8 || kind == reflect.Int16 ||
kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.Uint ||
kind == reflect.Uint8 || kind == reflect.Uint16 || kind == reflect.Uint32 ||
kind == reflect.Uint64 || kind == reflect.Float32 || kind == reflect.Float64
}
// isStringType checks if a reflect.Kind is a string type
func isStringType(kind reflect.Kind) bool {
return kind == reflect.String
}
// convertToNumericType converts a string value to the appropriate numeric type
func convertToNumericType(value string, kind reflect.Kind) (interface{}, error) {
value = strings.TrimSpace(value)
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
// Parse as integer
bitSize := 64
switch kind {
case reflect.Int8:
bitSize = 8
case reflect.Int16:
bitSize = 16
case reflect.Int32:
bitSize = 32
}
intVal, err := strconv.ParseInt(value, 10, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid integer value: %w", err)
}
// Return the appropriate type
switch kind {
case reflect.Int:
return int(intVal), nil
case reflect.Int8:
return int8(intVal), nil
case reflect.Int16:
return int16(intVal), nil
case reflect.Int32:
return int32(intVal), nil
case reflect.Int64:
return intVal, nil
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
// Parse as unsigned integer
bitSize := 64
switch kind {
case reflect.Uint8:
bitSize = 8
case reflect.Uint16:
bitSize = 16
case reflect.Uint32:
bitSize = 32
}
uintVal, err := strconv.ParseUint(value, 10, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid unsigned integer value: %w", err)
}
// Return the appropriate type
switch kind {
case reflect.Uint:
return uint(uintVal), nil
case reflect.Uint8:
return uint8(uintVal), nil
case reflect.Uint16:
return uint16(uintVal), nil
case reflect.Uint32:
return uint32(uintVal), nil
case reflect.Uint64:
return uintVal, nil
}
case reflect.Float32, reflect.Float64:
// Parse as float
bitSize := 64
if kind == reflect.Float32 {
bitSize = 32
}
floatVal, err := strconv.ParseFloat(value, bitSize)
if err != nil {
return nil, fmt.Errorf("invalid float value: %w", err)
}
if kind == reflect.Float32 {
return float32(floatVal), nil
}
return floatVal, nil
}
return nil, fmt.Errorf("unsupported numeric type: %v", kind)
}
// isNumericValue checks if a string value can be parsed as a number
func isNumericValue(value string) bool {
value = strings.TrimSpace(value)
_, err := strconv.ParseFloat(value, 64)
return err == nil
}
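These conversion helpers are also consolidated into the reflection package; the hunk below swaps the handler over to reflection.IsNumericValue, reflection.IsNumericType, reflection.IsStringType and reflection.ConvertToNumericType. A minimal sketch, assuming the exported helpers keep the signatures and parsing rules of the removed local versions:

package example

import (
    "fmt"
    "reflect"

    "github.com/bitechdev/ResolveSpec/pkg/reflection"
)

// Sketch only: assumes the exported helpers mirror the removed local versions above.
func numericFilterExample() {
    if reflection.IsNumericValue("18") {
        val, err := reflection.ConvertToNumericType("18", reflect.Int64)
        if err != nil {
            fmt.Println("falling back to a text cast:", err)
            return
        }
        fmt.Printf("%T %v\n", val, val) // int64 18
    }
}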
// ColumnCastInfo holds information about whether a column needs casting
type ColumnCastInfo struct {
NeedsCast bool
@@ -1133,7 +973,7 @@ func (h *Handler) ValidateAndAdjustFilterForColumnType(filter *common.FilterOpti
return ColumnCastInfo{NeedsCast: false, IsNumericType: false}
}
colType := h.getColumnTypeFromModel(model, filter.Column)
colType := reflection.GetColumnTypeFromModel(model, filter.Column)
if colType == reflect.Invalid {
// Column not found in model, no casting needed
logger.Debug("Column %s not found in model, skipping type validation", filter.Column)
@@ -1144,18 +984,18 @@ func (h *Handler) ValidateAndAdjustFilterForColumnType(filter *common.FilterOpti
valueIsNumeric := false
if strVal, ok := filter.Value.(string); ok {
strVal = strings.Trim(strVal, "%")
valueIsNumeric = isNumericValue(strVal)
valueIsNumeric = reflection.IsNumericValue(strVal)
}
// Adjust based on column type
switch {
case isNumericType(colType):
case reflection.IsNumericType(colType):
// Column is numeric
if valueIsNumeric {
// Value is numeric - try to convert it
if strVal, ok := filter.Value.(string); ok {
strVal = strings.Trim(strVal, "%")
numericVal, err := convertToNumericType(strVal, colType)
numericVal, err := reflection.ConvertToNumericType(strVal, colType)
if err != nil {
logger.Debug("Failed to convert value '%s' to numeric type for column %s, will use text cast", strVal, filter.Column)
return ColumnCastInfo{NeedsCast: true, IsNumericType: true}
@@ -1170,7 +1010,7 @@ func (h *Handler) ValidateAndAdjustFilterForColumnType(filter *common.FilterOpti
return ColumnCastInfo{NeedsCast: true, IsNumericType: true}
}
case isStringType(colType):
case reflection.IsStringType(colType):
// String columns don't need casting
return ColumnCastInfo{NeedsCast: false, IsNumericType: false}

View File

@@ -0,0 +1,403 @@
package restheadspec
import (
"testing"
)
// MockRequest implements common.Request interface for testing
type MockRequest struct {
headers map[string]string
queryParams map[string]string
}
func (m *MockRequest) Method() string {
return "GET"
}
func (m *MockRequest) URL() string {
return "http://example.com/test"
}
func (m *MockRequest) Header(key string) string {
return m.headers[key]
}
func (m *MockRequest) AllHeaders() map[string]string {
return m.headers
}
func (m *MockRequest) Body() ([]byte, error) {
return nil, nil
}
func (m *MockRequest) PathParam(key string) string {
return ""
}
func (m *MockRequest) QueryParam(key string) string {
return m.queryParams[key]
}
func (m *MockRequest) AllQueryParams() map[string]string {
return m.queryParams
}
func TestParseOptionsFromQueryParams(t *testing.T) {
handler := NewHandler(nil, nil)
tests := []struct {
name string
queryParams map[string]string
headers map[string]string
validate func(t *testing.T, options ExtendedRequestOptions)
}{
{
name: "Parse custom SQL WHERE from query params",
queryParams: map[string]string{
"x-custom-sql-w-1": `("v_webui_clients".clientstatus = 0 or "v_webui_clients".clientstatus is null)`,
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if options.CustomSQLWhere == "" {
t.Error("Expected CustomSQLWhere to be set from query param")
}
expected := `("v_webui_clients".clientstatus = 0 or "v_webui_clients".clientstatus is null)`
if options.CustomSQLWhere != expected {
t.Errorf("Expected CustomSQLWhere=%q, got %q", expected, options.CustomSQLWhere)
}
},
},
{
name: "Parse sort from query params",
queryParams: map[string]string{
"x-sort": "-applicationdate,name",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if len(options.Sort) != 2 {
t.Errorf("Expected 2 sort options, got %d", len(options.Sort))
return
}
if options.Sort[0].Column != "applicationdate" || options.Sort[0].Direction != "DESC" {
t.Errorf("Expected first sort: applicationdate DESC, got %s %s", options.Sort[0].Column, options.Sort[0].Direction)
}
if options.Sort[1].Column != "name" || options.Sort[1].Direction != "ASC" {
t.Errorf("Expected second sort: name ASC, got %s %s", options.Sort[1].Column, options.Sort[1].Direction)
}
},
},
{
name: "Parse limit and offset from query params",
queryParams: map[string]string{
"x-limit": "100",
"x-offset": "50",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if options.Limit == nil || *options.Limit != 100 {
t.Errorf("Expected limit=100, got %v", options.Limit)
}
if options.Offset == nil || *options.Offset != 50 {
t.Errorf("Expected offset=50, got %v", options.Offset)
}
},
},
{
name: "Parse field filters from query params",
queryParams: map[string]string{
"x-fieldfilter-status": "active",
"x-fieldfilter-type": "user",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if len(options.Filters) != 2 {
t.Errorf("Expected 2 filters, got %d", len(options.Filters))
return
}
// Check that filters were created
foundStatus := false
foundType := false
for _, filter := range options.Filters {
if filter.Column == "status" && filter.Value == "active" && filter.Operator == "eq" {
foundStatus = true
}
if filter.Column == "type" && filter.Value == "user" && filter.Operator == "eq" {
foundType = true
}
}
if !foundStatus {
t.Error("Expected status filter not found")
}
if !foundType {
t.Error("Expected type filter not found")
}
},
},
{
name: "Parse select fields from query params",
queryParams: map[string]string{
"x-select-fields": "id,name,email",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if len(options.Columns) != 3 {
t.Errorf("Expected 3 columns, got %d", len(options.Columns))
return
}
expected := []string{"id", "name", "email"}
for i, col := range expected {
if i >= len(options.Columns) || options.Columns[i] != col {
t.Errorf("Expected column[%d]=%s, got %v", i, col, options.Columns)
}
}
},
},
{
name: "Parse preload from query params",
queryParams: map[string]string{
"x-preload": "posts:title,content|comments",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if len(options.Preload) != 2 {
t.Errorf("Expected 2 preload options, got %d", len(options.Preload))
return
}
// Check first preload (posts with columns)
if options.Preload[0].Relation != "posts" {
t.Errorf("Expected first preload relation=posts, got %s", options.Preload[0].Relation)
}
if len(options.Preload[0].Columns) != 2 {
t.Errorf("Expected 2 columns for posts preload, got %d", len(options.Preload[0].Columns))
}
// Check second preload (comments without columns)
if options.Preload[1].Relation != "comments" {
t.Errorf("Expected second preload relation=comments, got %s", options.Preload[1].Relation)
}
},
},
{
name: "Query params take precedence over headers",
queryParams: map[string]string{
"x-limit": "100",
},
headers: map[string]string{
"X-Limit": "50",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if options.Limit == nil || *options.Limit != 100 {
t.Errorf("Expected query param limit=100 to override header, got %v", options.Limit)
}
},
},
{
name: "Parse search operators from query params",
queryParams: map[string]string{
"x-searchop-contains-name": "john",
"x-searchop-gt-age": "18",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if len(options.Filters) != 2 {
t.Errorf("Expected 2 filters, got %d", len(options.Filters))
return
}
// Check for ILIKE filter
foundContains := false
foundGt := false
for _, filter := range options.Filters {
if filter.Column == "name" && filter.Operator == "ilike" {
foundContains = true
}
if filter.Column == "age" && filter.Operator == "gt" && filter.Value == "18" {
foundGt = true
}
}
if !foundContains {
t.Error("Expected contains filter not found")
}
if !foundGt {
t.Error("Expected gt filter not found")
}
},
},
{
name: "Parse complex example with multiple params",
queryParams: map[string]string{
"x-custom-sql-w-1": `("v_webui_clients".clientstatus = 0)`,
"x-sort": "-applicationdate",
"x-limit": "100",
"x-select-fields": "id,name,status",
"x-fieldfilter-active": "true",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
// Validate CustomSQLWhere
if options.CustomSQLWhere == "" {
t.Error("Expected CustomSQLWhere to be set")
}
// Validate Sort
if len(options.Sort) != 1 || options.Sort[0].Column != "applicationdate" || options.Sort[0].Direction != "DESC" {
t.Errorf("Expected sort by applicationdate DESC, got %v", options.Sort)
}
// Validate Limit
if options.Limit == nil || *options.Limit != 100 {
t.Errorf("Expected limit=100, got %v", options.Limit)
}
// Validate Columns
if len(options.Columns) != 3 {
t.Errorf("Expected 3 columns, got %d", len(options.Columns))
}
// Validate Filters
if len(options.Filters) < 1 {
t.Error("Expected at least 1 filter")
}
},
},
{
name: "Parse distinct flag from query params",
queryParams: map[string]string{
"x-distinct": "true",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if !options.Distinct {
t.Error("Expected Distinct to be true")
}
},
},
{
name: "Parse skip count flag from query params",
queryParams: map[string]string{
"x-skipcount": "true",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if !options.SkipCount {
t.Error("Expected SkipCount to be true")
}
},
},
{
name: "Parse response format from query params",
queryParams: map[string]string{
"x-syncfusion": "true",
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if options.ResponseFormat != "syncfusion" {
t.Errorf("Expected ResponseFormat=syncfusion, got %s", options.ResponseFormat)
}
},
},
{
name: "Parse custom SQL OR from query params",
queryParams: map[string]string{
"x-custom-sql-or": `("field1" = 'value1' OR "field2" = 'value2')`,
},
validate: func(t *testing.T, options ExtendedRequestOptions) {
if options.CustomSQLOr == "" {
t.Error("Expected CustomSQLOr to be set")
}
expected := `("field1" = 'value1' OR "field2" = 'value2')`
if options.CustomSQLOr != expected {
t.Errorf("Expected CustomSQLOr=%q, got %q", expected, options.CustomSQLOr)
}
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create mock request
req := &MockRequest{
headers: tt.headers,
queryParams: tt.queryParams,
}
if req.headers == nil {
req.headers = make(map[string]string)
}
if req.queryParams == nil {
req.queryParams = make(map[string]string)
}
// Parse options
options := handler.parseOptionsFromHeaders(req, nil)
// Validate
tt.validate(t, options)
})
}
}
func TestQueryParamsWithURLEncoding(t *testing.T) {
handler := NewHandler(nil, nil)
// Test with a long SQL WHERE clause passed as a query parameter (like the user's example)
req := &MockRequest{
headers: make(map[string]string),
queryParams: map[string]string{
// Decoded form of the URL-encoded SQL WHERE clause (query parameters arrive already decoded)
"x-custom-sql-w-1": `("v_webui_clients".clientstatus = 0 or "v_webui_clients".clientstatus is null) and ("v_webui_clients".inactive = 0 or "v_webui_clients".inactive is null)`,
},
}
options := handler.parseOptionsFromHeaders(req, nil)
if options.CustomSQLWhere == "" {
t.Error("Expected CustomSQLWhere to be set from URL-encoded query param")
}
// The SQL should contain the expected conditions
if !contains(options.CustomSQLWhere, "clientstatus") {
t.Error("Expected CustomSQLWhere to contain 'clientstatus'")
}
if !contains(options.CustomSQLWhere, "inactive") {
t.Error("Expected CustomSQLWhere to contain 'inactive'")
}
}
func TestHeadersAndQueryParamsCombined(t *testing.T) {
handler := NewHandler(nil, nil)
// Test that headers and query params can work together
req := &MockRequest{
headers: map[string]string{
"X-Select-Fields": "id,name",
"X-Limit": "50",
},
queryParams: map[string]string{
"x-sort": "-created_at",
"x-offset": "10",
// This should override the header value
"x-limit": "100",
},
}
options := handler.parseOptionsFromHeaders(req, nil)
// Verify columns from header
if len(options.Columns) != 2 {
t.Errorf("Expected 2 columns from header, got %d", len(options.Columns))
}
// Verify sort from query param
if len(options.Sort) != 1 || options.Sort[0].Column != "created_at" {
t.Errorf("Expected sort from query param, got %v", options.Sort)
}
// Verify offset from query param
if options.Offset == nil || *options.Offset != 10 {
t.Errorf("Expected offset=10 from query param, got %v", options.Offset)
}
// Verify limit from query param (should override header)
if options.Limit == nil {
t.Error("Expected limit to be set from query param")
} else if *options.Limit != 100 {
t.Errorf("Expected limit=100 from query param (overriding header), got %d", *options.Limit)
}
}
// Helper function to check if a string contains a substring
func contains(s, substr string) bool {
return len(s) >= len(substr) && (s == substr || len(s) > len(substr) && containsHelper(s, substr))
}
func containsHelper(s, substr string) bool {
for i := 0; i <= len(s)-len(substr); i++ {
if s[i:i+len(substr)] == substr {
return true
}
}
return false
}