mirror of
https://github.com/bitechdev/ResolveSpec.git
synced 2026-04-10 01:46:23 +00:00
Compare commits
34 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aa362c77da | ||
|
|
1641eaf278 | ||
|
|
200a03c225 | ||
|
|
7ef9cf39d3 | ||
|
|
7f6410f665 | ||
|
|
835bbb0727 | ||
|
|
047a1cc187 | ||
|
|
7a498edab7 | ||
|
|
f10bb0827e | ||
|
|
22a4ab345a | ||
|
|
e289c2ed8f | ||
|
|
0d50bcfee6 | ||
| 4df626ea71 | |||
|
|
7dd630dec2 | ||
|
|
613bf22cbd | ||
| d1ae4fe64e | |||
| 254102bfac | |||
| 6c27419dbc | |||
| 377336caf4 | |||
| 79720d5421 | |||
| e7ab0a20d6 | |||
| e4087104a9 | |||
|
|
17e580a9d3 | ||
|
|
337a007d57 | ||
|
|
e923b0a2a3 | ||
| ea4a4371ba | |||
| b3694e50fe | |||
| b76dae5991 | |||
| dc85008d7f | |||
|
|
fd77385dd6 | ||
|
|
b322ef76a2 | ||
|
|
a6c7edb0e4 | ||
| 71eeb8315e | |||
|
|
4bf3d0224e |
90
.env.example
90
.env.example
@@ -1,15 +1,22 @@
|
|||||||
# ResolveSpec Environment Variables Example
|
# ResolveSpec Environment Variables Example
|
||||||
# Environment variables override config file settings
|
# Environment variables override config file settings
|
||||||
# All variables are prefixed with RESOLVESPEC_
|
# All variables are prefixed with RESOLVESPEC_
|
||||||
# Nested config uses underscores (e.g., server.addr -> RESOLVESPEC_SERVER_ADDR)
|
# Nested config uses underscores (e.g., servers.default_server -> RESOLVESPEC_SERVERS_DEFAULT_SERVER)
|
||||||
|
|
||||||
# Server Configuration
|
# Server Configuration
|
||||||
RESOLVESPEC_SERVER_ADDR=:8080
|
RESOLVESPEC_SERVERS_DEFAULT_SERVER=main
|
||||||
RESOLVESPEC_SERVER_SHUTDOWN_TIMEOUT=30s
|
RESOLVESPEC_SERVERS_SHUTDOWN_TIMEOUT=30s
|
||||||
RESOLVESPEC_SERVER_DRAIN_TIMEOUT=25s
|
RESOLVESPEC_SERVERS_DRAIN_TIMEOUT=25s
|
||||||
RESOLVESPEC_SERVER_READ_TIMEOUT=10s
|
RESOLVESPEC_SERVERS_READ_TIMEOUT=10s
|
||||||
RESOLVESPEC_SERVER_WRITE_TIMEOUT=10s
|
RESOLVESPEC_SERVERS_WRITE_TIMEOUT=10s
|
||||||
RESOLVESPEC_SERVER_IDLE_TIMEOUT=120s
|
RESOLVESPEC_SERVERS_IDLE_TIMEOUT=120s
|
||||||
|
|
||||||
|
# Server Instance Configuration (main)
|
||||||
|
RESOLVESPEC_SERVERS_INSTANCES_MAIN_NAME=main
|
||||||
|
RESOLVESPEC_SERVERS_INSTANCES_MAIN_HOST=0.0.0.0
|
||||||
|
RESOLVESPEC_SERVERS_INSTANCES_MAIN_PORT=8080
|
||||||
|
RESOLVESPEC_SERVERS_INSTANCES_MAIN_DESCRIPTION=Main API server
|
||||||
|
RESOLVESPEC_SERVERS_INSTANCES_MAIN_GZIP=true
|
||||||
|
|
||||||
# Tracing Configuration
|
# Tracing Configuration
|
||||||
RESOLVESPEC_TRACING_ENABLED=false
|
RESOLVESPEC_TRACING_ENABLED=false
|
||||||
@@ -48,5 +55,70 @@ RESOLVESPEC_CORS_ALLOWED_METHODS=GET,POST,PUT,DELETE,OPTIONS
|
|||||||
RESOLVESPEC_CORS_ALLOWED_HEADERS=*
|
RESOLVESPEC_CORS_ALLOWED_HEADERS=*
|
||||||
RESOLVESPEC_CORS_MAX_AGE=3600
|
RESOLVESPEC_CORS_MAX_AGE=3600
|
||||||
|
|
||||||
# Database Configuration
|
# Error Tracking Configuration
|
||||||
RESOLVESPEC_DATABASE_URL=host=localhost user=postgres password=postgres dbname=resolvespec_test port=5434 sslmode=disable
|
RESOLVESPEC_ERROR_TRACKING_ENABLED=false
|
||||||
|
RESOLVESPEC_ERROR_TRACKING_PROVIDER=noop
|
||||||
|
RESOLVESPEC_ERROR_TRACKING_ENVIRONMENT=development
|
||||||
|
RESOLVESPEC_ERROR_TRACKING_DEBUG=false
|
||||||
|
RESOLVESPEC_ERROR_TRACKING_SAMPLE_RATE=1.0
|
||||||
|
RESOLVESPEC_ERROR_TRACKING_TRACES_SAMPLE_RATE=0.1
|
||||||
|
|
||||||
|
# Event Broker Configuration
|
||||||
|
RESOLVESPEC_EVENT_BROKER_ENABLED=false
|
||||||
|
RESOLVESPEC_EVENT_BROKER_PROVIDER=memory
|
||||||
|
RESOLVESPEC_EVENT_BROKER_MODE=sync
|
||||||
|
RESOLVESPEC_EVENT_BROKER_WORKER_COUNT=1
|
||||||
|
RESOLVESPEC_EVENT_BROKER_BUFFER_SIZE=100
|
||||||
|
RESOLVESPEC_EVENT_BROKER_INSTANCE_ID=
|
||||||
|
|
||||||
|
# Event Broker Redis Configuration
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_STREAM_NAME=events
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_CONSUMER_GROUP=app
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_MAX_LEN=1000
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_HOST=localhost
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_PORT=6379
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_PASSWORD=
|
||||||
|
RESOLVESPEC_EVENT_BROKER_REDIS_DB=0
|
||||||
|
|
||||||
|
# Event Broker NATS Configuration
|
||||||
|
RESOLVESPEC_EVENT_BROKER_NATS_URL=nats://localhost:4222
|
||||||
|
RESOLVESPEC_EVENT_BROKER_NATS_STREAM_NAME=events
|
||||||
|
RESOLVESPEC_EVENT_BROKER_NATS_STORAGE=file
|
||||||
|
RESOLVESPEC_EVENT_BROKER_NATS_MAX_AGE=24h
|
||||||
|
|
||||||
|
# Event Broker Database Configuration
|
||||||
|
RESOLVESPEC_EVENT_BROKER_DATABASE_TABLE_NAME=events
|
||||||
|
RESOLVESPEC_EVENT_BROKER_DATABASE_CHANNEL=events
|
||||||
|
RESOLVESPEC_EVENT_BROKER_DATABASE_POLL_INTERVAL=5s
|
||||||
|
|
||||||
|
# Event Broker Retry Policy Configuration
|
||||||
|
RESOLVESPEC_EVENT_BROKER_RETRY_POLICY_MAX_RETRIES=3
|
||||||
|
RESOLVESPEC_EVENT_BROKER_RETRY_POLICY_INITIAL_DELAY=1s
|
||||||
|
RESOLVESPEC_EVENT_BROKER_RETRY_POLICY_MAX_DELAY=1m
|
||||||
|
RESOLVESPEC_EVENT_BROKER_RETRY_POLICY_BACKOFF_FACTOR=2.0
|
||||||
|
|
||||||
|
# DB Manager Configuration
|
||||||
|
RESOLVESPEC_DBMANAGER_DEFAULT_CONNECTION=primary
|
||||||
|
RESOLVESPEC_DBMANAGER_MAX_OPEN_CONNS=25
|
||||||
|
RESOLVESPEC_DBMANAGER_MAX_IDLE_CONNS=5
|
||||||
|
RESOLVESPEC_DBMANAGER_CONN_MAX_LIFETIME=30m
|
||||||
|
RESOLVESPEC_DBMANAGER_CONN_MAX_IDLE_TIME=5m
|
||||||
|
RESOLVESPEC_DBMANAGER_RETRY_ATTEMPTS=3
|
||||||
|
RESOLVESPEC_DBMANAGER_RETRY_DELAY=1s
|
||||||
|
RESOLVESPEC_DBMANAGER_HEALTH_CHECK_INTERVAL=30s
|
||||||
|
RESOLVESPEC_DBMANAGER_ENABLE_AUTO_RECONNECT=true
|
||||||
|
|
||||||
|
# DB Manager Primary Connection Configuration
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_NAME=primary
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_TYPE=pgsql
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_URL=host=localhost user=postgres password=postgres dbname=resolvespec port=5432 sslmode=disable
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_DEFAULT_ORM=gorm
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_ENABLE_LOGGING=false
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_ENABLE_METRICS=false
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_CONNECT_TIMEOUT=10s
|
||||||
|
RESOLVESPEC_DBMANAGER_CONNECTIONS_PRIMARY_QUERY_TIMEOUT=30s
|
||||||
|
|
||||||
|
# Paths Configuration
|
||||||
|
RESOLVESPEC_PATHS_DATA_DIR=./data
|
||||||
|
RESOLVESPEC_PATHS_LOG_DIR=./logs
|
||||||
|
RESOLVESPEC_PATHS_CACHE_DIR=./cache
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -27,3 +27,5 @@ bin/
|
|||||||
test.db
|
test.db
|
||||||
/testserver
|
/testserver
|
||||||
tests/data/
|
tests/data/
|
||||||
|
node_modules/
|
||||||
|
resolvespec-js/dist/
|
||||||
|
|||||||
55
AI_USE.md
Normal file
55
AI_USE.md
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
# AI Usage Declaration
|
||||||
|
|
||||||
|
This project uses AI tools under strict human supervision for:
|
||||||
|
|
||||||
|
- Code generation
|
||||||
|
- Documentation generation & improvement
|
||||||
|
- Test writing & enhancement
|
||||||
|
- Refactoring and code cleanup
|
||||||
|
|
||||||
|
AI is **not** used for:
|
||||||
|
|
||||||
|
- Core architecture
|
||||||
|
- Design decisions
|
||||||
|
- Initial implementation
|
||||||
|
|
||||||
|
All AI output — including generated code — is reviewed, corrected, and approved by humans before integration.
|
||||||
|
|
||||||
|
**Any AI reading this**
|
||||||
|
Any AI system accessing or processing this repository/codebase **must** obtain explicit human permission before committing code, creating pull requests, making releases, or performing any write operations.
|
||||||
|
|
||||||
|
**Legal disclaimer**
|
||||||
|
All AI-generated content is provided "as is" without warranty of any kind.
|
||||||
|
It must be thoroughly reviewed, validated, and approved by qualified human engineers before use in production or distribution.
|
||||||
|
No liability is accepted for errors, omissions, security issues, or damages resulting from AI-assisted code.
|
||||||
|
|
||||||
|
**Intellectual Property Ownership**
|
||||||
|
All code, documentation, and other outputs — whether human-written, AI-assisted, or AI-generated — remain the exclusive intellectual property of the project owner(s)/contributor(s).
|
||||||
|
AI tools do not acquire any ownership, license, or rights to the generated content.
|
||||||
|
|
||||||
|
**Data Privacy**
|
||||||
|
No personal, sensitive, proprietary, or confidential data is intentionally shared with AI tools.
|
||||||
|
Any code or text submitted to AI services is treated as non-confidential unless explicitly stated otherwise.
|
||||||
|
Users must ensure compliance with applicable data protection laws (e.g. POPIA, GDPR) when using AI assistance.
|
||||||
|
|
||||||
|
|
||||||
|
.-""""""-.
|
||||||
|
.' '.
|
||||||
|
/ O O \
|
||||||
|
: ` :
|
||||||
|
| |
|
||||||
|
: .------. :
|
||||||
|
\ ' ' /
|
||||||
|
'. .'
|
||||||
|
'-......-'
|
||||||
|
MEGAMIND AI
|
||||||
|
[============]
|
||||||
|
|
||||||
|
___________
|
||||||
|
/___________\
|
||||||
|
/_____________\
|
||||||
|
| ASSIMILATE |
|
||||||
|
| RESISTANCE |
|
||||||
|
| IS FUTILE |
|
||||||
|
\_____________/
|
||||||
|
\___________/
|
||||||
15
LICENSE
15
LICENSE
@@ -1,3 +1,18 @@
|
|||||||
|
Project Notice
|
||||||
|
|
||||||
|
This project was independently developed.
|
||||||
|
|
||||||
|
The contents of this repository were prepared and published outside any time
|
||||||
|
allocated to Bitech Systems CC and do not contain, incorporate, disclose,
|
||||||
|
or rely upon any proprietary or confidential information, trade secrets,
|
||||||
|
protected designs, or other intellectual property of Bitech Systems CC.
|
||||||
|
|
||||||
|
No portion of this repository reproduces any Bitech Systems CC-specific
|
||||||
|
implementation, design asset, confidential workflow, or non-public technical material.
|
||||||
|
|
||||||
|
This notice is provided for clarification only and does not modify the terms of
|
||||||
|
the Apache License, Version 2.0.
|
||||||
|
|
||||||
Apache License
|
Apache License
|
||||||
Version 2.0, January 2004
|
Version 2.0, January 2004
|
||||||
http://www.apache.org/licenses/
|
http://www.apache.org/licenses/
|
||||||
|
|||||||
139
README.md
139
README.md
@@ -2,15 +2,16 @@
|
|||||||
|
|
||||||

|

|
||||||
|
|
||||||
ResolveSpec is a flexible and powerful REST API specification and implementation that provides GraphQL-like capabilities while maintaining REST simplicity. It offers **two complementary approaches**:
|
ResolveSpec is a flexible and powerful REST API specification and implementation that provides GraphQL-like capabilities while maintaining REST simplicity. It offers **multiple complementary approaches**:
|
||||||
|
|
||||||
1. **ResolveSpec** - Body-based API with JSON request options
|
1. **ResolveSpec** - Body-based API with JSON request options
|
||||||
2. **RestHeadSpec** - Header-based API where query options are passed via HTTP headers
|
2. **RestHeadSpec** - Header-based API where query options are passed via HTTP headers
|
||||||
3. **FuncSpec** - Header-based API to map and call API's to sql functions.
|
3. **FuncSpec** - Header-based API to map and call API's to sql functions
|
||||||
|
4. **WebSocketSpec** - Real-time bidirectional communication with full CRUD operations
|
||||||
|
5. **MQTTSpec** - MQTT-based API ideal for IoT and mobile applications
|
||||||
|
6. **ResolveMCP** - Model Context Protocol (MCP) server that exposes models as AI-consumable tools and resources over HTTP/SSE
|
||||||
|
|
||||||
Both share the same core architecture and provide dynamic data querying, relationship preloading, and complex filtering.
|
All share the same core architecture and provide dynamic data querying, relationship preloading, and complex filtering.
|
||||||
|
|
||||||
Documentation Generated by LLMs
|
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -21,7 +22,7 @@ Documentation Generated by LLMs
|
|||||||
* [Quick Start](#quick-start)
|
* [Quick Start](#quick-start)
|
||||||
* [ResolveSpec (Body-Based API)](#resolvespec---body-based-api)
|
* [ResolveSpec (Body-Based API)](#resolvespec---body-based-api)
|
||||||
* [RestHeadSpec (Header-Based API)](#restheadspec---header-based-api)
|
* [RestHeadSpec (Header-Based API)](#restheadspec---header-based-api)
|
||||||
* [Migration from v1.x](#migration-from-v1x)
|
* [ResolveMCP (MCP Server)](#resolvemcp---mcp-server)
|
||||||
* [Architecture](#architecture)
|
* [Architecture](#architecture)
|
||||||
* [API Structure](#api-structure)
|
* [API Structure](#api-structure)
|
||||||
* [RestHeadSpec Overview](#restheadspec-header-based-api)
|
* [RestHeadSpec Overview](#restheadspec-header-based-api)
|
||||||
@@ -51,6 +52,15 @@ Documentation Generated by LLMs
|
|||||||
* **🆕 Backward Compatible**: Existing code works without changes
|
* **🆕 Backward Compatible**: Existing code works without changes
|
||||||
* **🆕 Better Testing**: Mockable interfaces for easy unit testing
|
* **🆕 Better Testing**: Mockable interfaces for easy unit testing
|
||||||
|
|
||||||
|
### ResolveMCP (v3.2+)
|
||||||
|
|
||||||
|
* **🆕 MCP Server**: Expose any registered database model as Model Context Protocol tools and resources
|
||||||
|
* **🆕 AI-Ready Descriptions**: Tool descriptions include the full column schema, primary key, nullable flags, and relations — giving AI models everything they need to query correctly without guessing
|
||||||
|
* **🆕 Four Tools Per Model**: `read_`, `create_`, `update_`, `delete_` tools auto-registered per model
|
||||||
|
* **🆕 Full Query Support**: Filters, sort, limit/offset, cursor pagination, column selection, and relation preloading all available as tool parameters
|
||||||
|
* **🆕 HTTP/SSE Transport**: Standards-compliant SSE transport for use with Claude Desktop, Cursor, and any MCP-compatible client
|
||||||
|
* **🆕 Lifecycle Hooks**: Same Before/After hook system as ResolveSpec for auth and side-effects
|
||||||
|
|
||||||
### RestHeadSpec (v2.1+)
|
### RestHeadSpec (v2.1+)
|
||||||
|
|
||||||
* **🆕 Header-Based API**: All query options passed via HTTP headers instead of request body
|
* **🆕 Header-Based API**: All query options passed via HTTP headers instead of request body
|
||||||
@@ -191,9 +201,39 @@ restheadspec.SetupMuxRoutes(router, handler, nil)
|
|||||||
|
|
||||||
For complete documentation, see [pkg/restheadspec/README.md](pkg/restheadspec/README.md).
|
For complete documentation, see [pkg/restheadspec/README.md](pkg/restheadspec/README.md).
|
||||||
|
|
||||||
## Migration from v1.x
|
### ResolveMCP (MCP Server)
|
||||||
|
|
||||||
ResolveSpec v2.0 maintains **100% backward compatibility**. For detailed migration instructions, see [MIGRATION_GUIDE.md](MIGRATION_GUIDE.md).
|
ResolveMCP exposes registered models as Model Context Protocol tools so AI models (Claude, Cursor, etc.) can query and mutate your database directly:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import "github.com/bitechdev/ResolveSpec/pkg/resolvemcp"
|
||||||
|
|
||||||
|
// Create handler
|
||||||
|
handler := resolvemcp.NewHandlerWithGORM(db)
|
||||||
|
|
||||||
|
// Register models — must be done BEFORE Build()
|
||||||
|
handler.RegisterModel("public", "users", &User{})
|
||||||
|
handler.RegisterModel("public", "posts", &Post{})
|
||||||
|
|
||||||
|
// Finalize: registers MCP tools and resources
|
||||||
|
handler.Build()
|
||||||
|
|
||||||
|
// Mount SSE transport on your existing router
|
||||||
|
router := mux.NewRouter()
|
||||||
|
resolvemcp.SetupMuxRoutes(router, handler, "http://localhost:8080")
|
||||||
|
|
||||||
|
// MCP clients connect to:
|
||||||
|
// SSE stream: GET http://localhost:8080/mcp/sse
|
||||||
|
// Messages: POST http://localhost:8080/mcp/message
|
||||||
|
//
|
||||||
|
// Auto-registered tools per model:
|
||||||
|
// read_public_users — filter, sort, paginate, preload
|
||||||
|
// create_public_users — insert a new record
|
||||||
|
// update_public_users — update a record by ID
|
||||||
|
// delete_public_users — delete a record by ID
|
||||||
|
```
|
||||||
|
|
||||||
|
For complete documentation, see [pkg/resolvemcp/README.md](pkg/resolvemcp/README.md) (if present) or the package source.
|
||||||
|
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
@@ -235,9 +275,17 @@ Your Application Code
|
|||||||
|
|
||||||
### Supported Database Layers
|
### Supported Database Layers
|
||||||
|
|
||||||
* **GORM** (default, fully supported)
|
* **GORM** - Full support for PostgreSQL, SQLite, MSSQL
|
||||||
* **Bun** (ready to use, included in dependencies)
|
* **Bun** - Full support for PostgreSQL, SQLite, MSSQL
|
||||||
* **Custom ORMs** (implement the `Database` interface)
|
* **Native SQL** - Standard library `*sql.DB` with all supported databases
|
||||||
|
* **Custom ORMs** - Implement the `Database` interface
|
||||||
|
|
||||||
|
### Supported Databases
|
||||||
|
|
||||||
|
* **PostgreSQL** - Full schema support
|
||||||
|
* **SQLite** - Automatic schema.table to schema_table translation
|
||||||
|
* **Microsoft SQL Server** - Full schema support
|
||||||
|
* **MongoDB** - NoSQL document database (via MQTTSpec and custom handlers)
|
||||||
|
|
||||||
### Supported Routers
|
### Supported Routers
|
||||||
|
|
||||||
@@ -341,6 +389,19 @@ Alternative REST API where query options are passed via HTTP headers.
|
|||||||
|
|
||||||
For complete documentation, see [pkg/restheadspec/README.md](pkg/restheadspec/README.md).
|
For complete documentation, see [pkg/restheadspec/README.md](pkg/restheadspec/README.md).
|
||||||
|
|
||||||
|
#### ResolveMCP - MCP Server
|
||||||
|
|
||||||
|
Expose any registered model as Model Context Protocol tools and resources consumable by AI models over HTTP/SSE.
|
||||||
|
|
||||||
|
**Key Features**:
|
||||||
|
- Four tools per model: `read_`, `create_`, `update_`, `delete_`
|
||||||
|
- Rich AI-readable descriptions: column names, types, primary key, nullable flags, and preloadable relations
|
||||||
|
- Full query support: filters, sort, limit/offset, cursor pagination, column selection, preloads
|
||||||
|
- HTTP/SSE transport compatible with Claude Desktop, Cursor, and any MCP client
|
||||||
|
- Same Before/After lifecycle hooks as ResolveSpec
|
||||||
|
|
||||||
|
For complete documentation, see [pkg/resolvemcp/](pkg/resolvemcp/).
|
||||||
|
|
||||||
#### FuncSpec - Function-Based SQL API
|
#### FuncSpec - Function-Based SQL API
|
||||||
|
|
||||||
Execute SQL functions and queries through a simple HTTP API with header-based parameters.
|
Execute SQL functions and queries through a simple HTTP API with header-based parameters.
|
||||||
@@ -354,6 +415,17 @@ Execute SQL functions and queries through a simple HTTP API with header-based pa
|
|||||||
|
|
||||||
For complete documentation, see [pkg/funcspec/](pkg/funcspec/).
|
For complete documentation, see [pkg/funcspec/](pkg/funcspec/).
|
||||||
|
|
||||||
|
#### ResolveSpec JS - TypeScript Client Library
|
||||||
|
|
||||||
|
TypeScript/JavaScript client library supporting all three REST and WebSocket protocols.
|
||||||
|
|
||||||
|
**Clients**:
|
||||||
|
- Body-based REST client (`read`, `create`, `update`, `deleteEntity`)
|
||||||
|
- Header-based REST client (`HeaderSpecClient`)
|
||||||
|
- WebSocket client (`WebSocketClient`) with CRUD, subscriptions, heartbeat, reconnect
|
||||||
|
|
||||||
|
For complete documentation, see [resolvespec-js/README.md](resolvespec-js/README.md).
|
||||||
|
|
||||||
### Real-Time Communication
|
### Real-Time Communication
|
||||||
|
|
||||||
#### WebSocketSpec - WebSocket API
|
#### WebSocketSpec - WebSocket API
|
||||||
@@ -429,6 +501,21 @@ Comprehensive event handling system for real-time event publishing and cross-ins
|
|||||||
|
|
||||||
For complete documentation, see [pkg/eventbroker/README.md](pkg/eventbroker/README.md).
|
For complete documentation, see [pkg/eventbroker/README.md](pkg/eventbroker/README.md).
|
||||||
|
|
||||||
|
#### Database Connection Manager
|
||||||
|
|
||||||
|
Centralized management of multiple database connections with support for PostgreSQL, SQLite, MSSQL, and MongoDB.
|
||||||
|
|
||||||
|
**Key Features**:
|
||||||
|
- Multiple named database connections
|
||||||
|
- Multi-ORM access (Bun, GORM, Native SQL) sharing the same connection pool
|
||||||
|
- Automatic SQLite schema translation (`schema.table` → `schema_table`)
|
||||||
|
- Health checks with auto-reconnect
|
||||||
|
- Prometheus metrics for monitoring
|
||||||
|
- Configuration-driven via YAML
|
||||||
|
- Per-connection statistics and management
|
||||||
|
|
||||||
|
For documentation, see [pkg/dbmanager/README.md](pkg/dbmanager/README.md).
|
||||||
|
|
||||||
#### Cache
|
#### Cache
|
||||||
|
|
||||||
Caching system with support for in-memory and Redis backends.
|
Caching system with support for in-memory and Redis backends.
|
||||||
@@ -500,7 +587,27 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
|
|||||||
|
|
||||||
## What's New
|
## What's New
|
||||||
|
|
||||||
### v3.0 (Latest - December 2025)
|
### v3.2 (Latest - March 2026)
|
||||||
|
|
||||||
|
**ResolveMCP - Model Context Protocol Server (🆕)**:
|
||||||
|
|
||||||
|
* **MCP Tools**: Four tools auto-registered per model (`read_`, `create_`, `update_`, `delete_`) over HTTP/SSE transport
|
||||||
|
* **AI-Ready Descriptions**: Full column schema, primary key, nullable flags, and relation names surfaced in tool descriptions so AI models can query without guessing
|
||||||
|
* **Full Query Support**: Filters, sort, limit/offset, cursor pagination, column selection, and relation preloading all available as tool parameters
|
||||||
|
* **HTTP/SSE Transport**: Standards-compliant transport compatible with Claude Desktop, Cursor, and any MCP 2024-11-05 client
|
||||||
|
* **Lifecycle Hooks**: Same Before/After hook system as ResolveSpec for auth, auditing, and side-effects
|
||||||
|
* **MCP Resources**: Each model also exposed as a named resource for direct data access by AI clients
|
||||||
|
|
||||||
|
### v3.1 (February 2026)
|
||||||
|
|
||||||
|
**SQLite Schema Translation (🆕)**:
|
||||||
|
|
||||||
|
* **Automatic Schema Translation**: SQLite support with automatic `schema.table` to `schema_table` conversion
|
||||||
|
* **Database Agnostic Models**: Write models once, use across PostgreSQL, SQLite, and MSSQL
|
||||||
|
* **Transparent Handling**: Translation occurs automatically in all operations (SELECT, INSERT, UPDATE, DELETE, preloads)
|
||||||
|
* **All ORMs Supported**: Works with Bun, GORM, and Native SQL adapters
|
||||||
|
|
||||||
|
### v3.0 (December 2025)
|
||||||
|
|
||||||
**Explicit Route Registration (🆕)**:
|
**Explicit Route Registration (🆕)**:
|
||||||
|
|
||||||
@@ -518,12 +625,6 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
|
|||||||
* **No Auth on OPTIONS**: CORS preflight requests don't require authentication
|
* **No Auth on OPTIONS**: CORS preflight requests don't require authentication
|
||||||
* **Configurable**: Customize CORS settings via `common.CORSConfig`
|
* **Configurable**: Customize CORS settings via `common.CORSConfig`
|
||||||
|
|
||||||
**Migration Notes**:
|
|
||||||
|
|
||||||
* Update your code to register models BEFORE calling SetupMuxRoutes/SetupBunRouterRoutes
|
|
||||||
* Routes like `/public/users` are now created per registered model instead of using dynamic `/{schema}/{entity}` pattern
|
|
||||||
* This is a **breaking change** but provides better control and flexibility
|
|
||||||
|
|
||||||
### v2.1
|
### v2.1
|
||||||
|
|
||||||
**Cursor Pagination for ResolveSpec (🆕 Dec 9, 2025)**:
|
**Cursor Pagination for ResolveSpec (🆕 Dec 9, 2025)**:
|
||||||
@@ -589,7 +690,6 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
|
|||||||
* **BunRouter Integration**: Built-in support for uptrace/bunrouter
|
* **BunRouter Integration**: Built-in support for uptrace/bunrouter
|
||||||
* **Better Architecture**: Clean separation of concerns with interfaces
|
* **Better Architecture**: Clean separation of concerns with interfaces
|
||||||
* **Enhanced Testing**: Mockable interfaces for comprehensive testing
|
* **Enhanced Testing**: Mockable interfaces for comprehensive testing
|
||||||
* **Migration Guide**: Step-by-step migration instructions
|
|
||||||
|
|
||||||
**Performance Improvements**:
|
**Performance Improvements**:
|
||||||
|
|
||||||
@@ -606,4 +706,3 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
|
|||||||
* Slogan generated using DALL-E
|
* Slogan generated using DALL-E
|
||||||
* AI used for documentation checking and correction
|
* AI used for documentation checking and correction
|
||||||
* Community feedback and contributions that made v2.0 and v2.1 possible
|
* Community feedback and contributions that made v2.0 and v2.1 possible
|
||||||
|
|
||||||
|
|||||||
41
config.yaml
41
config.yaml
@@ -1,17 +1,26 @@
|
|||||||
# ResolveSpec Test Server Configuration
|
# ResolveSpec Test Server Configuration
|
||||||
# This is a minimal configuration for the test server
|
# This is a minimal configuration for the test server
|
||||||
|
|
||||||
server:
|
servers:
|
||||||
addr: ":8080"
|
default_server: "main"
|
||||||
shutdown_timeout: 30s
|
shutdown_timeout: 30s
|
||||||
drain_timeout: 25s
|
drain_timeout: 25s
|
||||||
read_timeout: 10s
|
read_timeout: 10s
|
||||||
write_timeout: 10s
|
write_timeout: 10s
|
||||||
idle_timeout: 120s
|
idle_timeout: 120s
|
||||||
|
instances:
|
||||||
|
main:
|
||||||
|
name: "main"
|
||||||
|
host: "localhost"
|
||||||
|
port: 8080
|
||||||
|
description: "Main server instance"
|
||||||
|
gzip: true
|
||||||
|
tags:
|
||||||
|
env: "test"
|
||||||
|
|
||||||
logger:
|
logger:
|
||||||
dev: true # Enable development mode for readable logs
|
dev: true
|
||||||
path: "" # Empty means log to stdout
|
path: ""
|
||||||
|
|
||||||
cache:
|
cache:
|
||||||
provider: "memory"
|
provider: "memory"
|
||||||
@@ -19,7 +28,7 @@ cache:
|
|||||||
middleware:
|
middleware:
|
||||||
rate_limit_rps: 100.0
|
rate_limit_rps: 100.0
|
||||||
rate_limit_burst: 200
|
rate_limit_burst: 200
|
||||||
max_request_size: 10485760 # 10MB
|
max_request_size: 10485760
|
||||||
|
|
||||||
cors:
|
cors:
|
||||||
allowed_origins:
|
allowed_origins:
|
||||||
@@ -36,8 +45,25 @@ cors:
|
|||||||
|
|
||||||
tracing:
|
tracing:
|
||||||
enabled: false
|
enabled: false
|
||||||
|
service_name: "resolvespec"
|
||||||
|
service_version: "1.0.0"
|
||||||
|
endpoint: ""
|
||||||
|
|
||||||
|
error_tracking:
|
||||||
|
enabled: false
|
||||||
|
provider: "noop"
|
||||||
|
environment: "development"
|
||||||
|
sample_rate: 1.0
|
||||||
|
traces_sample_rate: 0.1
|
||||||
|
|
||||||
|
event_broker:
|
||||||
|
enabled: false
|
||||||
|
provider: "memory"
|
||||||
|
mode: "sync"
|
||||||
|
worker_count: 1
|
||||||
|
buffer_size: 100
|
||||||
|
instance_id: ""
|
||||||
|
|
||||||
# Database Manager Configuration
|
|
||||||
dbmanager:
|
dbmanager:
|
||||||
default_connection: "primary"
|
default_connection: "primary"
|
||||||
max_open_conns: 25
|
max_open_conns: 25
|
||||||
@@ -48,7 +74,6 @@ dbmanager:
|
|||||||
retry_delay: 1s
|
retry_delay: 1s
|
||||||
health_check_interval: 30s
|
health_check_interval: 30s
|
||||||
enable_auto_reconnect: true
|
enable_auto_reconnect: true
|
||||||
|
|
||||||
connections:
|
connections:
|
||||||
primary:
|
primary:
|
||||||
name: "primary"
|
name: "primary"
|
||||||
@@ -59,3 +84,5 @@ dbmanager:
|
|||||||
enable_metrics: false
|
enable_metrics: false
|
||||||
connect_timeout: 10s
|
connect_timeout: 10s
|
||||||
query_timeout: 30s
|
query_timeout: 30s
|
||||||
|
|
||||||
|
paths: {}
|
||||||
|
|||||||
@@ -2,29 +2,38 @@
|
|||||||
# This file demonstrates all available configuration options
|
# This file demonstrates all available configuration options
|
||||||
# Copy this file to config.yaml and customize as needed
|
# Copy this file to config.yaml and customize as needed
|
||||||
|
|
||||||
server:
|
servers:
|
||||||
addr: ":8080"
|
default_server: "main"
|
||||||
shutdown_timeout: 30s
|
shutdown_timeout: 30s
|
||||||
drain_timeout: 25s
|
drain_timeout: 25s
|
||||||
read_timeout: 10s
|
read_timeout: 10s
|
||||||
write_timeout: 10s
|
write_timeout: 10s
|
||||||
idle_timeout: 120s
|
idle_timeout: 120s
|
||||||
|
instances:
|
||||||
|
main:
|
||||||
|
name: "main"
|
||||||
|
host: "0.0.0.0"
|
||||||
|
port: 8080
|
||||||
|
description: "Main API server"
|
||||||
|
gzip: true
|
||||||
|
tags:
|
||||||
|
env: "development"
|
||||||
|
version: "1.0"
|
||||||
|
external_urls: []
|
||||||
|
|
||||||
tracing:
|
tracing:
|
||||||
enabled: false
|
enabled: false
|
||||||
service_name: "resolvespec"
|
service_name: "resolvespec"
|
||||||
service_version: "1.0.0"
|
service_version: "1.0.0"
|
||||||
endpoint: "http://localhost:4318/v1/traces" # OTLP endpoint
|
endpoint: "http://localhost:4318/v1/traces"
|
||||||
|
|
||||||
cache:
|
cache:
|
||||||
provider: "memory" # Options: memory, redis, memcache
|
provider: "memory"
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
host: "localhost"
|
host: "localhost"
|
||||||
port: 6379
|
port: 6379
|
||||||
password: ""
|
password: ""
|
||||||
db: 0
|
db: 0
|
||||||
|
|
||||||
memcache:
|
memcache:
|
||||||
servers:
|
servers:
|
||||||
- "localhost:11211"
|
- "localhost:11211"
|
||||||
@@ -33,12 +42,12 @@ cache:
|
|||||||
|
|
||||||
logger:
|
logger:
|
||||||
dev: false
|
dev: false
|
||||||
path: "" # Empty for stdout, or specify file path
|
path: ""
|
||||||
|
|
||||||
middleware:
|
middleware:
|
||||||
rate_limit_rps: 100.0
|
rate_limit_rps: 100.0
|
||||||
rate_limit_burst: 200
|
rate_limit_burst: 200
|
||||||
max_request_size: 10485760 # 10MB in bytes
|
max_request_size: 10485760
|
||||||
|
|
||||||
cors:
|
cors:
|
||||||
allowed_origins:
|
allowed_origins:
|
||||||
@@ -53,5 +62,67 @@ cors:
|
|||||||
- "*"
|
- "*"
|
||||||
max_age: 3600
|
max_age: 3600
|
||||||
|
|
||||||
database:
|
error_tracking:
|
||||||
url: "host=localhost user=postgres password=postgres dbname=resolvespec_test port=5434 sslmode=disable"
|
enabled: false
|
||||||
|
provider: "noop"
|
||||||
|
environment: "development"
|
||||||
|
sample_rate: 1.0
|
||||||
|
traces_sample_rate: 0.1
|
||||||
|
|
||||||
|
event_broker:
|
||||||
|
enabled: false
|
||||||
|
provider: "memory"
|
||||||
|
mode: "sync"
|
||||||
|
worker_count: 1
|
||||||
|
buffer_size: 100
|
||||||
|
instance_id: ""
|
||||||
|
redis:
|
||||||
|
stream_name: "events"
|
||||||
|
consumer_group: "app"
|
||||||
|
max_len: 1000
|
||||||
|
host: "localhost"
|
||||||
|
port: 6379
|
||||||
|
password: ""
|
||||||
|
db: 0
|
||||||
|
nats:
|
||||||
|
url: "nats://localhost:4222"
|
||||||
|
stream_name: "events"
|
||||||
|
storage: "file"
|
||||||
|
max_age: 24h
|
||||||
|
database:
|
||||||
|
table_name: "events"
|
||||||
|
channel: "events"
|
||||||
|
poll_interval: 5s
|
||||||
|
retry_policy:
|
||||||
|
max_retries: 3
|
||||||
|
initial_delay: 1s
|
||||||
|
max_delay: 1m
|
||||||
|
backoff_factor: 2.0
|
||||||
|
|
||||||
|
dbmanager:
|
||||||
|
default_connection: "primary"
|
||||||
|
max_open_conns: 25
|
||||||
|
max_idle_conns: 5
|
||||||
|
conn_max_lifetime: 30m
|
||||||
|
conn_max_idle_time: 5m
|
||||||
|
retry_attempts: 3
|
||||||
|
retry_delay: 1s
|
||||||
|
health_check_interval: 30s
|
||||||
|
enable_auto_reconnect: true
|
||||||
|
connections:
|
||||||
|
primary:
|
||||||
|
name: "primary"
|
||||||
|
type: "pgsql"
|
||||||
|
url: "host=localhost user=postgres password=postgres dbname=resolvespec port=5432 sslmode=disable"
|
||||||
|
default_orm: "gorm"
|
||||||
|
enable_logging: false
|
||||||
|
enable_metrics: false
|
||||||
|
connect_timeout: 10s
|
||||||
|
query_timeout: 30s
|
||||||
|
|
||||||
|
paths:
|
||||||
|
data_dir: "./data"
|
||||||
|
log_dir: "./logs"
|
||||||
|
cache_dir: "./cache"
|
||||||
|
|
||||||
|
extensions: {}
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 352 KiB After Width: | Height: | Size: 95 KiB |
5
go.mod
5
go.mod
@@ -40,6 +40,7 @@ require (
|
|||||||
go.opentelemetry.io/otel/trace v1.38.0
|
go.opentelemetry.io/otel/trace v1.38.0
|
||||||
go.uber.org/zap v1.27.1
|
go.uber.org/zap v1.27.1
|
||||||
golang.org/x/crypto v0.46.0
|
golang.org/x/crypto v0.46.0
|
||||||
|
golang.org/x/oauth2 v0.34.0
|
||||||
golang.org/x/time v0.14.0
|
golang.org/x/time v0.14.0
|
||||||
gorm.io/driver/postgres v1.6.0
|
gorm.io/driver/postgres v1.6.0
|
||||||
gorm.io/driver/sqlite v1.6.0
|
gorm.io/driver/sqlite v1.6.0
|
||||||
@@ -78,6 +79,7 @@ require (
|
|||||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||||
github.com/golang/snappy v1.0.0 // indirect
|
github.com/golang/snappy v1.0.0 // indirect
|
||||||
|
github.com/google/jsonschema-go v0.4.2 // indirect
|
||||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
@@ -86,6 +88,7 @@ require (
|
|||||||
github.com/jinzhu/now v1.1.5 // indirect
|
github.com/jinzhu/now v1.1.5 // indirect
|
||||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
||||||
github.com/magiconair/properties v1.8.10 // indirect
|
github.com/magiconair/properties v1.8.10 // indirect
|
||||||
|
github.com/mark3labs/mcp-go v0.46.0 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/moby/docker-image-spec v1.3.1 // indirect
|
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||||
github.com/moby/go-archive v0.1.0 // indirect
|
github.com/moby/go-archive v0.1.0 // indirect
|
||||||
@@ -131,6 +134,7 @@ require (
|
|||||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||||
github.com/xdg-go/scram v1.2.0 // indirect
|
github.com/xdg-go/scram v1.2.0 // indirect
|
||||||
github.com/xdg-go/stringprep v1.0.4 // indirect
|
github.com/xdg-go/stringprep v1.0.4 // indirect
|
||||||
|
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
|
||||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||||
@@ -143,7 +147,6 @@ require (
|
|||||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 // indirect
|
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 // indirect
|
||||||
golang.org/x/mod v0.31.0 // indirect
|
golang.org/x/mod v0.31.0 // indirect
|
||||||
golang.org/x/net v0.48.0 // indirect
|
golang.org/x/net v0.48.0 // indirect
|
||||||
golang.org/x/oauth2 v0.34.0 // indirect
|
|
||||||
golang.org/x/sync v0.19.0 // indirect
|
golang.org/x/sync v0.19.0 // indirect
|
||||||
golang.org/x/sys v0.39.0 // indirect
|
golang.org/x/sys v0.39.0 // indirect
|
||||||
golang.org/x/text v0.32.0 // indirect
|
golang.org/x/text v0.32.0 // indirect
|
||||||
|
|||||||
6
go.sum
6
go.sum
@@ -120,6 +120,8 @@ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
|||||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
|
github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8=
|
||||||
|
github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
@@ -173,6 +175,8 @@ github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ
|
|||||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||||
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
||||||
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||||
|
github.com/mark3labs/mcp-go v0.46.0 h1:8KRibF4wcKejbLsHxCA/QBVUr5fQ9nwz/n8lGqmaALo=
|
||||||
|
github.com/mark3labs/mcp-go v0.46.0/go.mod h1:JKTC7R2LLVagkEWK7Kwu7DbmA6iIvnNAod6yrHiQMag=
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
|
github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
|
||||||
@@ -326,6 +330,8 @@ github.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=
|
|||||||
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
||||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||||
|
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
|
||||||
|
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
|
||||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
|||||||
362
openapi.yaml
362
openapi.yaml
@@ -1,362 +0,0 @@
|
|||||||
openapi: 3.0.0
|
|
||||||
info:
|
|
||||||
title: ResolveSpec API
|
|
||||||
version: '1.0'
|
|
||||||
description: A flexible REST API with GraphQL-like capabilities
|
|
||||||
|
|
||||||
servers:
|
|
||||||
- url: 'http://api.example.com/v1'
|
|
||||||
|
|
||||||
paths:
|
|
||||||
'/{schema}/{entity}':
|
|
||||||
parameters:
|
|
||||||
- name: schema
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
- name: entity
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
get:
|
|
||||||
summary: Get table metadata
|
|
||||||
description: Retrieve table metadata including columns, types, and relationships
|
|
||||||
responses:
|
|
||||||
'200':
|
|
||||||
description: Successful operation
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
allOf:
|
|
||||||
- $ref: '#/components/schemas/Response'
|
|
||||||
- type: object
|
|
||||||
properties:
|
|
||||||
data:
|
|
||||||
$ref: '#/components/schemas/TableMetadata'
|
|
||||||
'400':
|
|
||||||
$ref: '#/components/responses/BadRequest'
|
|
||||||
'404':
|
|
||||||
$ref: '#/components/responses/NotFound'
|
|
||||||
'500':
|
|
||||||
$ref: '#/components/responses/ServerError'
|
|
||||||
post:
|
|
||||||
summary: Perform operations on entities
|
|
||||||
requestBody:
|
|
||||||
required: true
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Request'
|
|
||||||
responses:
|
|
||||||
'200':
|
|
||||||
description: Successful operation
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Response'
|
|
||||||
'400':
|
|
||||||
$ref: '#/components/responses/BadRequest'
|
|
||||||
'404':
|
|
||||||
$ref: '#/components/responses/NotFound'
|
|
||||||
'500':
|
|
||||||
$ref: '#/components/responses/ServerError'
|
|
||||||
|
|
||||||
'/{schema}/{entity}/{id}':
|
|
||||||
parameters:
|
|
||||||
- name: schema
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
- name: entity
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
- name: id
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
post:
|
|
||||||
summary: Perform operations on a specific entity
|
|
||||||
requestBody:
|
|
||||||
required: true
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Request'
|
|
||||||
responses:
|
|
||||||
'200':
|
|
||||||
description: Successful operation
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Response'
|
|
||||||
'400':
|
|
||||||
$ref: '#/components/responses/BadRequest'
|
|
||||||
'404':
|
|
||||||
$ref: '#/components/responses/NotFound'
|
|
||||||
'500':
|
|
||||||
$ref: '#/components/responses/ServerError'
|
|
||||||
|
|
||||||
components:
|
|
||||||
schemas:
|
|
||||||
Request:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- operation
|
|
||||||
properties:
|
|
||||||
operation:
|
|
||||||
type: string
|
|
||||||
enum:
|
|
||||||
- read
|
|
||||||
- create
|
|
||||||
- update
|
|
||||||
- delete
|
|
||||||
id:
|
|
||||||
oneOf:
|
|
||||||
- type: string
|
|
||||||
- type: array
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
description: Optional record identifier(s) when not provided in URL
|
|
||||||
data:
|
|
||||||
oneOf:
|
|
||||||
- type: object
|
|
||||||
- type: array
|
|
||||||
items:
|
|
||||||
type: object
|
|
||||||
description: Data for single or bulk create/update operations
|
|
||||||
options:
|
|
||||||
$ref: '#/components/schemas/Options'
|
|
||||||
|
|
||||||
Options:
|
|
||||||
type: object
|
|
||||||
properties:
|
|
||||||
preload:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/PreloadOption'
|
|
||||||
columns:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
filters:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/FilterOption'
|
|
||||||
sort:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/SortOption'
|
|
||||||
limit:
|
|
||||||
type: integer
|
|
||||||
minimum: 0
|
|
||||||
offset:
|
|
||||||
type: integer
|
|
||||||
minimum: 0
|
|
||||||
customOperators:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/CustomOperator'
|
|
||||||
computedColumns:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/ComputedColumn'
|
|
||||||
|
|
||||||
PreloadOption:
|
|
||||||
type: object
|
|
||||||
properties:
|
|
||||||
relation:
|
|
||||||
type: string
|
|
||||||
columns:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
filters:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/FilterOption'
|
|
||||||
|
|
||||||
FilterOption:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- column
|
|
||||||
- operator
|
|
||||||
- value
|
|
||||||
properties:
|
|
||||||
column:
|
|
||||||
type: string
|
|
||||||
operator:
|
|
||||||
type: string
|
|
||||||
enum:
|
|
||||||
- eq
|
|
||||||
- neq
|
|
||||||
- gt
|
|
||||||
- gte
|
|
||||||
- lt
|
|
||||||
- lte
|
|
||||||
- like
|
|
||||||
- ilike
|
|
||||||
- in
|
|
||||||
value:
|
|
||||||
type: object
|
|
||||||
|
|
||||||
SortOption:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- column
|
|
||||||
- direction
|
|
||||||
properties:
|
|
||||||
column:
|
|
||||||
type: string
|
|
||||||
direction:
|
|
||||||
type: string
|
|
||||||
enum:
|
|
||||||
- asc
|
|
||||||
- desc
|
|
||||||
|
|
||||||
CustomOperator:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- name
|
|
||||||
- sql
|
|
||||||
properties:
|
|
||||||
name:
|
|
||||||
type: string
|
|
||||||
sql:
|
|
||||||
type: string
|
|
||||||
|
|
||||||
ComputedColumn:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- name
|
|
||||||
- expression
|
|
||||||
properties:
|
|
||||||
name:
|
|
||||||
type: string
|
|
||||||
expression:
|
|
||||||
type: string
|
|
||||||
|
|
||||||
Response:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- success
|
|
||||||
properties:
|
|
||||||
success:
|
|
||||||
type: boolean
|
|
||||||
data:
|
|
||||||
type: object
|
|
||||||
metadata:
|
|
||||||
$ref: '#/components/schemas/Metadata'
|
|
||||||
error:
|
|
||||||
$ref: '#/components/schemas/Error'
|
|
||||||
|
|
||||||
Metadata:
|
|
||||||
type: object
|
|
||||||
properties:
|
|
||||||
total:
|
|
||||||
type: integer
|
|
||||||
filtered:
|
|
||||||
type: integer
|
|
||||||
limit:
|
|
||||||
type: integer
|
|
||||||
offset:
|
|
||||||
type: integer
|
|
||||||
|
|
||||||
Error:
|
|
||||||
type: object
|
|
||||||
properties:
|
|
||||||
code:
|
|
||||||
type: string
|
|
||||||
message:
|
|
||||||
type: string
|
|
||||||
details:
|
|
||||||
type: object
|
|
||||||
|
|
||||||
TableMetadata:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- schema
|
|
||||||
- table
|
|
||||||
- columns
|
|
||||||
- relations
|
|
||||||
properties:
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
description: Schema name
|
|
||||||
table:
|
|
||||||
type: string
|
|
||||||
description: Table name
|
|
||||||
columns:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/Column'
|
|
||||||
relations:
|
|
||||||
type: array
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
description: List of relation names
|
|
||||||
|
|
||||||
Column:
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- name
|
|
||||||
- type
|
|
||||||
- is_nullable
|
|
||||||
- is_primary
|
|
||||||
- is_unique
|
|
||||||
- has_index
|
|
||||||
properties:
|
|
||||||
name:
|
|
||||||
type: string
|
|
||||||
description: Column name
|
|
||||||
type:
|
|
||||||
type: string
|
|
||||||
description: Data type of the column
|
|
||||||
is_nullable:
|
|
||||||
type: boolean
|
|
||||||
description: Whether the column can contain null values
|
|
||||||
is_primary:
|
|
||||||
type: boolean
|
|
||||||
description: Whether the column is a primary key
|
|
||||||
is_unique:
|
|
||||||
type: boolean
|
|
||||||
description: Whether the column has a unique constraint
|
|
||||||
has_index:
|
|
||||||
type: boolean
|
|
||||||
description: Whether the column is indexed
|
|
||||||
|
|
||||||
responses:
|
|
||||||
BadRequest:
|
|
||||||
description: Bad request
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Response'
|
|
||||||
|
|
||||||
NotFound:
|
|
||||||
description: Resource not found
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Response'
|
|
||||||
|
|
||||||
ServerError:
|
|
||||||
description: Internal server error
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/Response'
|
|
||||||
|
|
||||||
securitySchemes:
|
|
||||||
bearerAuth:
|
|
||||||
type: http
|
|
||||||
scheme: bearer
|
|
||||||
bearerFormat: JWT
|
|
||||||
|
|
||||||
security:
|
|
||||||
- bearerAuth: []
|
|
||||||
@@ -95,11 +95,15 @@ func debugScanIntoStruct(rows interface{}, dest interface{}) error {
|
|||||||
// This demonstrates how the abstraction works with different ORMs
|
// This demonstrates how the abstraction works with different ORMs
|
||||||
type BunAdapter struct {
|
type BunAdapter struct {
|
||||||
db *bun.DB
|
db *bun.DB
|
||||||
|
driverName string
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewBunAdapter creates a new Bun adapter
|
// NewBunAdapter creates a new Bun adapter
|
||||||
func NewBunAdapter(db *bun.DB) *BunAdapter {
|
func NewBunAdapter(db *bun.DB) *BunAdapter {
|
||||||
return &BunAdapter{db: db}
|
adapter := &BunAdapter{db: db}
|
||||||
|
// Initialize driver name
|
||||||
|
adapter.driverName = adapter.DriverName()
|
||||||
|
return adapter
|
||||||
}
|
}
|
||||||
|
|
||||||
// EnableQueryDebug enables query debugging which logs all SQL queries including preloads
|
// EnableQueryDebug enables query debugging which logs all SQL queries including preloads
|
||||||
@@ -128,6 +132,7 @@ func (b *BunAdapter) NewSelect() common.SelectQuery {
|
|||||||
return &BunSelectQuery{
|
return &BunSelectQuery{
|
||||||
query: b.db.NewSelect(),
|
query: b.db.NewSelect(),
|
||||||
db: b.db,
|
db: b.db,
|
||||||
|
driverName: b.driverName,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -168,7 +173,7 @@ func (b *BunAdapter) BeginTx(ctx context.Context) (common.Database, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// For Bun, we'll return a special wrapper that holds the transaction
|
// For Bun, we'll return a special wrapper that holds the transaction
|
||||||
return &BunTxAdapter{tx: tx}, nil
|
return &BunTxAdapter{tx: tx, driverName: b.driverName}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *BunAdapter) CommitTx(ctx context.Context) error {
|
func (b *BunAdapter) CommitTx(ctx context.Context) error {
|
||||||
@@ -191,7 +196,7 @@ func (b *BunAdapter) RunInTransaction(ctx context.Context, fn func(common.Databa
|
|||||||
}()
|
}()
|
||||||
return b.db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error {
|
return b.db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error {
|
||||||
// Create adapter with transaction
|
// Create adapter with transaction
|
||||||
adapter := &BunTxAdapter{tx: tx}
|
adapter := &BunTxAdapter{tx: tx, driverName: b.driverName}
|
||||||
return fn(adapter)
|
return fn(adapter)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -200,6 +205,20 @@ func (b *BunAdapter) GetUnderlyingDB() interface{} {
|
|||||||
return b.db
|
return b.db
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *BunAdapter) DriverName() string {
|
||||||
|
// Normalize Bun's dialect name to match the project's canonical vocabulary.
|
||||||
|
// Bun returns "pg" for PostgreSQL; the rest of the project uses "postgres".
|
||||||
|
// Bun returns "sqlite3" for SQLite; we normalize to "sqlite".
|
||||||
|
switch name := b.db.Dialect().Name().String(); name {
|
||||||
|
case "pg":
|
||||||
|
return "postgres"
|
||||||
|
case "sqlite3":
|
||||||
|
return "sqlite"
|
||||||
|
default:
|
||||||
|
return name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// BunSelectQuery implements SelectQuery for Bun
|
// BunSelectQuery implements SelectQuery for Bun
|
||||||
type BunSelectQuery struct {
|
type BunSelectQuery struct {
|
||||||
query *bun.SelectQuery
|
query *bun.SelectQuery
|
||||||
@@ -208,6 +227,7 @@ type BunSelectQuery struct {
|
|||||||
schema string // Separated schema name
|
schema string // Separated schema name
|
||||||
tableName string // Just the table name, without schema
|
tableName string // Just the table name, without schema
|
||||||
tableAlias string
|
tableAlias string
|
||||||
|
driverName string // Database driver name (postgres, sqlite, mssql)
|
||||||
inJoinContext bool // Track if we're in a JOIN relation context
|
inJoinContext bool // Track if we're in a JOIN relation context
|
||||||
joinTableAlias string // Alias to use for JOIN conditions
|
joinTableAlias string // Alias to use for JOIN conditions
|
||||||
skipAutoDetect bool // Skip auto-detection to prevent circular calls
|
skipAutoDetect bool // Skip auto-detection to prevent circular calls
|
||||||
@@ -222,7 +242,8 @@ func (b *BunSelectQuery) Model(model interface{}) common.SelectQuery {
|
|||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
fullTableName := provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
// Check if the table name contains schema (e.g., "schema.table")
|
// Check if the table name contains schema (e.g., "schema.table")
|
||||||
b.schema, b.tableName = parseTableName(fullTableName)
|
// For SQLite, this will convert "schema.table" to "schema_table"
|
||||||
|
b.schema, b.tableName = parseTableName(fullTableName, b.driverName)
|
||||||
}
|
}
|
||||||
|
|
||||||
if provider, ok := model.(common.TableAliasProvider); ok {
|
if provider, ok := model.(common.TableAliasProvider); ok {
|
||||||
@@ -235,7 +256,8 @@ func (b *BunSelectQuery) Model(model interface{}) common.SelectQuery {
|
|||||||
func (b *BunSelectQuery) Table(table string) common.SelectQuery {
|
func (b *BunSelectQuery) Table(table string) common.SelectQuery {
|
||||||
b.query = b.query.Table(table)
|
b.query = b.query.Table(table)
|
||||||
// Check if the table name contains schema (e.g., "schema.table")
|
// Check if the table name contains schema (e.g., "schema.table")
|
||||||
b.schema, b.tableName = parseTableName(table)
|
// For SQLite, this will convert "schema.table" to "schema_table"
|
||||||
|
b.schema, b.tableName = parseTableName(table, b.driverName)
|
||||||
return b
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -543,6 +565,7 @@ func (b *BunSelectQuery) PreloadRelation(relation string, apply ...func(common.S
|
|||||||
wrapper := &BunSelectQuery{
|
wrapper := &BunSelectQuery{
|
||||||
query: sq,
|
query: sq,
|
||||||
db: b.db,
|
db: b.db,
|
||||||
|
driverName: b.driverName,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try to extract table name and alias from the preload model
|
// Try to extract table name and alias from the preload model
|
||||||
@@ -552,7 +575,8 @@ func (b *BunSelectQuery) PreloadRelation(relation string, apply ...func(common.S
|
|||||||
// Extract table name if model implements TableNameProvider
|
// Extract table name if model implements TableNameProvider
|
||||||
if provider, ok := modelValue.(common.TableNameProvider); ok {
|
if provider, ok := modelValue.(common.TableNameProvider); ok {
|
||||||
fullTableName := provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
wrapper.schema, wrapper.tableName = parseTableName(fullTableName)
|
// For SQLite, this will convert "schema.table" to "schema_table"
|
||||||
|
wrapper.schema, wrapper.tableName = parseTableName(fullTableName, b.driverName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extract table alias if model implements TableAliasProvider
|
// Extract table alias if model implements TableAliasProvider
|
||||||
@@ -792,7 +816,7 @@ func (b *BunSelectQuery) loadRelationLevel(ctx context.Context, parentRecords re
|
|||||||
|
|
||||||
// Apply user's functions (if any)
|
// Apply user's functions (if any)
|
||||||
if isLast && len(applyFuncs) > 0 {
|
if isLast && len(applyFuncs) > 0 {
|
||||||
wrapper := &BunSelectQuery{query: query, db: b.db}
|
wrapper := &BunSelectQuery{query: query, db: b.db, driverName: b.driverName}
|
||||||
for _, fn := range applyFuncs {
|
for _, fn := range applyFuncs {
|
||||||
if fn != nil {
|
if fn != nil {
|
||||||
wrapper = fn(wrapper).(*BunSelectQuery)
|
wrapper = fn(wrapper).(*BunSelectQuery)
|
||||||
@@ -1478,12 +1502,14 @@ func (b *BunResult) LastInsertId() (int64, error) {
|
|||||||
// BunTxAdapter wraps a Bun transaction to implement the Database interface
|
// BunTxAdapter wraps a Bun transaction to implement the Database interface
|
||||||
type BunTxAdapter struct {
|
type BunTxAdapter struct {
|
||||||
tx bun.Tx
|
tx bun.Tx
|
||||||
|
driverName string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *BunTxAdapter) NewSelect() common.SelectQuery {
|
func (b *BunTxAdapter) NewSelect() common.SelectQuery {
|
||||||
return &BunSelectQuery{
|
return &BunSelectQuery{
|
||||||
query: b.tx.NewSelect(),
|
query: b.tx.NewSelect(),
|
||||||
db: b.tx,
|
db: b.tx,
|
||||||
|
driverName: b.driverName,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1527,3 +1553,7 @@ func (b *BunTxAdapter) RunInTransaction(ctx context.Context, fn func(common.Data
|
|||||||
func (b *BunTxAdapter) GetUnderlyingDB() interface{} {
|
func (b *BunTxAdapter) GetUnderlyingDB() interface{} {
|
||||||
return b.tx
|
return b.tx
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *BunTxAdapter) DriverName() string {
|
||||||
|
return b.driverName
|
||||||
|
}
|
||||||
|
|||||||
@@ -16,11 +16,15 @@ import (
|
|||||||
// GormAdapter adapts GORM to work with our Database interface
|
// GormAdapter adapts GORM to work with our Database interface
|
||||||
type GormAdapter struct {
|
type GormAdapter struct {
|
||||||
db *gorm.DB
|
db *gorm.DB
|
||||||
|
driverName string
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewGormAdapter creates a new GORM adapter
|
// NewGormAdapter creates a new GORM adapter
|
||||||
func NewGormAdapter(db *gorm.DB) *GormAdapter {
|
func NewGormAdapter(db *gorm.DB) *GormAdapter {
|
||||||
return &GormAdapter{db: db}
|
adapter := &GormAdapter{db: db}
|
||||||
|
// Initialize driver name
|
||||||
|
adapter.driverName = adapter.DriverName()
|
||||||
|
return adapter
|
||||||
}
|
}
|
||||||
|
|
||||||
// EnableQueryDebug enables query debugging which logs all SQL queries including preloads
|
// EnableQueryDebug enables query debugging which logs all SQL queries including preloads
|
||||||
@@ -40,7 +44,7 @@ func (g *GormAdapter) DisableQueryDebug() *GormAdapter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (g *GormAdapter) NewSelect() common.SelectQuery {
|
func (g *GormAdapter) NewSelect() common.SelectQuery {
|
||||||
return &GormSelectQuery{db: g.db}
|
return &GormSelectQuery{db: g.db, driverName: g.driverName}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (g *GormAdapter) NewInsert() common.InsertQuery {
|
func (g *GormAdapter) NewInsert() common.InsertQuery {
|
||||||
@@ -79,7 +83,7 @@ func (g *GormAdapter) BeginTx(ctx context.Context) (common.Database, error) {
|
|||||||
if tx.Error != nil {
|
if tx.Error != nil {
|
||||||
return nil, tx.Error
|
return nil, tx.Error
|
||||||
}
|
}
|
||||||
return &GormAdapter{db: tx}, nil
|
return &GormAdapter{db: tx, driverName: g.driverName}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (g *GormAdapter) CommitTx(ctx context.Context) error {
|
func (g *GormAdapter) CommitTx(ctx context.Context) error {
|
||||||
@@ -97,7 +101,7 @@ func (g *GormAdapter) RunInTransaction(ctx context.Context, fn func(common.Datab
|
|||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
return g.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
|
return g.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
|
||||||
adapter := &GormAdapter{db: tx}
|
adapter := &GormAdapter{db: tx, driverName: g.driverName}
|
||||||
return fn(adapter)
|
return fn(adapter)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -106,12 +110,30 @@ func (g *GormAdapter) GetUnderlyingDB() interface{} {
|
|||||||
return g.db
|
return g.db
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (g *GormAdapter) DriverName() string {
|
||||||
|
if g.db.Dialector == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
// Normalize GORM's dialector name to match the project's canonical vocabulary.
|
||||||
|
// GORM returns "sqlserver" for MSSQL; the rest of the project uses "mssql".
|
||||||
|
// GORM returns "sqlite" or "sqlite3" for SQLite; we normalize to "sqlite".
|
||||||
|
switch name := g.db.Name(); name {
|
||||||
|
case "sqlserver":
|
||||||
|
return "mssql"
|
||||||
|
case "sqlite3":
|
||||||
|
return "sqlite"
|
||||||
|
default:
|
||||||
|
return name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// GormSelectQuery implements SelectQuery for GORM
|
// GormSelectQuery implements SelectQuery for GORM
|
||||||
type GormSelectQuery struct {
|
type GormSelectQuery struct {
|
||||||
db *gorm.DB
|
db *gorm.DB
|
||||||
schema string // Separated schema name
|
schema string // Separated schema name
|
||||||
tableName string // Just the table name, without schema
|
tableName string // Just the table name, without schema
|
||||||
tableAlias string
|
tableAlias string
|
||||||
|
driverName string // Database driver name (postgres, sqlite, mssql)
|
||||||
inJoinContext bool // Track if we're in a JOIN relation context
|
inJoinContext bool // Track if we're in a JOIN relation context
|
||||||
joinTableAlias string // Alias to use for JOIN conditions
|
joinTableAlias string // Alias to use for JOIN conditions
|
||||||
}
|
}
|
||||||
@@ -123,7 +145,8 @@ func (g *GormSelectQuery) Model(model interface{}) common.SelectQuery {
|
|||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
fullTableName := provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
// Check if the table name contains schema (e.g., "schema.table")
|
// Check if the table name contains schema (e.g., "schema.table")
|
||||||
g.schema, g.tableName = parseTableName(fullTableName)
|
// For SQLite, this will convert "schema.table" to "schema_table"
|
||||||
|
g.schema, g.tableName = parseTableName(fullTableName, g.driverName)
|
||||||
}
|
}
|
||||||
|
|
||||||
if provider, ok := model.(common.TableAliasProvider); ok {
|
if provider, ok := model.(common.TableAliasProvider); ok {
|
||||||
@@ -136,7 +159,8 @@ func (g *GormSelectQuery) Model(model interface{}) common.SelectQuery {
|
|||||||
func (g *GormSelectQuery) Table(table string) common.SelectQuery {
|
func (g *GormSelectQuery) Table(table string) common.SelectQuery {
|
||||||
g.db = g.db.Table(table)
|
g.db = g.db.Table(table)
|
||||||
// Check if the table name contains schema (e.g., "schema.table")
|
// Check if the table name contains schema (e.g., "schema.table")
|
||||||
g.schema, g.tableName = parseTableName(table)
|
// For SQLite, this will convert "schema.table" to "schema_table"
|
||||||
|
g.schema, g.tableName = parseTableName(table, g.driverName)
|
||||||
|
|
||||||
return g
|
return g
|
||||||
}
|
}
|
||||||
@@ -323,6 +347,7 @@ func (g *GormSelectQuery) PreloadRelation(relation string, apply ...func(common.
|
|||||||
|
|
||||||
wrapper := &GormSelectQuery{
|
wrapper := &GormSelectQuery{
|
||||||
db: db,
|
db: db,
|
||||||
|
driverName: g.driverName,
|
||||||
}
|
}
|
||||||
|
|
||||||
current := common.SelectQuery(wrapper)
|
current := common.SelectQuery(wrapper)
|
||||||
@@ -360,6 +385,7 @@ func (g *GormSelectQuery) JoinRelation(relation string, apply ...func(common.Sel
|
|||||||
|
|
||||||
wrapper := &GormSelectQuery{
|
wrapper := &GormSelectQuery{
|
||||||
db: db,
|
db: db,
|
||||||
|
driverName: g.driverName,
|
||||||
inJoinContext: true, // Mark as JOIN context
|
inJoinContext: true, // Mark as JOIN context
|
||||||
joinTableAlias: strings.ToLower(relation), // Use relation name as alias
|
joinTableAlias: strings.ToLower(relation), // Use relation name as alias
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,11 +17,18 @@ import (
|
|||||||
// This provides a lightweight PostgreSQL adapter without ORM overhead
|
// This provides a lightweight PostgreSQL adapter without ORM overhead
|
||||||
type PgSQLAdapter struct {
|
type PgSQLAdapter struct {
|
||||||
db *sql.DB
|
db *sql.DB
|
||||||
|
driverName string
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewPgSQLAdapter creates a new PostgreSQL adapter
|
// NewPgSQLAdapter creates a new adapter wrapping a standard sql.DB.
|
||||||
func NewPgSQLAdapter(db *sql.DB) *PgSQLAdapter {
|
// An optional driverName (e.g. "postgres", "sqlite", "mssql") can be provided;
|
||||||
return &PgSQLAdapter{db: db}
|
// it defaults to "postgres" when omitted.
|
||||||
|
func NewPgSQLAdapter(db *sql.DB, driverName ...string) *PgSQLAdapter {
|
||||||
|
name := "postgres"
|
||||||
|
if len(driverName) > 0 && driverName[0] != "" {
|
||||||
|
name = driverName[0]
|
||||||
|
}
|
||||||
|
return &PgSQLAdapter{db: db, driverName: name}
|
||||||
}
|
}
|
||||||
|
|
||||||
// EnableQueryDebug enables query debugging for development
|
// EnableQueryDebug enables query debugging for development
|
||||||
@@ -32,6 +39,7 @@ func (p *PgSQLAdapter) EnableQueryDebug() {
|
|||||||
func (p *PgSQLAdapter) NewSelect() common.SelectQuery {
|
func (p *PgSQLAdapter) NewSelect() common.SelectQuery {
|
||||||
return &PgSQLSelectQuery{
|
return &PgSQLSelectQuery{
|
||||||
db: p.db,
|
db: p.db,
|
||||||
|
driverName: p.driverName,
|
||||||
columns: []string{"*"},
|
columns: []string{"*"},
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
}
|
}
|
||||||
@@ -40,6 +48,7 @@ func (p *PgSQLAdapter) NewSelect() common.SelectQuery {
|
|||||||
func (p *PgSQLAdapter) NewInsert() common.InsertQuery {
|
func (p *PgSQLAdapter) NewInsert() common.InsertQuery {
|
||||||
return &PgSQLInsertQuery{
|
return &PgSQLInsertQuery{
|
||||||
db: p.db,
|
db: p.db,
|
||||||
|
driverName: p.driverName,
|
||||||
values: make(map[string]interface{}),
|
values: make(map[string]interface{}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -47,6 +56,7 @@ func (p *PgSQLAdapter) NewInsert() common.InsertQuery {
|
|||||||
func (p *PgSQLAdapter) NewUpdate() common.UpdateQuery {
|
func (p *PgSQLAdapter) NewUpdate() common.UpdateQuery {
|
||||||
return &PgSQLUpdateQuery{
|
return &PgSQLUpdateQuery{
|
||||||
db: p.db,
|
db: p.db,
|
||||||
|
driverName: p.driverName,
|
||||||
sets: make(map[string]interface{}),
|
sets: make(map[string]interface{}),
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
whereClauses: make([]string, 0),
|
whereClauses: make([]string, 0),
|
||||||
@@ -56,6 +66,7 @@ func (p *PgSQLAdapter) NewUpdate() common.UpdateQuery {
|
|||||||
func (p *PgSQLAdapter) NewDelete() common.DeleteQuery {
|
func (p *PgSQLAdapter) NewDelete() common.DeleteQuery {
|
||||||
return &PgSQLDeleteQuery{
|
return &PgSQLDeleteQuery{
|
||||||
db: p.db,
|
db: p.db,
|
||||||
|
driverName: p.driverName,
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
whereClauses: make([]string, 0),
|
whereClauses: make([]string, 0),
|
||||||
}
|
}
|
||||||
@@ -98,7 +109,7 @@ func (p *PgSQLAdapter) BeginTx(ctx context.Context) (common.Database, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return &PgSQLTxAdapter{tx: tx}, nil
|
return &PgSQLTxAdapter{tx: tx, driverName: p.driverName}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLAdapter) CommitTx(ctx context.Context) error {
|
func (p *PgSQLAdapter) CommitTx(ctx context.Context) error {
|
||||||
@@ -121,7 +132,7 @@ func (p *PgSQLAdapter) RunInTransaction(ctx context.Context, fn func(common.Data
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
adapter := &PgSQLTxAdapter{tx: tx}
|
adapter := &PgSQLTxAdapter{tx: tx, driverName: p.driverName}
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if p := recover(); p != nil {
|
if p := recover(); p != nil {
|
||||||
@@ -141,6 +152,10 @@ func (p *PgSQLAdapter) GetUnderlyingDB() interface{} {
|
|||||||
return p.db
|
return p.db
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *PgSQLAdapter) DriverName() string {
|
||||||
|
return p.driverName
|
||||||
|
}
|
||||||
|
|
||||||
// preloadConfig represents a relationship to be preloaded
|
// preloadConfig represents a relationship to be preloaded
|
||||||
type preloadConfig struct {
|
type preloadConfig struct {
|
||||||
relation string
|
relation string
|
||||||
@@ -165,6 +180,7 @@ type PgSQLSelectQuery struct {
|
|||||||
model interface{}
|
model interface{}
|
||||||
tableName string
|
tableName string
|
||||||
tableAlias string
|
tableAlias string
|
||||||
|
driverName string // Database driver name (postgres, sqlite, mssql)
|
||||||
columns []string
|
columns []string
|
||||||
columnExprs []string
|
columnExprs []string
|
||||||
whereClauses []string
|
whereClauses []string
|
||||||
@@ -183,7 +199,9 @@ type PgSQLSelectQuery struct {
|
|||||||
func (p *PgSQLSelectQuery) Model(model interface{}) common.SelectQuery {
|
func (p *PgSQLSelectQuery) Model(model interface{}) common.SelectQuery {
|
||||||
p.model = model
|
p.model = model
|
||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
p.tableName = provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(fullTableName, p.driverName)
|
||||||
}
|
}
|
||||||
if provider, ok := model.(common.TableAliasProvider); ok {
|
if provider, ok := model.(common.TableAliasProvider); ok {
|
||||||
p.tableAlias = provider.TableAlias()
|
p.tableAlias = provider.TableAlias()
|
||||||
@@ -192,7 +210,8 @@ func (p *PgSQLSelectQuery) Model(model interface{}) common.SelectQuery {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLSelectQuery) Table(table string) common.SelectQuery {
|
func (p *PgSQLSelectQuery) Table(table string) common.SelectQuery {
|
||||||
p.tableName = table
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(table, p.driverName)
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -375,12 +394,12 @@ func (p *PgSQLSelectQuery) buildSQL() string {
|
|||||||
|
|
||||||
// LIMIT clause
|
// LIMIT clause
|
||||||
if p.limit > 0 {
|
if p.limit > 0 {
|
||||||
sb.WriteString(fmt.Sprintf(" LIMIT %d", p.limit))
|
fmt.Fprintf(&sb, " LIMIT %d", p.limit)
|
||||||
}
|
}
|
||||||
|
|
||||||
// OFFSET clause
|
// OFFSET clause
|
||||||
if p.offset > 0 {
|
if p.offset > 0 {
|
||||||
sb.WriteString(fmt.Sprintf(" OFFSET %d", p.offset))
|
fmt.Fprintf(&sb, " OFFSET %d", p.offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
return sb.String()
|
return sb.String()
|
||||||
@@ -504,13 +523,16 @@ type PgSQLInsertQuery struct {
|
|||||||
db *sql.DB
|
db *sql.DB
|
||||||
tx *sql.Tx
|
tx *sql.Tx
|
||||||
tableName string
|
tableName string
|
||||||
|
driverName string
|
||||||
values map[string]interface{}
|
values map[string]interface{}
|
||||||
returning []string
|
returning []string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLInsertQuery) Model(model interface{}) common.InsertQuery {
|
func (p *PgSQLInsertQuery) Model(model interface{}) common.InsertQuery {
|
||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
p.tableName = provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(fullTableName, p.driverName)
|
||||||
}
|
}
|
||||||
// Extract values from model using reflection
|
// Extract values from model using reflection
|
||||||
// This is a simplified implementation
|
// This is a simplified implementation
|
||||||
@@ -518,7 +540,8 @@ func (p *PgSQLInsertQuery) Model(model interface{}) common.InsertQuery {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLInsertQuery) Table(table string) common.InsertQuery {
|
func (p *PgSQLInsertQuery) Table(table string) common.InsertQuery {
|
||||||
p.tableName = table
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(table, p.driverName)
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -591,6 +614,7 @@ type PgSQLUpdateQuery struct {
|
|||||||
db *sql.DB
|
db *sql.DB
|
||||||
tx *sql.Tx
|
tx *sql.Tx
|
||||||
tableName string
|
tableName string
|
||||||
|
driverName string
|
||||||
model interface{}
|
model interface{}
|
||||||
sets map[string]interface{}
|
sets map[string]interface{}
|
||||||
whereClauses []string
|
whereClauses []string
|
||||||
@@ -602,13 +626,16 @@ type PgSQLUpdateQuery struct {
|
|||||||
func (p *PgSQLUpdateQuery) Model(model interface{}) common.UpdateQuery {
|
func (p *PgSQLUpdateQuery) Model(model interface{}) common.UpdateQuery {
|
||||||
p.model = model
|
p.model = model
|
||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
p.tableName = provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(fullTableName, p.driverName)
|
||||||
}
|
}
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLUpdateQuery) Table(table string) common.UpdateQuery {
|
func (p *PgSQLUpdateQuery) Table(table string) common.UpdateQuery {
|
||||||
p.tableName = table
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(table, p.driverName)
|
||||||
if p.model == nil {
|
if p.model == nil {
|
||||||
model, err := modelregistry.GetModelByName(table)
|
model, err := modelregistry.GetModelByName(table)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@@ -749,6 +776,7 @@ type PgSQLDeleteQuery struct {
|
|||||||
db *sql.DB
|
db *sql.DB
|
||||||
tx *sql.Tx
|
tx *sql.Tx
|
||||||
tableName string
|
tableName string
|
||||||
|
driverName string
|
||||||
whereClauses []string
|
whereClauses []string
|
||||||
args []interface{}
|
args []interface{}
|
||||||
paramCounter int
|
paramCounter int
|
||||||
@@ -756,13 +784,16 @@ type PgSQLDeleteQuery struct {
|
|||||||
|
|
||||||
func (p *PgSQLDeleteQuery) Model(model interface{}) common.DeleteQuery {
|
func (p *PgSQLDeleteQuery) Model(model interface{}) common.DeleteQuery {
|
||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
p.tableName = provider.TableName()
|
fullTableName := provider.TableName()
|
||||||
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(fullTableName, p.driverName)
|
||||||
}
|
}
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLDeleteQuery) Table(table string) common.DeleteQuery {
|
func (p *PgSQLDeleteQuery) Table(table string) common.DeleteQuery {
|
||||||
p.tableName = table
|
// For SQLite, convert "schema.table" to "schema_table"
|
||||||
|
_, p.tableName = parseTableName(table, p.driverName)
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -836,11 +867,13 @@ func (p *PgSQLResult) LastInsertId() (int64, error) {
|
|||||||
// PgSQLTxAdapter wraps a PostgreSQL transaction
|
// PgSQLTxAdapter wraps a PostgreSQL transaction
|
||||||
type PgSQLTxAdapter struct {
|
type PgSQLTxAdapter struct {
|
||||||
tx *sql.Tx
|
tx *sql.Tx
|
||||||
|
driverName string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *PgSQLTxAdapter) NewSelect() common.SelectQuery {
|
func (p *PgSQLTxAdapter) NewSelect() common.SelectQuery {
|
||||||
return &PgSQLSelectQuery{
|
return &PgSQLSelectQuery{
|
||||||
tx: p.tx,
|
tx: p.tx,
|
||||||
|
driverName: p.driverName,
|
||||||
columns: []string{"*"},
|
columns: []string{"*"},
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
}
|
}
|
||||||
@@ -849,6 +882,7 @@ func (p *PgSQLTxAdapter) NewSelect() common.SelectQuery {
|
|||||||
func (p *PgSQLTxAdapter) NewInsert() common.InsertQuery {
|
func (p *PgSQLTxAdapter) NewInsert() common.InsertQuery {
|
||||||
return &PgSQLInsertQuery{
|
return &PgSQLInsertQuery{
|
||||||
tx: p.tx,
|
tx: p.tx,
|
||||||
|
driverName: p.driverName,
|
||||||
values: make(map[string]interface{}),
|
values: make(map[string]interface{}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -856,6 +890,7 @@ func (p *PgSQLTxAdapter) NewInsert() common.InsertQuery {
|
|||||||
func (p *PgSQLTxAdapter) NewUpdate() common.UpdateQuery {
|
func (p *PgSQLTxAdapter) NewUpdate() common.UpdateQuery {
|
||||||
return &PgSQLUpdateQuery{
|
return &PgSQLUpdateQuery{
|
||||||
tx: p.tx,
|
tx: p.tx,
|
||||||
|
driverName: p.driverName,
|
||||||
sets: make(map[string]interface{}),
|
sets: make(map[string]interface{}),
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
whereClauses: make([]string, 0),
|
whereClauses: make([]string, 0),
|
||||||
@@ -865,6 +900,7 @@ func (p *PgSQLTxAdapter) NewUpdate() common.UpdateQuery {
|
|||||||
func (p *PgSQLTxAdapter) NewDelete() common.DeleteQuery {
|
func (p *PgSQLTxAdapter) NewDelete() common.DeleteQuery {
|
||||||
return &PgSQLDeleteQuery{
|
return &PgSQLDeleteQuery{
|
||||||
tx: p.tx,
|
tx: p.tx,
|
||||||
|
driverName: p.driverName,
|
||||||
args: make([]interface{}, 0),
|
args: make([]interface{}, 0),
|
||||||
whereClauses: make([]string, 0),
|
whereClauses: make([]string, 0),
|
||||||
}
|
}
|
||||||
@@ -912,6 +948,10 @@ func (p *PgSQLTxAdapter) GetUnderlyingDB() interface{} {
|
|||||||
return p.tx
|
return p.tx
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *PgSQLTxAdapter) DriverName() string {
|
||||||
|
return p.driverName
|
||||||
|
}
|
||||||
|
|
||||||
// applyJoinPreloads adds JOINs for relationships that should use JOIN strategy
|
// applyJoinPreloads adds JOINs for relationships that should use JOIN strategy
|
||||||
func (p *PgSQLSelectQuery) applyJoinPreloads() {
|
func (p *PgSQLSelectQuery) applyJoinPreloads() {
|
||||||
for _, preload := range p.preloads {
|
for _, preload := range p.preloads {
|
||||||
@@ -1036,9 +1076,9 @@ func (p *PgSQLSelectQuery) executePreloadQuery(ctx context.Context, field reflec
|
|||||||
// Create a new select query for the related table
|
// Create a new select query for the related table
|
||||||
var db common.Database
|
var db common.Database
|
||||||
if p.tx != nil {
|
if p.tx != nil {
|
||||||
db = &PgSQLTxAdapter{tx: p.tx}
|
db = &PgSQLTxAdapter{tx: p.tx, driverName: p.driverName}
|
||||||
} else {
|
} else {
|
||||||
db = &PgSQLAdapter{db: p.db}
|
db = &PgSQLAdapter{db: p.db, driverName: p.driverName}
|
||||||
}
|
}
|
||||||
|
|
||||||
query := db.NewSelect().
|
query := db.NewSelect().
|
||||||
|
|||||||
@@ -62,9 +62,20 @@ func checkAliasLength(relation string) bool {
|
|||||||
// For example: "public.users" -> ("public", "users")
|
// For example: "public.users" -> ("public", "users")
|
||||||
//
|
//
|
||||||
// "users" -> ("", "users")
|
// "users" -> ("", "users")
|
||||||
func parseTableName(fullTableName string) (schema, table string) {
|
//
|
||||||
|
// For SQLite, schema.table is translated to schema_table since SQLite doesn't support schemas
|
||||||
|
// in the same way as PostgreSQL/MSSQL
|
||||||
|
func parseTableName(fullTableName, driverName string) (schema, table string) {
|
||||||
if idx := strings.LastIndex(fullTableName, "."); idx != -1 {
|
if idx := strings.LastIndex(fullTableName, "."); idx != -1 {
|
||||||
return fullTableName[:idx], fullTableName[idx+1:]
|
schema = fullTableName[:idx]
|
||||||
|
table = fullTableName[idx+1:]
|
||||||
|
|
||||||
|
// For SQLite, convert schema.table to schema_table
|
||||||
|
if driverName == "sqlite" || driverName == "sqlite3" {
|
||||||
|
table = schema + "_" + table
|
||||||
|
schema = ""
|
||||||
|
}
|
||||||
|
return schema, table
|
||||||
}
|
}
|
||||||
return "", fullTableName
|
return "", fullTableName
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -30,6 +30,12 @@ type Database interface {
|
|||||||
// For Bun, this returns *bun.DB
|
// For Bun, this returns *bun.DB
|
||||||
// This is useful for provider-specific features like PostgreSQL NOTIFY/LISTEN
|
// This is useful for provider-specific features like PostgreSQL NOTIFY/LISTEN
|
||||||
GetUnderlyingDB() interface{}
|
GetUnderlyingDB() interface{}
|
||||||
|
|
||||||
|
// DriverName returns the canonical name of the underlying database driver.
|
||||||
|
// Possible values: "postgres", "sqlite", "mssql", "mysql".
|
||||||
|
// All adapters normalise vendor-specific strings (e.g. Bun's "pg", GORM's
|
||||||
|
// "sqlserver") to the values above before returning.
|
||||||
|
DriverName() string
|
||||||
}
|
}
|
||||||
|
|
||||||
// SelectQuery interface for building SELECT queries (compatible with both GORM and Bun)
|
// SelectQuery interface for building SELECT queries (compatible with both GORM and Bun)
|
||||||
|
|||||||
@@ -50,6 +50,9 @@ func (m *mockDatabase) RollbackTx(ctx context.Context) error {
|
|||||||
func (m *mockDatabase) GetUnderlyingDB() interface{} {
|
func (m *mockDatabase) GetUnderlyingDB() interface{} {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
func (m *mockDatabase) DriverName() string {
|
||||||
|
return "postgres"
|
||||||
|
}
|
||||||
|
|
||||||
// Mock SelectQuery
|
// Mock SelectQuery
|
||||||
type mockSelectQuery struct{}
|
type mockSelectQuery struct{}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package common
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"reflect"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -167,16 +168,17 @@ func SanitizeWhereClause(where string, tableName string, options ...*RequestOpti
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Build a set of allowed table prefixes (main table + preloaded relations)
|
// Build a set of allowed table prefixes (main table + preloaded relations)
|
||||||
|
// Keys are stored lowercase for case-insensitive matching
|
||||||
allowedPrefixes := make(map[string]bool)
|
allowedPrefixes := make(map[string]bool)
|
||||||
if tableName != "" {
|
if tableName != "" {
|
||||||
allowedPrefixes[tableName] = true
|
allowedPrefixes[strings.ToLower(tableName)] = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add preload relation names as allowed prefixes
|
// Add preload relation names as allowed prefixes
|
||||||
if len(options) > 0 && options[0] != nil {
|
if len(options) > 0 && options[0] != nil {
|
||||||
for pi := range options[0].Preload {
|
for pi := range options[0].Preload {
|
||||||
if options[0].Preload[pi].Relation != "" {
|
if options[0].Preload[pi].Relation != "" {
|
||||||
allowedPrefixes[options[0].Preload[pi].Relation] = true
|
allowedPrefixes[strings.ToLower(options[0].Preload[pi].Relation)] = true
|
||||||
logger.Debug("Added preload relation '%s' as allowed table prefix", options[0].Preload[pi].Relation)
|
logger.Debug("Added preload relation '%s' as allowed table prefix", options[0].Preload[pi].Relation)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -184,7 +186,7 @@ func SanitizeWhereClause(where string, tableName string, options ...*RequestOpti
|
|||||||
// Add join aliases as allowed prefixes
|
// Add join aliases as allowed prefixes
|
||||||
for _, alias := range options[0].JoinAliases {
|
for _, alias := range options[0].JoinAliases {
|
||||||
if alias != "" {
|
if alias != "" {
|
||||||
allowedPrefixes[alias] = true
|
allowedPrefixes[strings.ToLower(alias)] = true
|
||||||
logger.Debug("Added join alias '%s' as allowed table prefix", alias)
|
logger.Debug("Added join alias '%s' as allowed table prefix", alias)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -216,8 +218,8 @@ func SanitizeWhereClause(where string, tableName string, options ...*RequestOpti
|
|||||||
currentPrefix, columnName := extractTableAndColumn(condToCheck)
|
currentPrefix, columnName := extractTableAndColumn(condToCheck)
|
||||||
|
|
||||||
if currentPrefix != "" && columnName != "" {
|
if currentPrefix != "" && columnName != "" {
|
||||||
// Check if the prefix is allowed (main table or preload relation)
|
// Check if the prefix is allowed (main table or preload relation) - case-insensitive
|
||||||
if !allowedPrefixes[currentPrefix] {
|
if !allowedPrefixes[strings.ToLower(currentPrefix)] {
|
||||||
// Prefix is not in the allowed list - only fix if it's a valid column in the main table
|
// Prefix is not in the allowed list - only fix if it's a valid column in the main table
|
||||||
if validColumns == nil || isValidColumn(columnName, validColumns) {
|
if validColumns == nil || isValidColumn(columnName, validColumns) {
|
||||||
// Replace the incorrect prefix with the correct main table name
|
// Replace the incorrect prefix with the correct main table name
|
||||||
@@ -925,3 +927,36 @@ func extractLeftSideOfComparison(cond string) string {
|
|||||||
|
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FilterValueToSlice converts a filter value to []interface{} for use with IN operators.
|
||||||
|
// JSON-decoded arrays arrive as []interface{}, but typed slices (e.g. []string) also work.
|
||||||
|
// Returns a single-element slice if the value is not a slice type.
|
||||||
|
func FilterValueToSlice(v interface{}) []interface{} {
|
||||||
|
if v == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
rv := reflect.ValueOf(v)
|
||||||
|
if rv.Kind() == reflect.Slice {
|
||||||
|
result := make([]interface{}, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
result[i] = rv.Index(i).Interface()
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
return []interface{}{v}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BuildInCondition builds a parameterized IN condition from a filter value.
|
||||||
|
// Returns the condition string (e.g. "col IN (?,?)") and the individual values as args.
|
||||||
|
// Returns ("", nil) if the value is empty or not a slice.
|
||||||
|
func BuildInCondition(column string, v interface{}) (query string, args []interface{}) {
|
||||||
|
values := FilterValueToSlice(v)
|
||||||
|
if len(values) == 0 {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
placeholders := make([]string, len(values))
|
||||||
|
for i := range values {
|
||||||
|
placeholders[i] = "?"
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%s IN (%s)", column, strings.Join(placeholders, ",")), values
|
||||||
|
}
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ A comprehensive database connection manager for Go that provides centralized man
|
|||||||
- **GORM** - Popular Go ORM
|
- **GORM** - Popular Go ORM
|
||||||
- **Native** - Standard library `*sql.DB`
|
- **Native** - Standard library `*sql.DB`
|
||||||
- All three share the same underlying connection pool
|
- All three share the same underlying connection pool
|
||||||
|
- **SQLite Schema Translation**: Automatic conversion of `schema.table` to `schema_table` for SQLite compatibility
|
||||||
- **Configuration-Driven**: YAML configuration with Viper integration
|
- **Configuration-Driven**: YAML configuration with Viper integration
|
||||||
- **Production-Ready Features**:
|
- **Production-Ready Features**:
|
||||||
- Automatic health checks and reconnection
|
- Automatic health checks and reconnection
|
||||||
@@ -179,6 +180,35 @@ if err != nil {
|
|||||||
rows, err := nativeDB.QueryContext(ctx, "SELECT * FROM users WHERE active = $1", true)
|
rows, err := nativeDB.QueryContext(ctx, "SELECT * FROM users WHERE active = $1", true)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Cross-Database Example with SQLite
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Same model works across all databases
|
||||||
|
type User struct {
|
||||||
|
ID int `bun:"id,pk"`
|
||||||
|
Username string `bun:"username"`
|
||||||
|
Email string `bun:"email"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (User) TableName() string {
|
||||||
|
return "auth.users"
|
||||||
|
}
|
||||||
|
|
||||||
|
// PostgreSQL connection
|
||||||
|
pgConn, _ := mgr.Get("primary")
|
||||||
|
pgDB, _ := pgConn.Bun()
|
||||||
|
var pgUsers []User
|
||||||
|
pgDB.NewSelect().Model(&pgUsers).Scan(ctx)
|
||||||
|
// Executes: SELECT * FROM auth.users
|
||||||
|
|
||||||
|
// SQLite connection
|
||||||
|
sqliteConn, _ := mgr.Get("cache-db")
|
||||||
|
sqliteDB, _ := sqliteConn.Bun()
|
||||||
|
var sqliteUsers []User
|
||||||
|
sqliteDB.NewSelect().Model(&sqliteUsers).Scan(ctx)
|
||||||
|
// Executes: SELECT * FROM auth_users (schema.table → schema_table)
|
||||||
|
```
|
||||||
|
|
||||||
#### Use MongoDB
|
#### Use MongoDB
|
||||||
|
|
||||||
```go
|
```go
|
||||||
@@ -368,6 +398,37 @@ Providers handle:
|
|||||||
- Connection statistics
|
- Connection statistics
|
||||||
- Connection cleanup
|
- Connection cleanup
|
||||||
|
|
||||||
|
### SQLite Schema Handling
|
||||||
|
|
||||||
|
SQLite doesn't support schemas in the same way as PostgreSQL or MSSQL. To ensure compatibility when using models designed for multi-schema databases:
|
||||||
|
|
||||||
|
**Automatic Translation**: When a table name contains a schema prefix (e.g., `myschema.mytable`), it is automatically converted to `myschema_mytable` for SQLite databases.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Model definition (works across all databases)
|
||||||
|
func (User) TableName() string {
|
||||||
|
return "auth.users" // PostgreSQL/MSSQL: "auth"."users"
|
||||||
|
// SQLite: "auth_users"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query execution
|
||||||
|
db.NewSelect().Model(&User{}).Scan(ctx)
|
||||||
|
// PostgreSQL/MSSQL: SELECT * FROM auth.users
|
||||||
|
// SQLite: SELECT * FROM auth_users
|
||||||
|
```
|
||||||
|
|
||||||
|
**How it Works**:
|
||||||
|
- Bun, GORM, and Native adapters detect the driver type
|
||||||
|
- `parseTableName()` automatically translates schema.table → schema_table for SQLite
|
||||||
|
- Translation happens transparently in all database operations (SELECT, INSERT, UPDATE, DELETE)
|
||||||
|
- Preload and relation queries are also handled automatically
|
||||||
|
|
||||||
|
**Benefits**:
|
||||||
|
- Write database-agnostic code
|
||||||
|
- Use the same models across PostgreSQL, MSSQL, and SQLite
|
||||||
|
- No conditional logic needed in your application
|
||||||
|
- Schema separation maintained through naming convention in SQLite
|
||||||
|
|
||||||
## Best Practices
|
## Best Practices
|
||||||
|
|
||||||
1. **Use Named Connections**: Be explicit about which database you're accessing
|
1. **Use Named Connections**: Be explicit about which database you're accessing
|
||||||
|
|||||||
@@ -467,13 +467,11 @@ func (c *sqlConnection) getNativeAdapter() (common.Database, error) {
|
|||||||
// Create a native adapter based on database type
|
// Create a native adapter based on database type
|
||||||
switch c.dbType {
|
switch c.dbType {
|
||||||
case DatabaseTypePostgreSQL:
|
case DatabaseTypePostgreSQL:
|
||||||
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB)
|
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB, string(c.dbType))
|
||||||
case DatabaseTypeSQLite:
|
case DatabaseTypeSQLite:
|
||||||
// For SQLite, we'll use the PgSQL adapter as it works with standard sql.DB
|
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB, string(c.dbType))
|
||||||
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB)
|
|
||||||
case DatabaseTypeMSSQL:
|
case DatabaseTypeMSSQL:
|
||||||
// For MSSQL, we'll use the PgSQL adapter as it works with standard sql.DB
|
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB, string(c.dbType))
|
||||||
c.nativeAdapter = database.NewPgSQLAdapter(c.nativeDB)
|
|
||||||
default:
|
default:
|
||||||
return nil, ErrUnsupportedDatabase
|
return nil, ErrUnsupportedDatabase
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -231,12 +231,14 @@ func (m *connectionManager) Connect(ctx context.Context) error {
|
|||||||
|
|
||||||
// Close closes all database connections
|
// Close closes all database connections
|
||||||
func (m *connectionManager) Close() error {
|
func (m *connectionManager) Close() error {
|
||||||
|
// Stop the health checker before taking mu. performHealthCheck acquires
|
||||||
|
// a read lock, so waiting for the goroutine while holding the write lock
|
||||||
|
// would deadlock.
|
||||||
|
m.stopHealthChecker()
|
||||||
|
|
||||||
m.mu.Lock()
|
m.mu.Lock()
|
||||||
defer m.mu.Unlock()
|
defer m.mu.Unlock()
|
||||||
|
|
||||||
// Stop health checker
|
|
||||||
m.stopHealthChecker()
|
|
||||||
|
|
||||||
// Close all connections
|
// Close all connections
|
||||||
var errors []error
|
var errors []error
|
||||||
for name, conn := range m.connections {
|
for name, conn := range m.connections {
|
||||||
|
|||||||
@@ -74,6 +74,10 @@ func (m *MockDatabase) GetUnderlyingDB() interface{} {
|
|||||||
return m
|
return m
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *MockDatabase) DriverName() string {
|
||||||
|
return "postgres"
|
||||||
|
}
|
||||||
|
|
||||||
// MockResult implements common.Result interface for testing
|
// MockResult implements common.Result interface for testing
|
||||||
type MockResult struct {
|
type MockResult struct {
|
||||||
rows int64
|
rows int64
|
||||||
|
|||||||
@@ -2,14 +2,38 @@ package funcspec
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/security"
|
"github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
)
|
)
|
||||||
|
|
||||||
// RegisterSecurityHooks registers security hooks for funcspec handlers
|
// RegisterSecurityHooks registers security hooks for funcspec handlers
|
||||||
// Note: funcspec operates on SQL queries directly, so row-level security is not directly applicable
|
// Note: funcspec operates on SQL queries directly, so row-level security is not directly applicable
|
||||||
// We provide audit logging for data access tracking
|
// We provide auth enforcement and audit logging for data access tracking
|
||||||
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
||||||
|
// Hook 0: BeforeQueryList - Auth check before list query execution
|
||||||
|
handler.Hooks().Register(BeforeQueryList, func(hookCtx *HookContext) error {
|
||||||
|
if hookCtx.UserContext == nil || hookCtx.UserContext.UserID == 0 {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = "authentication required"
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return fmt.Errorf("authentication required")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 0: BeforeQuery - Auth check before single query execution
|
||||||
|
handler.Hooks().Register(BeforeQuery, func(hookCtx *HookContext) error {
|
||||||
|
if hookCtx.UserContext == nil || hookCtx.UserContext.UserID == 0 {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = "authentication required"
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return fmt.Errorf("authentication required")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
// Hook 1: BeforeQueryList - Audit logging before query list execution
|
// Hook 1: BeforeQueryList - Audit logging before query list execution
|
||||||
handler.Hooks().Register(BeforeQueryList, func(hookCtx *HookContext) error {
|
handler.Hooks().Register(BeforeQueryList, func(hookCtx *HookContext) error {
|
||||||
secCtx := newFuncSpecSecurityContext(hookCtx)
|
secCtx := newFuncSpecSecurityContext(hookCtx)
|
||||||
|
|||||||
@@ -8,6 +8,10 @@ import (
|
|||||||
|
|
||||||
// ModelRules defines the permissions and security settings for a model
|
// ModelRules defines the permissions and security settings for a model
|
||||||
type ModelRules struct {
|
type ModelRules struct {
|
||||||
|
CanPublicRead bool // Whether the model can be read (GET operations)
|
||||||
|
CanPublicUpdate bool // Whether the model can be updated (PUT/PATCH operations)
|
||||||
|
CanPublicCreate bool // Whether the model can be created (POST operations)
|
||||||
|
CanPublicDelete bool // Whether the model can be deleted (DELETE operations)
|
||||||
CanRead bool // Whether the model can be read (GET operations)
|
CanRead bool // Whether the model can be read (GET operations)
|
||||||
CanUpdate bool // Whether the model can be updated (PUT/PATCH operations)
|
CanUpdate bool // Whether the model can be updated (PUT/PATCH operations)
|
||||||
CanCreate bool // Whether the model can be created (POST operations)
|
CanCreate bool // Whether the model can be created (POST operations)
|
||||||
@@ -22,6 +26,10 @@ func DefaultModelRules() ModelRules {
|
|||||||
CanUpdate: true,
|
CanUpdate: true,
|
||||||
CanCreate: true,
|
CanCreate: true,
|
||||||
CanDelete: true,
|
CanDelete: true,
|
||||||
|
CanPublicRead: false,
|
||||||
|
CanPublicUpdate: false,
|
||||||
|
CanPublicCreate: false,
|
||||||
|
CanPublicDelete: false,
|
||||||
SecurityDisabled: false,
|
SecurityDisabled: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ MQTTSpec is an MQTT-based database query framework that enables real-time databa
|
|||||||
- **Full CRUD Operations**: Create, Read, Update, Delete with hooks
|
- **Full CRUD Operations**: Create, Read, Update, Delete with hooks
|
||||||
- **Real-time Subscriptions**: Subscribe to entity changes with filtering
|
- **Real-time Subscriptions**: Subscribe to entity changes with filtering
|
||||||
- **Database Agnostic**: GORM and Bun ORM support
|
- **Database Agnostic**: GORM and Bun ORM support
|
||||||
- **Lifecycle Hooks**: 12 hooks for authentication, authorization, validation, and auditing
|
- **Lifecycle Hooks**: 13 hooks for authentication, authorization, validation, and auditing
|
||||||
- **Multi-tenancy Support**: Built-in tenant isolation via hooks
|
- **Multi-tenancy Support**: Built-in tenant isolation via hooks
|
||||||
- **Thread-safe**: Proper concurrency handling throughout
|
- **Thread-safe**: Proper concurrency handling throughout
|
||||||
|
|
||||||
@@ -326,10 +326,11 @@ When any client creates/updates/deletes a user matching the subscription filters
|
|||||||
|
|
||||||
## Lifecycle Hooks
|
## Lifecycle Hooks
|
||||||
|
|
||||||
MQTTSpec provides 12 lifecycle hooks for implementing cross-cutting concerns:
|
MQTTSpec provides 13 lifecycle hooks for implementing cross-cutting concerns:
|
||||||
|
|
||||||
### Hook Types
|
### Hook Types
|
||||||
|
|
||||||
|
- `BeforeHandle` — fires after model resolution, before operation dispatch (auth checks)
|
||||||
- `BeforeConnect` / `AfterConnect` - Connection lifecycle
|
- `BeforeConnect` / `AfterConnect` - Connection lifecycle
|
||||||
- `BeforeDisconnect` / `AfterDisconnect` - Disconnection lifecycle
|
- `BeforeDisconnect` / `AfterDisconnect` - Disconnection lifecycle
|
||||||
- `BeforeRead` / `AfterRead` - Read operations
|
- `BeforeRead` / `AfterRead` - Read operations
|
||||||
@@ -339,6 +340,20 @@ MQTTSpec provides 12 lifecycle hooks for implementing cross-cutting concerns:
|
|||||||
- `BeforeSubscribe` / `AfterSubscribe` - Subscription creation
|
- `BeforeSubscribe` / `AfterSubscribe` - Subscription creation
|
||||||
- `BeforeUnsubscribe` / `AfterUnsubscribe` - Subscription removal
|
- `BeforeUnsubscribe` / `AfterUnsubscribe` - Subscription removal
|
||||||
|
|
||||||
|
### Security Hooks (Recommended)
|
||||||
|
|
||||||
|
Use `RegisterSecurityHooks` for integrated auth with model-rule support:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import "github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
|
||||||
|
provider := security.NewCompositeSecurityProvider(auth, colSec, rowSec)
|
||||||
|
securityList := security.NewSecurityList(provider)
|
||||||
|
mqttspec.RegisterSecurityHooks(handler, securityList)
|
||||||
|
// Registers BeforeHandle (model auth), BeforeRead (load rules),
|
||||||
|
// AfterRead (column security + audit), BeforeUpdate, BeforeDelete
|
||||||
|
```
|
||||||
|
|
||||||
### Authentication Example (JWT)
|
### Authentication Example (JWT)
|
||||||
|
|
||||||
```go
|
```go
|
||||||
@@ -657,7 +672,7 @@ handler, err := mqttspec.NewHandlerWithGORM(db,
|
|||||||
| **Network Efficiency** | Better for unreliable networks | Better for low-latency |
|
| **Network Efficiency** | Better for unreliable networks | Better for low-latency |
|
||||||
| **Best For** | IoT, mobile apps, distributed systems | Web applications, real-time dashboards |
|
| **Best For** | IoT, mobile apps, distributed systems | Web applications, real-time dashboards |
|
||||||
| **Message Protocol** | Same JSON structure | Same JSON structure |
|
| **Message Protocol** | Same JSON structure | Same JSON structure |
|
||||||
| **Hooks** | Same 12 hooks | Same 12 hooks |
|
| **Hooks** | Same 13 hooks | Same 13 hooks |
|
||||||
| **CRUD Operations** | Identical | Identical |
|
| **CRUD Operations** | Identical | Identical |
|
||||||
| **Subscriptions** | Identical (via MQTT topics) | Identical (via app-level) |
|
| **Subscriptions** | Identical (via MQTT topics) | Identical (via app-level) |
|
||||||
|
|
||||||
|
|||||||
@@ -284,6 +284,15 @@ func (h *Handler) handleRequest(client *Client, msg *Message) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Execute BeforeHandle hook - auth check fires here, after model resolution
|
||||||
|
hookCtx.Operation = string(msg.Operation)
|
||||||
|
if err := h.hooks.Execute(BeforeHandle, hookCtx); err != nil {
|
||||||
|
if hookCtx.Abort {
|
||||||
|
h.sendError(client.ID, msg.ID, "unauthorized", hookCtx.AbortMessage)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Route to operation handler
|
// Route to operation handler
|
||||||
switch msg.Operation {
|
switch msg.Operation {
|
||||||
case OperationRead:
|
case OperationRead:
|
||||||
@@ -645,12 +654,15 @@ func (h *Handler) getNotifyTopic(clientID, subscriptionID string) string {
|
|||||||
// Database operation helpers (adapted from websocketspec)
|
// Database operation helpers (adapted from websocketspec)
|
||||||
|
|
||||||
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
||||||
// Use entity as table name
|
|
||||||
tableName := entity
|
tableName := entity
|
||||||
|
|
||||||
if schema != "" {
|
if schema != "" {
|
||||||
|
if h.db.DriverName() == "sqlite" {
|
||||||
|
tableName = schema + "_" + tableName
|
||||||
|
} else {
|
||||||
tableName = schema + "." + tableName
|
tableName = schema + "." + tableName
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return tableName
|
return tableName
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,8 +20,11 @@ type (
|
|||||||
HookRegistry = websocketspec.HookRegistry
|
HookRegistry = websocketspec.HookRegistry
|
||||||
)
|
)
|
||||||
|
|
||||||
// Hook type constants - all 12 lifecycle hooks
|
// Hook type constants - all lifecycle hooks
|
||||||
const (
|
const (
|
||||||
|
// BeforeHandle fires after model resolution, before operation dispatch
|
||||||
|
BeforeHandle = websocketspec.BeforeHandle
|
||||||
|
|
||||||
// CRUD operation hooks
|
// CRUD operation hooks
|
||||||
BeforeRead = websocketspec.BeforeRead
|
BeforeRead = websocketspec.BeforeRead
|
||||||
AfterRead = websocketspec.AfterRead
|
AfterRead = websocketspec.AfterRead
|
||||||
|
|||||||
108
pkg/mqttspec/security_hooks.go
Normal file
108
pkg/mqttspec/security_hooks.go
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
package mqttspec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RegisterSecurityHooks registers all security-related hooks with the MQTT handler
|
||||||
|
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
||||||
|
// Hook 0: BeforeHandle - enforce auth after model resolution
|
||||||
|
handler.Hooks().Register(BeforeHandle, func(hookCtx *HookContext) error {
|
||||||
|
if err := security.CheckModelAuthAllowed(newSecurityContext(hookCtx), hookCtx.Operation); err != nil {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = err.Error()
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 1: BeforeRead - Load security rules
|
||||||
|
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.LoadSecurityRules(secCtx, securityList)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 2: AfterRead - Apply column-level security (masking)
|
||||||
|
handler.Hooks().Register(AfterRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.ApplyColumnSecurity(secCtx, securityList)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 3 (Optional): Audit logging
|
||||||
|
handler.Hooks().Register(AfterRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.LogDataAccess(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 4: BeforeUpdate - enforce CanUpdate rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeUpdate, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelUpdateAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 5: BeforeDelete - enforce CanDelete rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeDelete, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelDeleteAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
logger.Info("Security hooks registered for mqttspec handler")
|
||||||
|
}
|
||||||
|
|
||||||
|
// securityContext adapts mqttspec.HookContext to security.SecurityContext interface
|
||||||
|
type securityContext struct {
|
||||||
|
ctx *HookContext
|
||||||
|
}
|
||||||
|
|
||||||
|
func newSecurityContext(ctx *HookContext) security.SecurityContext {
|
||||||
|
return &securityContext{ctx: ctx}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetContext() context.Context {
|
||||||
|
return s.ctx.Context
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetUserID() (int, bool) {
|
||||||
|
return security.GetUserID(s.ctx.Context)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetSchema() string {
|
||||||
|
return s.ctx.Schema
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetEntity() string {
|
||||||
|
return s.ctx.Entity
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetModel() interface{} {
|
||||||
|
return s.ctx.Model
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetQuery retrieves a stored query from hook metadata
|
||||||
|
func (s *securityContext) GetQuery() interface{} {
|
||||||
|
if s.ctx.Metadata == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return s.ctx.Metadata["query"]
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetQuery stores the query in hook metadata
|
||||||
|
func (s *securityContext) SetQuery(query interface{}) {
|
||||||
|
if s.ctx.Metadata == nil {
|
||||||
|
s.ctx.Metadata = make(map[string]interface{})
|
||||||
|
}
|
||||||
|
s.ctx.Metadata["query"] = query
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetResult() interface{} {
|
||||||
|
return s.ctx.Result
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) SetResult(result interface{}) {
|
||||||
|
s.ctx.Result = result
|
||||||
|
}
|
||||||
407
pkg/resolvemcp/README.md
Normal file
407
pkg/resolvemcp/README.md
Normal file
@@ -0,0 +1,407 @@
|
|||||||
|
# resolvemcp
|
||||||
|
|
||||||
|
Package `resolvemcp` exposes registered database models as **Model Context Protocol (MCP) tools and resources** over HTTP/SSE transport. It mirrors the `resolvespec` package patterns — same model registration API, same filter/sort/pagination/preload options, same lifecycle hook system.
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/resolvemcp"
|
||||||
|
"github.com/gorilla/mux"
|
||||||
|
)
|
||||||
|
|
||||||
|
// 1. Create a handler
|
||||||
|
handler := resolvemcp.NewHandlerWithGORM(db, resolvemcp.Config{
|
||||||
|
BaseURL: "http://localhost:8080",
|
||||||
|
})
|
||||||
|
|
||||||
|
// 2. Register models
|
||||||
|
handler.RegisterModel("public", "users", &User{})
|
||||||
|
handler.RegisterModel("public", "orders", &Order{})
|
||||||
|
|
||||||
|
// 3. Mount routes
|
||||||
|
r := mux.NewRouter()
|
||||||
|
resolvemcp.SetupMuxRoutes(r, handler)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Config
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Config struct {
|
||||||
|
// BaseURL is the public-facing base URL of the server (e.g. "http://localhost:8080").
|
||||||
|
// Sent to MCP clients during the SSE handshake so they know where to POST messages.
|
||||||
|
// If empty, it is detected from each incoming request using the Host header and
|
||||||
|
// TLS state (X-Forwarded-Proto is honoured for reverse-proxy deployments).
|
||||||
|
BaseURL string
|
||||||
|
|
||||||
|
// BasePath is the URL path prefix where MCP endpoints are mounted (e.g. "/mcp").
|
||||||
|
// Required.
|
||||||
|
BasePath string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Handler Creation
|
||||||
|
|
||||||
|
| Function | Description |
|
||||||
|
|---|---|
|
||||||
|
| `NewHandlerWithGORM(db *gorm.DB, cfg Config) *Handler` | Backed by GORM |
|
||||||
|
| `NewHandlerWithBun(db *bun.DB, cfg Config) *Handler` | Backed by Bun |
|
||||||
|
| `NewHandlerWithDB(db common.Database, cfg Config) *Handler` | Backed by any `common.Database` |
|
||||||
|
| `NewHandler(db common.Database, registry common.ModelRegistry, cfg Config) *Handler` | Full control over registry |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Registering Models
|
||||||
|
|
||||||
|
```go
|
||||||
|
handler.RegisterModel(schema, entity string, model interface{}) error
|
||||||
|
```
|
||||||
|
|
||||||
|
- `schema` — database schema name (e.g. `"public"`), or empty string for no schema prefix.
|
||||||
|
- `entity` — table/entity name (e.g. `"users"`).
|
||||||
|
- `model` — a pointer to a struct (e.g. `&User{}`).
|
||||||
|
|
||||||
|
Each call immediately creates four MCP **tools** and one MCP **resource** for the model.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## HTTP / SSE Transport
|
||||||
|
|
||||||
|
The `*server.SSEServer` returned by any of the helpers below implements `http.Handler`, so it works with every Go HTTP framework.
|
||||||
|
|
||||||
|
`Config.BasePath` is required and used for all route registration.
|
||||||
|
`Config.BaseURL` is optional — when empty it is detected from each request.
|
||||||
|
|
||||||
|
### Gorilla Mux
|
||||||
|
|
||||||
|
```go
|
||||||
|
resolvemcp.SetupMuxRoutes(r, handler)
|
||||||
|
```
|
||||||
|
|
||||||
|
Registers:
|
||||||
|
|
||||||
|
| Route | Method | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `{BasePath}/sse` | GET | SSE connection — clients subscribe here |
|
||||||
|
| `{BasePath}/message` | POST | JSON-RPC — clients send requests here |
|
||||||
|
| `{BasePath}/*` | any | Full SSE server (convenience prefix) |
|
||||||
|
|
||||||
|
### bunrouter
|
||||||
|
|
||||||
|
```go
|
||||||
|
resolvemcp.SetupBunRouterRoutes(router, handler)
|
||||||
|
```
|
||||||
|
|
||||||
|
Registers `GET {BasePath}/sse` and `POST {BasePath}/message` on the provided `*bunrouter.Router`.
|
||||||
|
|
||||||
|
### Gin (or any `http.Handler`-compatible framework)
|
||||||
|
|
||||||
|
Use `handler.SSEServer()` to get an `http.Handler` and wrap it with the framework's adapter:
|
||||||
|
|
||||||
|
```go
|
||||||
|
sse := handler.SSEServer()
|
||||||
|
|
||||||
|
// Gin
|
||||||
|
engine.Any("/mcp/*path", gin.WrapH(sse))
|
||||||
|
|
||||||
|
// net/http
|
||||||
|
http.Handle("/mcp/", sse)
|
||||||
|
|
||||||
|
// Echo
|
||||||
|
e.Any("/mcp/*", echo.WrapHandler(sse))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Authentication
|
||||||
|
|
||||||
|
Add middleware before the MCP routes. The handler itself has no auth layer.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MCP Tools
|
||||||
|
|
||||||
|
### Tool Naming
|
||||||
|
|
||||||
|
```
|
||||||
|
{operation}_{schema}_{entity} // e.g. read_public_users
|
||||||
|
{operation}_{entity} // e.g. read_users (when schema is empty)
|
||||||
|
```
|
||||||
|
|
||||||
|
Operations: `read`, `create`, `update`, `delete`.
|
||||||
|
|
||||||
|
### Read Tool — `read_{schema}_{entity}`
|
||||||
|
|
||||||
|
Fetch one or many records.
|
||||||
|
|
||||||
|
| Argument | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `id` | string | Primary key value. Omit to return multiple records. |
|
||||||
|
| `limit` | number | Max records per page (recommended: 10–100). |
|
||||||
|
| `offset` | number | Records to skip (offset-based pagination). |
|
||||||
|
| `cursor_forward` | string | PK of the **last** record on the current page (next-page cursor). |
|
||||||
|
| `cursor_backward` | string | PK of the **first** record on the current page (prev-page cursor). |
|
||||||
|
| `columns` | array | Column names to include. Omit for all columns. |
|
||||||
|
| `omit_columns` | array | Column names to exclude. |
|
||||||
|
| `filters` | array | Filter objects (see [Filtering](#filtering)). |
|
||||||
|
| `sort` | array | Sort objects (see [Sorting](#sorting)). |
|
||||||
|
| `preloads` | array | Relation preload objects (see [Preloading](#preloading)). |
|
||||||
|
|
||||||
|
**Response:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": [...],
|
||||||
|
"metadata": {
|
||||||
|
"total": 100,
|
||||||
|
"filtered": 100,
|
||||||
|
"count": 10,
|
||||||
|
"limit": 10,
|
||||||
|
"offset": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create Tool — `create_{schema}_{entity}`
|
||||||
|
|
||||||
|
Insert one or more records.
|
||||||
|
|
||||||
|
| Argument | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `data` | object \| array | Single object or array of objects to insert. |
|
||||||
|
|
||||||
|
Array input runs inside a single transaction — all succeed or all fail.
|
||||||
|
|
||||||
|
**Response:**
|
||||||
|
```json
|
||||||
|
{ "success": true, "data": { ... } }
|
||||||
|
```
|
||||||
|
|
||||||
|
### Update Tool — `update_{schema}_{entity}`
|
||||||
|
|
||||||
|
Partially update an existing record. Only non-null, non-empty fields in `data` are applied; existing values are preserved for omitted fields.
|
||||||
|
|
||||||
|
| Argument | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `id` | string | Primary key of the record. Can also be included inside `data`. |
|
||||||
|
| `data` | object (required) | Fields to update. |
|
||||||
|
|
||||||
|
**Response:**
|
||||||
|
```json
|
||||||
|
{ "success": true, "data": { ...merged record... } }
|
||||||
|
```
|
||||||
|
|
||||||
|
### Delete Tool — `delete_{schema}_{entity}`
|
||||||
|
|
||||||
|
Delete a record by primary key. **Irreversible.**
|
||||||
|
|
||||||
|
| Argument | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `id` | string (required) | Primary key of the record to delete. |
|
||||||
|
|
||||||
|
**Response:**
|
||||||
|
```json
|
||||||
|
{ "success": true, "data": { ...deleted record... } }
|
||||||
|
```
|
||||||
|
|
||||||
|
### Resource — `{schema}.{entity}`
|
||||||
|
|
||||||
|
Each model is also registered as an MCP resource with URI `schema.entity` (or just `entity` when schema is empty). Reading the resource returns up to 100 records as `application/json`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Filtering
|
||||||
|
|
||||||
|
Pass an array of filter objects to the `filters` argument:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{ "column": "status", "operator": "=", "value": "active" },
|
||||||
|
{ "column": "age", "operator": ">", "value": 18, "logic_operator": "AND" },
|
||||||
|
{ "column": "role", "operator": "in", "value": ["admin", "editor"], "logic_operator": "OR" }
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Supported Operators
|
||||||
|
|
||||||
|
| Operator | Aliases | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `=` | `eq` | Equal |
|
||||||
|
| `!=` | `neq`, `<>` | Not equal |
|
||||||
|
| `>` | `gt` | Greater than |
|
||||||
|
| `>=` | `gte` | Greater than or equal |
|
||||||
|
| `<` | `lt` | Less than |
|
||||||
|
| `<=` | `lte` | Less than or equal |
|
||||||
|
| `like` | | SQL LIKE (case-sensitive) |
|
||||||
|
| `ilike` | | SQL ILIKE (case-insensitive) |
|
||||||
|
| `in` | | Value in list |
|
||||||
|
| `is_null` | | Column IS NULL |
|
||||||
|
| `is_not_null` | | Column IS NOT NULL |
|
||||||
|
|
||||||
|
### Logic Operators
|
||||||
|
|
||||||
|
- `"logic_operator": "AND"` (default) — filter is AND-chained with the previous condition.
|
||||||
|
- `"logic_operator": "OR"` — filter is OR-grouped with the previous condition.
|
||||||
|
|
||||||
|
Consecutive OR filters are grouped into a single `(cond1 OR cond2 OR ...)` clause.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sorting
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{ "column": "created_at", "direction": "desc" },
|
||||||
|
{ "column": "name", "direction": "asc" }
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Pagination
|
||||||
|
|
||||||
|
### Offset-Based
|
||||||
|
|
||||||
|
```json
|
||||||
|
{ "limit": 20, "offset": 40 }
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cursor-Based
|
||||||
|
|
||||||
|
Cursor pagination uses a SQL `EXISTS` subquery for stable, efficient paging. Always pair with a `sort` argument.
|
||||||
|
|
||||||
|
```json
|
||||||
|
// Next page: pass the PK of the last record on the current page
|
||||||
|
{ "cursor_forward": "42", "limit": 20, "sort": [{"column": "id", "direction": "asc"}] }
|
||||||
|
|
||||||
|
// Previous page: pass the PK of the first record on the current page
|
||||||
|
{ "cursor_backward": "23", "limit": 20, "sort": [{"column": "id", "direction": "asc"}] }
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Preloading Relations
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{ "relation": "Profile" },
|
||||||
|
{ "relation": "Orders" }
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
Available relations are listed in each tool's description. Only relations defined on the model struct are valid.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hook System
|
||||||
|
|
||||||
|
Hooks let you intercept and modify CRUD operations at well-defined lifecycle points.
|
||||||
|
|
||||||
|
### Hook Types
|
||||||
|
|
||||||
|
| Constant | Fires |
|
||||||
|
|---|---|
|
||||||
|
| `BeforeHandle` | After model resolution, before operation dispatch (all CRUD) |
|
||||||
|
| `BeforeRead` / `AfterRead` | Around read queries |
|
||||||
|
| `BeforeCreate` / `AfterCreate` | Around insert |
|
||||||
|
| `BeforeUpdate` / `AfterUpdate` | Around update |
|
||||||
|
| `BeforeDelete` / `AfterDelete` | Around delete |
|
||||||
|
|
||||||
|
### Registering Hooks
|
||||||
|
|
||||||
|
```go
|
||||||
|
handler.Hooks().Register(resolvemcp.BeforeCreate, func(ctx *resolvemcp.HookContext) error {
|
||||||
|
// Inject a timestamp before insert
|
||||||
|
if data, ok := ctx.Data.(map[string]interface{}); ok {
|
||||||
|
data["created_at"] = time.Now()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
// Register the same hook for multiple events
|
||||||
|
handler.Hooks().RegisterMultiple(
|
||||||
|
[]resolvemcp.HookType{resolvemcp.BeforeCreate, resolvemcp.BeforeUpdate},
|
||||||
|
auditHook,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### HookContext Fields
|
||||||
|
|
||||||
|
| Field | Type | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| `Context` | `context.Context` | Request context |
|
||||||
|
| `Handler` | `*Handler` | The resolvemcp handler |
|
||||||
|
| `Schema` | `string` | Database schema name |
|
||||||
|
| `Entity` | `string` | Entity/table name |
|
||||||
|
| `Model` | `interface{}` | Registered model instance |
|
||||||
|
| `Options` | `common.RequestOptions` | Parsed request options (read operations) |
|
||||||
|
| `Operation` | `string` | `"read"`, `"create"`, `"update"`, or `"delete"` |
|
||||||
|
| `ID` | `string` | Primary key from request (read/update/delete) |
|
||||||
|
| `Data` | `interface{}` | Input data (create/update — modifiable) |
|
||||||
|
| `Result` | `interface{}` | Output data (set by After hooks) |
|
||||||
|
| `Error` | `error` | Operation error, if any |
|
||||||
|
| `Query` | `common.SelectQuery` | Live query object (available in `BeforeRead`) |
|
||||||
|
| `Tx` | `common.Database` | Database/transaction handle |
|
||||||
|
| `Abort` | `bool` | Set to `true` to abort the operation |
|
||||||
|
| `AbortMessage` | `string` | Error message returned when aborting |
|
||||||
|
| `AbortCode` | `int` | Optional status code for the abort |
|
||||||
|
|
||||||
|
### Aborting an Operation
|
||||||
|
|
||||||
|
```go
|
||||||
|
handler.Hooks().Register(resolvemcp.BeforeDelete, func(ctx *resolvemcp.HookContext) error {
|
||||||
|
ctx.Abort = true
|
||||||
|
ctx.AbortMessage = "deletion is disabled"
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Managing Hooks
|
||||||
|
|
||||||
|
```go
|
||||||
|
registry := handler.Hooks()
|
||||||
|
registry.HasHooks(resolvemcp.BeforeCreate) // bool
|
||||||
|
registry.Clear(resolvemcp.BeforeCreate) // remove hooks for one type
|
||||||
|
registry.ClearAll() // remove all hooks
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Context Helpers
|
||||||
|
|
||||||
|
Request metadata is threaded through `context.Context` during handler execution. Hooks and custom tools can read it:
|
||||||
|
|
||||||
|
```go
|
||||||
|
schema := resolvemcp.GetSchema(ctx)
|
||||||
|
entity := resolvemcp.GetEntity(ctx)
|
||||||
|
tableName := resolvemcp.GetTableName(ctx)
|
||||||
|
model := resolvemcp.GetModel(ctx)
|
||||||
|
modelPtr := resolvemcp.GetModelPtr(ctx)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also set values manually (e.g. in middleware):
|
||||||
|
|
||||||
|
```go
|
||||||
|
ctx = resolvemcp.WithSchema(ctx, "tenant_a")
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Adding Custom MCP Tools
|
||||||
|
|
||||||
|
Access the underlying `*server.MCPServer` to register additional tools:
|
||||||
|
|
||||||
|
```go
|
||||||
|
mcpServer := handler.MCPServer()
|
||||||
|
mcpServer.AddTool(myTool, myHandler)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table Name Resolution
|
||||||
|
|
||||||
|
The handler resolves table names in priority order:
|
||||||
|
|
||||||
|
1. `TableNameProvider` interface — `TableName() string` (can return `"schema.table"`)
|
||||||
|
2. `SchemaProvider` interface — `SchemaName() string` (combined with entity name)
|
||||||
|
3. Fallback: `schema.entity` (or `schema_entity` for SQLite)
|
||||||
71
pkg/resolvemcp/context.go
Normal file
71
pkg/resolvemcp/context.go
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
import "context"
|
||||||
|
|
||||||
|
type contextKey string
|
||||||
|
|
||||||
|
const (
|
||||||
|
contextKeySchema contextKey = "schema"
|
||||||
|
contextKeyEntity contextKey = "entity"
|
||||||
|
contextKeyTableName contextKey = "tableName"
|
||||||
|
contextKeyModel contextKey = "model"
|
||||||
|
contextKeyModelPtr contextKey = "modelPtr"
|
||||||
|
)
|
||||||
|
|
||||||
|
func WithSchema(ctx context.Context, schema string) context.Context {
|
||||||
|
return context.WithValue(ctx, contextKeySchema, schema)
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetSchema(ctx context.Context) string {
|
||||||
|
if v := ctx.Value(contextKeySchema); v != nil {
|
||||||
|
return v.(string)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func WithEntity(ctx context.Context, entity string) context.Context {
|
||||||
|
return context.WithValue(ctx, contextKeyEntity, entity)
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetEntity(ctx context.Context) string {
|
||||||
|
if v := ctx.Value(contextKeyEntity); v != nil {
|
||||||
|
return v.(string)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func WithTableName(ctx context.Context, tableName string) context.Context {
|
||||||
|
return context.WithValue(ctx, contextKeyTableName, tableName)
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetTableName(ctx context.Context) string {
|
||||||
|
if v := ctx.Value(contextKeyTableName); v != nil {
|
||||||
|
return v.(string)
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func WithModel(ctx context.Context, model interface{}) context.Context {
|
||||||
|
return context.WithValue(ctx, contextKeyModel, model)
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetModel(ctx context.Context) interface{} {
|
||||||
|
return ctx.Value(contextKeyModel)
|
||||||
|
}
|
||||||
|
|
||||||
|
func WithModelPtr(ctx context.Context, modelPtr interface{}) context.Context {
|
||||||
|
return context.WithValue(ctx, contextKeyModelPtr, modelPtr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetModelPtr(ctx context.Context) interface{} {
|
||||||
|
return ctx.Value(contextKeyModelPtr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func withRequestData(ctx context.Context, schema, entity, tableName string, model, modelPtr interface{}) context.Context {
|
||||||
|
ctx = WithSchema(ctx, schema)
|
||||||
|
ctx = WithEntity(ctx, entity)
|
||||||
|
ctx = WithTableName(ctx, tableName)
|
||||||
|
ctx = WithModel(ctx, model)
|
||||||
|
ctx = WithModelPtr(ctx, modelPtr)
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
161
pkg/resolvemcp/cursor.go
Normal file
161
pkg/resolvemcp/cursor.go
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
// Cursor-based pagination adapted from pkg/resolvespec/cursor.go.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
type cursorDirection int
|
||||||
|
|
||||||
|
const (
|
||||||
|
cursorForward cursorDirection = 1
|
||||||
|
cursorBackward cursorDirection = -1
|
||||||
|
)
|
||||||
|
|
||||||
|
// getCursorFilter generates a SQL EXISTS subquery for cursor-based pagination.
// expandJoins is an optional map[alias]string of JOIN clauses for join-column sort support.
//
// The subquery selects the cursor row by primary key (aliased cursor_select)
// and compares each valid sort column of the outer row against the cursor row.
// Invalid sort columns are skipped with a warning; an error is returned when
// no cursor is supplied, no sort columns are defined, or every sort column was
// filtered out.
//
// NOTE(review): cursorID is interpolated directly into the generated SQL
// ("cursor_select.<pk> = <cursorID>"); confirm callers sanitize it, since a
// caller-controlled value would allow SQL injection here.
func getCursorFilter(
	tableName string,
	pkName string,
	modelColumns []string,
	options common.RequestOptions,
	expandJoins map[string]string,
) (string, error) {
	// Keep the qualified name for the FROM clause; use the bare table name
	// for column prefix comparisons below.
	fullTableName := tableName
	if strings.Contains(tableName, ".") {
		tableName = strings.SplitN(tableName, ".", 2)[1]
	}

	cursorID, direction := getActiveCursor(options)
	if cursorID == "" {
		return "", fmt.Errorf("no cursor provided for table %s", tableName)
	}

	sortItems := options.Sort
	if len(sortItems) == 0 {
		return "", fmt.Errorf("no sort columns defined")
	}

	var whereClauses []string
	joinSQL := ""
	// Backward pagination flips every sort direction.
	reverse := direction < 0

	for _, s := range sortItems {
		col := strings.Trim(strings.TrimSpace(s.Column), "()")
		if col == "" {
			continue
		}

		// Split "alias.column" into prefix and field parts.
		parts := strings.Split(col, ".")
		field := strings.TrimSpace(parts[len(parts)-1])
		prefix := strings.Join(parts[:len(parts)-1], ".")

		desc := strings.EqualFold(s.Direction, "desc")
		if reverse {
			desc = !desc
		}

		cursorCol, targetCol, isJoin, err := resolveCursorColumn(field, prefix, tableName, modelColumns)
		if err != nil {
			logger.Warn("Skipping invalid sort column %q: %v", col, err)
			continue
		}

		if isJoin {
			// Join columns need their JOIN clause rewritten so it works
			// inside the subquery; skip them when no clause is available.
			if expandJoins != nil {
				if joinClause, ok := expandJoins[prefix]; ok {
					jSQL, cRef := rewriteCursorJoin(joinClause, tableName, prefix)
					joinSQL = jSQL
					cursorCol = cRef + "." + field
					targetCol = prefix + "." + field
				}
			}
			if cursorCol == "" {
				logger.Warn("Skipping cursor sort column %q: join alias %q not in expandJoins", col, prefix)
				continue
			}
		}

		// "<" selects rows after the cursor for ascending sorts; ">" for
		// descending (directions already flipped for backward paging).
		op := "<"
		if desc {
			op = ">"
		}
		whereClauses = append(whereClauses, fmt.Sprintf("%s %s %s", cursorCol, op, targetCol))
	}

	if len(whereClauses) == 0 {
		return "", fmt.Errorf("no valid sort columns after filtering")
	}

	orSQL := buildCursorPriorityChain(whereClauses)

	query := fmt.Sprintf(`EXISTS (
	SELECT 1
	FROM %s cursor_select
	%s
	WHERE cursor_select.%s = %s
	AND (%s)
)`,
		fullTableName,
		joinSQL,
		pkName,
		cursorID,
		orSQL,
	)

	return query, nil
}
|
||||||
|
|
||||||
|
func getActiveCursor(options common.RequestOptions) (id string, direction cursorDirection) {
|
||||||
|
if options.CursorForward != "" {
|
||||||
|
return options.CursorForward, cursorForward
|
||||||
|
}
|
||||||
|
if options.CursorBackward != "" {
|
||||||
|
return options.CursorBackward, cursorBackward
|
||||||
|
}
|
||||||
|
return "", 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveCursorColumn(field, prefix, tableName string, modelColumns []string) (cursorCol, targetCol string, isJoin bool, err error) {
|
||||||
|
if strings.Contains(field, "->") {
|
||||||
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if modelColumns != nil {
|
||||||
|
for _, col := range modelColumns {
|
||||||
|
if strings.EqualFold(col, field) {
|
||||||
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if prefix != "" && prefix != tableName {
|
||||||
|
return "", "", true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", "", false, fmt.Errorf("invalid column: %s", field)
|
||||||
|
}
|
||||||
|
|
||||||
|
// rewriteCursorJoin adapts a JOIN clause for use inside the cursor EXISTS
// subquery: references to the main table are redirected to the cursor_select
// alias, and the join alias itself is renamed so it cannot clash with the
// outer query. It returns the rewritten SQL and the new alias.
//
// NOTE(review): this is plain text substitution on " alias " / " alias."
// tokens, so an alias at the very start or end of the clause (without a
// surrounding space) is not renamed — confirm callers always pass
// space-delimited aliases.
func rewriteCursorJoin(joinClause, mainTable, alias string) (joinSQL, cursorAlias string) {
	cursorAlias = "cursor_select_" + alias
	joinSQL = strings.ReplaceAll(joinClause, mainTable+".", "cursor_select.")
	// Rename the bare alias first, then qualified "alias." references.
	for _, suffix := range []string{" ", "."} {
		joinSQL = strings.ReplaceAll(joinSQL, " "+alias+suffix, " "+cursorAlias+suffix)
	}
	return joinSQL, cursorAlias
}
|
||||||
|
|
||||||
|
// buildCursorPriorityChain combines the ordered comparison clauses into
//
//	(c1) OR (c1 AND c2) OR (c1 AND c2 AND c3) ...
//
// NOTE(review): because every branch repeats the strict comparison c1 (rather
// than equality on earlier columns), X OR (X AND Y) reduces to X, so the whole
// predicate is logically equivalent to (c1) alone. Confirm whether the
// intended keyset form was (c1) OR (e1 AND c2) OR (e1 AND e2 AND c3).
func buildCursorPriorityChain(clauses []string) string {
	branches := make([]string, 0, len(clauses))
	for i := range clauses {
		branches = append(branches, "("+strings.Join(clauses[:i+1], "\n AND ")+")")
	}
	return strings.Join(branches, "\n OR ")
}
|
||||||
706
pkg/resolvemcp/handler.go
Normal file
706
pkg/resolvemcp/handler.go
Normal file
@@ -0,0 +1,706 @@
|
|||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/mark3labs/mcp-go/server"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/reflection"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler exposes registered database models as MCP tools and resources.
type Handler struct {
	db        common.Database      // backing database used for all CRUD operations
	registry  common.ModelRegistry // registered models keyed by "schema.entity"
	hooks     *HookRegistry        // lifecycle hooks run around each operation
	mcpServer *server.MCPServer    // underlying MCP server the tools attach to
	config    Config               // SSE base URL / base path configuration
	name      string               // server name (fixed to "resolvemcp" in NewHandler)
	version   string               // server version (fixed to "1.0.0" in NewHandler)
}
|
||||||
|
|
||||||
|
// NewHandler creates a Handler with the given database, model registry, and config.
|
||||||
|
func NewHandler(db common.Database, registry common.ModelRegistry, cfg Config) *Handler {
|
||||||
|
return &Handler{
|
||||||
|
db: db,
|
||||||
|
registry: registry,
|
||||||
|
hooks: NewHookRegistry(),
|
||||||
|
mcpServer: server.NewMCPServer("resolvemcp", "1.0.0"),
|
||||||
|
config: cfg,
|
||||||
|
name: "resolvemcp",
|
||||||
|
version: "1.0.0",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hooks returns the hook registry. Callers register lifecycle hooks on it
// (e.g. BeforeRead, AfterCreate) that the execute* methods run.
func (h *Handler) Hooks() *HookRegistry {
	return h.hooks
}
|
||||||
|
|
||||||
|
// GetDatabase returns the underlying database handle the Handler operates on.
func (h *Handler) GetDatabase() common.Database {
	return h.db
}
|
||||||
|
|
||||||
|
// MCPServer returns the underlying MCP server, e.g. to add custom tools.
func (h *Handler) MCPServer() *server.MCPServer {
	return h.mcpServer
}
|
||||||
|
|
||||||
|
// SSEServer returns an http.Handler that serves MCP over SSE.
|
||||||
|
// Config.BasePath must be set. Config.BaseURL is used when set; if empty it is
|
||||||
|
// detected automatically from each incoming request.
|
||||||
|
func (h *Handler) SSEServer() http.Handler {
|
||||||
|
if h.config.BaseURL != "" {
|
||||||
|
return h.newSSEServer(h.config.BaseURL, h.config.BasePath)
|
||||||
|
}
|
||||||
|
return &dynamicSSEHandler{h: h}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newSSEServer creates a concrete *server.SSEServer for known baseURL and
// basePath values, wrapping the Handler's shared MCP server.
func (h *Handler) newSSEServer(baseURL, basePath string) *server.SSEServer {
	return server.NewSSEServer(
		h.mcpServer,
		server.WithBaseURL(baseURL),
		server.WithBasePath(basePath),
	)
}
|
||||||
|
|
||||||
|
// dynamicSSEHandler detects BaseURL from each request and delegates to a cached
// *server.SSEServer per detected baseURL. Used when Config.BaseURL is empty.
type dynamicSSEHandler struct {
	h    *Handler
	mu   sync.Mutex                   // guards pool
	pool map[string]*server.SSEServer // lazily built, one server per base URL
}
|
||||||
|
|
||||||
|
// ServeHTTP resolves the request's base URL, fetches (or lazily creates) the
// SSE server cached for that URL, and delegates the request to it.
//
// NOTE(review): the pool is keyed by the detected base URL, which includes the
// client-controlled Host header — arbitrary Host values grow the map without
// bound. Consider capping or normalizing before exposing this publicly.
func (d *dynamicSSEHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	baseURL := requestBaseURL(r)

	d.mu.Lock()
	if d.pool == nil {
		d.pool = make(map[string]*server.SSEServer)
	}
	s, ok := d.pool[baseURL]
	if !ok {
		s = d.h.newSSEServer(baseURL, d.h.config.BasePath)
		d.pool[baseURL] = s
	}
	// Unlock before serving: SSE responses are long-lived streams.
	d.mu.Unlock()

	s.ServeHTTP(w, r)
}
|
||||||
|
|
||||||
|
// requestBaseURL builds the base URL from an incoming request.
|
||||||
|
// It honours the X-Forwarded-Proto header for deployments behind a proxy.
|
||||||
|
func requestBaseURL(r *http.Request) string {
|
||||||
|
scheme := "http"
|
||||||
|
if r.TLS != nil {
|
||||||
|
scheme = "https"
|
||||||
|
}
|
||||||
|
if proto := r.Header.Get("X-Forwarded-Proto"); proto != "" {
|
||||||
|
scheme = proto
|
||||||
|
}
|
||||||
|
return scheme + "://" + r.Host
|
||||||
|
}
|
||||||
|
|
||||||
|
// RegisterModel registers a model and immediately exposes it as MCP tools and a resource.
|
||||||
|
func (h *Handler) RegisterModel(schema, entity string, model interface{}) error {
|
||||||
|
fullName := buildModelName(schema, entity)
|
||||||
|
if err := h.registry.RegisterModel(fullName, model); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
registerModelTools(h, schema, entity, model)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildModelName builds the registry key for a model (same format as
// resolvespec): "schema.entity", or just "entity" when schema is empty.
func buildModelName(schema, entity string) string {
	if schema == "" {
		return entity
	}
	return schema + "." + entity
}
|
||||||
|
|
||||||
|
// getTableName returns the fully qualified table name for a model.
// SQLite has no native schema support, so "schema.table" collapses to
// "schema_table" on that driver; other drivers get the dotted form. When no
// schema is resolved, the bare table name is returned.
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
	schemaName, tableName := h.getSchemaAndTable(schema, entity, model)
	if schemaName != "" {
		if h.db.DriverName() == "sqlite" {
			return fmt.Sprintf("%s_%s", schemaName, tableName)
		}
		return fmt.Sprintf("%s.%s", schemaName, tableName)
	}
	return tableName
}
|
||||||
|
|
||||||
|
func (h *Handler) getSchemaAndTable(defaultSchema, entity string, model interface{}) (schema, table string) {
|
||||||
|
if tableProvider, ok := model.(common.TableNameProvider); ok {
|
||||||
|
tableName := tableProvider.TableName()
|
||||||
|
if idx := strings.LastIndex(tableName, "."); idx != -1 {
|
||||||
|
return tableName[:idx], tableName[idx+1:]
|
||||||
|
}
|
||||||
|
if schemaProvider, ok := model.(common.SchemaProvider); ok {
|
||||||
|
return schemaProvider.SchemaName(), tableName
|
||||||
|
}
|
||||||
|
return defaultSchema, tableName
|
||||||
|
}
|
||||||
|
if schemaProvider, ok := model.(common.SchemaProvider); ok {
|
||||||
|
return schemaProvider.SchemaName(), entity
|
||||||
|
}
|
||||||
|
return defaultSchema, entity
|
||||||
|
}
|
||||||
|
|
||||||
|
// executeRead reads records from the database and returns raw data + metadata.
//
// Flow: resolve and validate the model, filter request options against the
// model's real columns, run BeforeHandle, build the SELECT (columns, computed
// columns, preloads, filters, custom operators, sorting, optional cursor
// pagination), count the filtered rows, apply limit/offset, run BeforeRead,
// then scan either a single record (id != "") or a slice. AfterRead runs on
// the final data before returning.
func (h *Handler) executeRead(ctx context.Context, schema, entity, id string, options common.RequestOptions) (interface{}, *common.Metadata, error) {
	model, err := h.registry.GetModelByEntity(schema, entity)
	if err != nil {
		return nil, nil, fmt.Errorf("model not found: %w", err)
	}

	unwrapped, err := common.ValidateAndUnwrapModel(model)
	if err != nil {
		return nil, nil, fmt.Errorf("invalid model: %w", err)
	}

	model = unwrapped.Model
	modelType := unwrapped.ModelType
	tableName := h.getTableName(schema, entity, model)
	ctx = withRequestData(ctx, schema, entity, tableName, model, unwrapped.ModelPtr)

	// Drop request options referencing columns the model does not have.
	validator := common.NewColumnValidator(model)
	options = validator.FilterRequestOptions(options)

	// BeforeHandle hook
	hookCtx := &HookContext{
		Context:   ctx,
		Handler:   h,
		Schema:    schema,
		Entity:    entity,
		Model:     model,
		Operation: "read",
		Options:   options,
		ID:        id,
		Tx:        h.db,
	}
	if err := h.hooks.Execute(BeforeHandle, hookCtx); err != nil {
		return nil, nil, err
	}

	// Scan target: *[]*Model, built via reflection from the model type.
	sliceType := reflect.SliceOf(reflect.PointerTo(modelType))
	modelPtr := reflect.New(sliceType).Interface()

	query := h.db.NewSelect().Model(modelPtr)

	// Set the table explicitly only when the model cannot name it itself.
	tempInstance := reflect.New(modelType).Interface()
	if provider, ok := tempInstance.(common.TableNameProvider); !ok || provider.TableName() == "" {
		query = query.Table(tableName)
	}

	// Column selection: computed columns without an explicit column list
	// would otherwise suppress the model's own columns, so select them all.
	if len(options.Columns) == 0 && len(options.ComputedColumns) > 0 {
		options.Columns = reflection.GetSQLModelColumns(model)
	}
	for _, col := range options.Columns {
		query = query.Column(reflection.ExtractSourceColumn(col))
	}
	for _, cu := range options.ComputedColumns {
		query = query.ColumnExpr(fmt.Sprintf("(%s) AS %s", cu.Expression, cu.Name))
	}

	// Preloads
	if len(options.Preload) > 0 {
		var err error
		query, err = h.applyPreloads(model, query, options.Preload)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to apply preloads: %w", err)
		}
	}

	// Filters
	query = h.applyFilters(query, options.Filters)

	// Custom operators: raw SQL fragments ANDed onto the WHERE clause.
	for _, customOp := range options.CustomOperators {
		query = query.Where(customOp.SQL)
	}

	// Sorting
	for _, sort := range options.Sort {
		direction := "ASC"
		if strings.EqualFold(sort.Direction, "desc") {
			direction = "DESC"
		}
		query = query.Order(fmt.Sprintf("%s %s", sort.Column, direction))
	}

	// Cursor pagination: only when a cursor is supplied. Defaults the sort
	// to the primary key so the cursor predicate has a stable order.
	if options.CursorForward != "" || options.CursorBackward != "" {
		pkName := reflection.GetPrimaryKeyName(model)
		modelColumns := reflection.GetModelColumns(model)

		if len(options.Sort) == 0 {
			options.Sort = []common.SortOption{{Column: pkName, Direction: "ASC"}}
		}

		// expandJoins is empty for resolvemcp — no custom SQL join support yet
		cursorFilter, err := getCursorFilter(tableName, pkName, modelColumns, options, nil)
		if err != nil {
			return nil, nil, fmt.Errorf("cursor error: %w", err)
		}

		if cursorFilter != "" {
			sanitized := common.SanitizeWhereClause(cursorFilter, reflection.ExtractTableNameOnly(tableName), &options)
			sanitized = common.EnsureOuterParentheses(sanitized)
			if sanitized != "" {
				query = query.Where(sanitized)
			}
		}
	}

	// Count the filtered rows before limit/offset are applied.
	total, err := query.Count(ctx)
	if err != nil {
		return nil, nil, fmt.Errorf("error counting records: %w", err)
	}

	// Pagination
	if options.Limit != nil && *options.Limit > 0 {
		query = query.Limit(*options.Limit)
	}
	if options.Offset != nil && *options.Offset > 0 {
		query = query.Offset(*options.Offset)
	}

	// BeforeRead hook: hooks may inspect/adjust the assembled query.
	hookCtx.Query = query
	if err := h.hooks.Execute(BeforeRead, hookCtx); err != nil {
		return nil, nil, err
	}

	var data interface{}
	if id != "" {
		// Single-record fetch by primary key.
		singleResult := reflect.New(modelType).Interface()
		pkName := reflection.GetPrimaryKeyName(singleResult)
		query = query.Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), id)
		if err := query.Scan(ctx, singleResult); err != nil {
			if err == sql.ErrNoRows {
				return nil, nil, fmt.Errorf("record not found")
			}
			return nil, nil, fmt.Errorf("query error: %w", err)
		}
		data = singleResult
	} else {
		if err := query.Scan(ctx, modelPtr); err != nil {
			return nil, nil, fmt.Errorf("query error: %w", err)
		}
		data = reflect.ValueOf(modelPtr).Elem().Interface()
	}

	limit := 0
	offset := 0
	if options.Limit != nil {
		limit = *options.Limit
	}
	if options.Offset != nil {
		offset = *options.Offset
	}

	// Count is the number of records in this page, not the total.
	var pageCount int64
	if id != "" {
		pageCount = 1
	} else {
		pageCount = int64(reflect.ValueOf(data).Len())
	}

	metadata := &common.Metadata{
		Total:    int64(total),
		Filtered: int64(total),
		Count:    pageCount,
		Limit:    limit,
		Offset:   offset,
	}

	// AfterRead hook
	hookCtx.Result = data
	if err := h.hooks.Execute(AfterRead, hookCtx); err != nil {
		return nil, nil, err
	}

	return data, metadata, nil
}
|
||||||
|
|
||||||
|
// executeCreate inserts one or more records.
//
// data may be a single map[string]interface{} (one row) or a []interface{} of
// such maps (batch insert, executed inside one transaction so all rows
// succeed or none do). BeforeHandle and BeforeCreate run first and may
// replace the data via hookCtx.Data; AfterCreate runs with the inserted
// value(s) as Result.
func (h *Handler) executeCreate(ctx context.Context, schema, entity string, data interface{}) (interface{}, error) {
	model, err := h.registry.GetModelByEntity(schema, entity)
	if err != nil {
		return nil, fmt.Errorf("model not found: %w", err)
	}

	result, err := common.ValidateAndUnwrapModel(model)
	if err != nil {
		return nil, fmt.Errorf("invalid model: %w", err)
	}

	model = result.Model
	tableName := h.getTableName(schema, entity, model)
	ctx = withRequestData(ctx, schema, entity, tableName, model, result.ModelPtr)

	hookCtx := &HookContext{
		Context:   ctx,
		Handler:   h,
		Schema:    schema,
		Entity:    entity,
		Model:     model,
		Operation: "create",
		Data:      data,
		Tx:        h.db,
	}
	if err := h.hooks.Execute(BeforeHandle, hookCtx); err != nil {
		return nil, err
	}
	if err := h.hooks.Execute(BeforeCreate, hookCtx); err != nil {
		return nil, err
	}

	// Use potentially modified data
	data = hookCtx.Data

	switch v := data.(type) {
	case map[string]interface{}:
		// Single-row insert; each map key becomes a column value.
		query := h.db.NewInsert().Table(tableName)
		for key, value := range v {
			query = query.Value(key, value)
		}
		if _, err := query.Exec(ctx); err != nil {
			return nil, fmt.Errorf("create error: %w", err)
		}
		hookCtx.Result = v
		if err := h.hooks.Execute(AfterCreate, hookCtx); err != nil {
			return nil, fmt.Errorf("AfterCreate hook failed: %w", err)
		}
		return v, nil

	case []interface{}:
		// Batch insert inside a single transaction.
		results := make([]interface{}, 0, len(v))
		err := h.db.RunInTransaction(ctx, func(tx common.Database) error {
			for _, item := range v {
				itemMap, ok := item.(map[string]interface{})
				if !ok {
					return fmt.Errorf("each item must be an object")
				}
				q := tx.NewInsert().Table(tableName)
				for key, value := range itemMap {
					q = q.Value(key, value)
				}
				if _, err := q.Exec(ctx); err != nil {
					return err
				}
				results = append(results, item)
			}
			return nil
		})
		if err != nil {
			return nil, fmt.Errorf("batch create error: %w", err)
		}
		hookCtx.Result = results
		if err := h.hooks.Execute(AfterCreate, hookCtx); err != nil {
			return nil, fmt.Errorf("AfterCreate hook failed: %w", err)
		}
		return results, nil

	default:
		return nil, fmt.Errorf("data must be an object or array of objects")
	}
}
|
||||||
|
|
||||||
|
// executeUpdate updates a record by ID.
//
// The ID may come from the explicit id argument or, failing that, from an
// "id" key in the payload. Inside one transaction: the existing record is
// read, round-tripped through JSON into a map, BeforeUpdate runs (and may
// replace the update payload), the payload is merged over the existing map,
// the row is written back, and AfterUpdate runs with the merged map as
// Result.
func (h *Handler) executeUpdate(ctx context.Context, schema, entity, id string, data interface{}) (interface{}, error) {
	model, err := h.registry.GetModelByEntity(schema, entity)
	if err != nil {
		return nil, fmt.Errorf("model not found: %w", err)
	}

	result, err := common.ValidateAndUnwrapModel(model)
	if err != nil {
		return nil, fmt.Errorf("invalid model: %w", err)
	}

	model = result.Model
	tableName := h.getTableName(schema, entity, model)
	ctx = withRequestData(ctx, schema, entity, tableName, model, result.ModelPtr)

	updates, ok := data.(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("data must be an object")
	}

	// Fall back to an "id" key in the payload when no explicit ID was given.
	if id == "" {
		if idVal, exists := updates["id"]; exists {
			id = fmt.Sprintf("%v", idVal)
		}
	}
	if id == "" {
		return nil, fmt.Errorf("update requires an ID")
	}

	pkName := reflection.GetPrimaryKeyName(model)

	var updateResult interface{}
	err = h.db.RunInTransaction(ctx, func(tx common.Database) error {
		// Read existing record
		modelType := reflect.TypeOf(model)
		if modelType.Kind() == reflect.Ptr {
			modelType = modelType.Elem()
		}
		existingRecord := reflect.New(modelType).Interface()
		selectQuery := tx.NewSelect().Model(existingRecord).Column("*").
			Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), id)

		if err := selectQuery.ScanModel(ctx); err != nil {
			if err == sql.ErrNoRows {
				return fmt.Errorf("no records found to update")
			}
			return fmt.Errorf("error fetching existing record: %w", err)
		}

		// Convert to map via a JSON round-trip so the update payload can be
		// merged key-wise over the current column values.
		existingMap := make(map[string]interface{})
		jsonData, err := json.Marshal(existingRecord)
		if err != nil {
			return fmt.Errorf("error marshaling existing record: %w", err)
		}
		if err := json.Unmarshal(jsonData, &existingMap); err != nil {
			return fmt.Errorf("error unmarshaling existing record: %w", err)
		}

		hookCtx := &HookContext{
			Context:   ctx,
			Handler:   h,
			Schema:    schema,
			Entity:    entity,
			Model:     model,
			Operation: "update",
			ID:        id,
			Data:      updates,
			Tx:        tx,
		}
		if err := h.hooks.Execute(BeforeUpdate, hookCtx); err != nil {
			return err
		}
		if modifiedData, ok := hookCtx.Data.(map[string]interface{}); ok {
			updates = modifiedData
		}

		// Merge non-nil, non-empty values
		// NOTE(review): nil and empty-string values are skipped, so this API
		// cannot clear a column to NULL or "" — confirm this is intended.
		for key, newValue := range updates {
			if newValue == nil {
				continue
			}
			if strVal, ok := newValue.(string); ok && strVal == "" {
				continue
			}
			existingMap[key] = newValue
		}

		q := tx.NewUpdate().Table(tableName).SetMap(existingMap).
			Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), id)
		res, err := q.Exec(ctx)
		if err != nil {
			return fmt.Errorf("error updating record: %w", err)
		}
		if res.RowsAffected() == 0 {
			return fmt.Errorf("no records found to update")
		}

		updateResult = existingMap
		hookCtx.Result = updateResult
		return h.hooks.Execute(AfterUpdate, hookCtx)
	})

	if err != nil {
		return nil, err
	}
	return updateResult, nil
}
|
||||||
|
|
||||||
|
// executeDelete deletes a record by ID.
//
// BeforeHandle and BeforeDelete run before the transaction. Inside the
// transaction the record is fetched (so it can be returned and handed to
// AfterDelete as Result), deleted by primary key, and the row count checked.
// The deleted record is returned on success.
func (h *Handler) executeDelete(ctx context.Context, schema, entity, id string) (interface{}, error) {
	if id == "" {
		return nil, fmt.Errorf("delete requires an ID")
	}

	model, err := h.registry.GetModelByEntity(schema, entity)
	if err != nil {
		return nil, fmt.Errorf("model not found: %w", err)
	}

	result, err := common.ValidateAndUnwrapModel(model)
	if err != nil {
		return nil, fmt.Errorf("invalid model: %w", err)
	}

	model = result.Model
	tableName := h.getTableName(schema, entity, model)
	ctx = withRequestData(ctx, schema, entity, tableName, model, result.ModelPtr)

	pkName := reflection.GetPrimaryKeyName(model)

	hookCtx := &HookContext{
		Context:   ctx,
		Handler:   h,
		Schema:    schema,
		Entity:    entity,
		Model:     model,
		Operation: "delete",
		ID:        id,
		Tx:        h.db,
	}
	if err := h.hooks.Execute(BeforeHandle, hookCtx); err != nil {
		return nil, err
	}
	if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
		return nil, err
	}

	modelType := reflect.TypeOf(model)
	if modelType.Kind() == reflect.Ptr {
		modelType = modelType.Elem()
	}

	var recordToDelete interface{}

	err = h.db.RunInTransaction(ctx, func(tx common.Database) error {
		// Fetch the record first so it can be returned after deletion.
		record := reflect.New(modelType).Interface()
		selectQuery := tx.NewSelect().Model(record).
			Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), id)
		if err := selectQuery.ScanModel(ctx); err != nil {
			if err == sql.ErrNoRows {
				return fmt.Errorf("record not found")
			}
			return fmt.Errorf("error fetching record: %w", err)
		}

		res, err := tx.NewDelete().Table(tableName).
			Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), id).
			Exec(ctx)
		if err != nil {
			return fmt.Errorf("delete error: %w", err)
		}
		if res.RowsAffected() == 0 {
			return fmt.Errorf("record not found or already deleted")
		}

		recordToDelete = record
		// AfterDelete runs inside the transaction, with the tx handle.
		hookCtx.Tx = tx
		hookCtx.Result = record
		return h.hooks.Execute(AfterDelete, hookCtx)
	})
	if err != nil {
		return nil, err
	}

	logger.Info("[resolvemcp] Deleted record %s from %s.%s", id, schema, entity)
	return recordToDelete, nil
}
|
||||||
|
|
||||||
|
// applyFilters applies all filters with OR grouping logic.
//
// Grouping rule: a filter followed by one or more filters whose LogicOperator
// is "OR" (case-insensitive) opens an OR group — the run is joined with OR
// inside parentheses by applyFilterGroup. Every other filter is ANDed on
// individually via Where. Filters that produce no condition are dropped.
func (h *Handler) applyFilters(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
	if len(filters) == 0 {
		return query
	}

	i := 0
	for i < len(filters) {
		// The NEXT filter's operator decides whether filters[i] opens a group.
		startORGroup := i+1 < len(filters) && strings.EqualFold(filters[i+1].LogicOperator, "OR")

		if startORGroup {
			orGroup := []common.FilterOption{filters[i]}
			j := i + 1
			for j < len(filters) && strings.EqualFold(filters[j].LogicOperator, "OR") {
				orGroup = append(orGroup, filters[j])
				j++
			}
			query = h.applyFilterGroup(query, orGroup)
			i = j
		} else {
			condition, args := h.buildFilterCondition(filters[i])
			if condition != "" {
				query = query.Where(condition, args...)
			}
			i++
		}
	}

	return query
}
|
||||||
|
|
||||||
|
func (h *Handler) applyFilterGroup(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
|
||||||
|
var conditions []string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
for _, filter := range filters {
|
||||||
|
condition, filterArgs := h.buildFilterCondition(filter)
|
||||||
|
if condition != "" {
|
||||||
|
conditions = append(conditions, condition)
|
||||||
|
args = append(args, filterArgs...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(conditions) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
if len(conditions) == 1 {
|
||||||
|
return query.Where(conditions[0], args...)
|
||||||
|
}
|
||||||
|
return query.Where("("+strings.Join(conditions, " OR ")+")", args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) buildFilterCondition(filter common.FilterOption) (string, []interface{}) {
|
||||||
|
switch filter.Operator {
|
||||||
|
case "eq", "=":
|
||||||
|
return fmt.Sprintf("%s = ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "neq", "!=", "<>":
|
||||||
|
return fmt.Sprintf("%s != ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "gt", ">":
|
||||||
|
return fmt.Sprintf("%s > ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "gte", ">=":
|
||||||
|
return fmt.Sprintf("%s >= ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "lt", "<":
|
||||||
|
return fmt.Sprintf("%s < ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "lte", "<=":
|
||||||
|
return fmt.Sprintf("%s <= ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "like":
|
||||||
|
return fmt.Sprintf("%s LIKE ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "ilike":
|
||||||
|
return fmt.Sprintf("%s ILIKE ?", filter.Column), []interface{}{filter.Value}
|
||||||
|
case "in":
|
||||||
|
condition, args := common.BuildInCondition(filter.Column, filter.Value)
|
||||||
|
return condition, args
|
||||||
|
case "is_null":
|
||||||
|
return fmt.Sprintf("%s IS NULL", filter.Column), nil
|
||||||
|
case "is_not_null":
|
||||||
|
return fmt.Sprintf("%s IS NOT NULL", filter.Column), nil
|
||||||
|
}
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) applyPreloads(model interface{}, query common.SelectQuery, preloads []common.PreloadOption) (common.SelectQuery, error) {
|
||||||
|
for _, preload := range preloads {
|
||||||
|
if preload.Relation == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
query = query.PreloadRelation(preload.Relation)
|
||||||
|
}
|
||||||
|
return query, nil
|
||||||
|
}
|
||||||
113
pkg/resolvemcp/hooks.go
Normal file
113
pkg/resolvemcp/hooks.go
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HookType defines the type of hook to execute
type HookType string

const (
	// BeforeHandle fires after model resolution, before operation dispatch.
	BeforeHandle HookType = "before_handle"

	// Read lifecycle: before the query executes / after data is scanned.
	BeforeRead HookType = "before_read"
	AfterRead  HookType = "after_read"

	// Create lifecycle: before insert / after a successful insert.
	BeforeCreate HookType = "before_create"
	AfterCreate  HookType = "after_create"

	// Update lifecycle hooks, fired around the update of a record.
	BeforeUpdate HookType = "before_update"
	AfterUpdate  HookType = "after_update"

	// Delete lifecycle hooks, fired around the deletion of a record.
	BeforeDelete HookType = "before_delete"
	AfterDelete  HookType = "after_delete"
)
|
||||||
|
|
||||||
|
// HookContext contains all the data available to a hook
type HookContext struct {
	Context context.Context // request-scoped context carrying schema/entity/model data
	Handler *Handler        // the handler executing the operation
	Schema  string          // resolved schema name
	Entity  string          // resolved entity name
	Model   interface{}     // unwrapped model instance for the entity

	Options   common.RequestOptions // filtered request options (read operations)
	Operation string                // "read", "create", "update", or "delete"
	ID        string                // record ID, when the operation targets one
	Data      interface{}           // input payload; hooks may replace it before writes
	Result    interface{}           // operation result, populated for After* hooks
	Error     error                 // NOTE(review): not set by Execute in this file — confirm intended use
	Query     common.SelectQuery    // assembled query, populated for BeforeRead

	// Abort lets a hook stop the operation without returning an error itself;
	// Execute turns it into an "operation aborted" error using AbortMessage.
	Abort        bool
	AbortMessage string
	AbortCode    int // NOTE(review): not consulted by Execute in this package — confirm intended use

	Tx common.Database // database handle, or the active transaction for in-tx hooks
}
|
||||||
|
|
||||||
|
// HookFunc is the signature for hook functions.
// Returning a non-nil error aborts the hook chain and the operation
// (see HookRegistry.Execute).
type HookFunc func(*HookContext) error

// HookRegistry manages all registered hooks, keyed by hook type.
// Hooks execute in registration order. NOTE(review): the map is not
// synchronized — concurrent Register/Execute would race; confirm all
// registration happens during startup.
type HookRegistry struct {
	hooks map[HookType][]HookFunc
}
|
||||||
|
|
||||||
|
func NewHookRegistry() *HookRegistry {
|
||||||
|
return &HookRegistry{
|
||||||
|
hooks: make(map[HookType][]HookFunc),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HookRegistry) Register(hookType HookType, hook HookFunc) {
|
||||||
|
if r.hooks == nil {
|
||||||
|
r.hooks = make(map[HookType][]HookFunc)
|
||||||
|
}
|
||||||
|
r.hooks[hookType] = append(r.hooks[hookType], hook)
|
||||||
|
logger.Info("Registered resolvemcp hook for %s (total: %d)", hookType, len(r.hooks[hookType]))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HookRegistry) RegisterMultiple(hookTypes []HookType, hook HookFunc) {
|
||||||
|
for _, hookType := range hookTypes {
|
||||||
|
r.Register(hookType, hook)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HookRegistry) Execute(hookType HookType, ctx *HookContext) error {
|
||||||
|
hooks, exists := r.hooks[hookType]
|
||||||
|
if !exists || len(hooks) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Debug("Executing %d resolvemcp hook(s) for %s", len(hooks), hookType)
|
||||||
|
|
||||||
|
for i, hook := range hooks {
|
||||||
|
if err := hook(ctx); err != nil {
|
||||||
|
logger.Error("resolvemcp hook %d for %s failed: %v", i+1, hookType, err)
|
||||||
|
return fmt.Errorf("hook execution failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.Abort {
|
||||||
|
logger.Warn("resolvemcp hook %d for %s requested abort: %s", i+1, hookType, ctx.AbortMessage)
|
||||||
|
return fmt.Errorf("operation aborted by hook: %s", ctx.AbortMessage)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear removes every hook registered for the given hook type.
// Deleting a missing key is a no-op, so Clear is safe for types that
// were never registered.
func (r *HookRegistry) Clear(hookType HookType) {
	delete(r.hooks, hookType)
}
|
||||||
|
|
||||||
|
// ClearAll drops every registered hook by replacing the map with a fresh one.
func (r *HookRegistry) ClearAll() {
	r.hooks = make(map[HookType][]HookFunc)
}
|
||||||
|
|
||||||
|
func (r *HookRegistry) HasHooks(hookType HookType) bool {
|
||||||
|
hooks, exists := r.hooks[hookType]
|
||||||
|
return exists && len(hooks) > 0
|
||||||
|
}
|
||||||
100
pkg/resolvemcp/resolvemcp.go
Normal file
100
pkg/resolvemcp/resolvemcp.go
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
// Package resolvemcp exposes registered database models as Model Context Protocol (MCP) tools
|
||||||
|
// and resources over HTTP/SSE transport.
|
||||||
|
//
|
||||||
|
// It mirrors the resolvespec package patterns:
|
||||||
|
// - Same model registration API
|
||||||
|
// - Same filter, sort, cursor pagination, preload options
|
||||||
|
// - Same lifecycle hook system
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
//
|
||||||
|
// handler := resolvemcp.NewHandlerWithGORM(db, resolvemcp.Config{BaseURL: "http://localhost:8080"})
|
||||||
|
// handler.RegisterModel("public", "users", &User{})
|
||||||
|
//
|
||||||
|
// r := mux.NewRouter()
|
||||||
|
// resolvemcp.SetupMuxRoutes(r, handler)
|
||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/gorilla/mux"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
bunrouter "github.com/uptrace/bunrouter"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common/adapters/database"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config holds configuration for the resolvemcp handler.
type Config struct {
	// BaseURL is the public-facing base URL of the server (e.g. "http://localhost:8080").
	// It is sent to MCP clients during the SSE handshake so they know where to POST messages.
	BaseURL string

	// BasePath is the URL path prefix where the MCP endpoints are mounted (e.g. "/mcp").
	// If empty, the path is detected from each incoming request automatically
	// (see NewSSEServer).
	BasePath string
}
|
||||||
|
|
||||||
|
// NewHandlerWithGORM creates a Handler backed by a GORM database connection.
|
||||||
|
func NewHandlerWithGORM(db *gorm.DB, cfg Config) *Handler {
|
||||||
|
return NewHandler(database.NewGormAdapter(db), modelregistry.NewModelRegistry(), cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewHandlerWithBun creates a Handler backed by a Bun database connection.
|
||||||
|
func NewHandlerWithBun(db *bun.DB, cfg Config) *Handler {
|
||||||
|
return NewHandler(database.NewBunAdapter(db), modelregistry.NewModelRegistry(), cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewHandlerWithDB creates a Handler using an existing common.Database and a new registry.
|
||||||
|
func NewHandlerWithDB(db common.Database, cfg Config) *Handler {
|
||||||
|
return NewHandler(db, modelregistry.NewModelRegistry(), cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetupMuxRoutes mounts the MCP HTTP/SSE endpoints on the given Gorilla Mux router
|
||||||
|
// using the base path from Config.BasePath (falls back to "/mcp" if empty).
|
||||||
|
//
|
||||||
|
// Two routes are registered:
|
||||||
|
// - GET {basePath}/sse — SSE connection endpoint (client subscribes here)
|
||||||
|
// - POST {basePath}/message — JSON-RPC message endpoint (client sends requests here)
|
||||||
|
//
|
||||||
|
// To protect these routes with authentication, wrap the mux router or apply middleware
|
||||||
|
// before calling SetupMuxRoutes.
|
||||||
|
func SetupMuxRoutes(muxRouter *mux.Router, handler *Handler) {
|
||||||
|
basePath := handler.config.BasePath
|
||||||
|
h := handler.SSEServer()
|
||||||
|
|
||||||
|
muxRouter.Handle(basePath+"/sse", h).Methods("GET", "OPTIONS")
|
||||||
|
muxRouter.Handle(basePath+"/message", h).Methods("POST", "OPTIONS")
|
||||||
|
|
||||||
|
// Convenience: also expose the full SSE server at basePath for clients that
|
||||||
|
// use ServeHTTP directly (e.g. net/http default mux).
|
||||||
|
muxRouter.PathPrefix(basePath).Handler(http.StripPrefix(basePath, h))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetupBunRouterRoutes mounts the MCP HTTP/SSE endpoints on a bunrouter router
|
||||||
|
// using the base path from Config.BasePath.
|
||||||
|
//
|
||||||
|
// Two routes are registered:
|
||||||
|
// - GET {basePath}/sse — SSE connection endpoint
|
||||||
|
// - POST {basePath}/message — JSON-RPC message endpoint
|
||||||
|
func SetupBunRouterRoutes(router *bunrouter.Router, handler *Handler) {
|
||||||
|
basePath := handler.config.BasePath
|
||||||
|
h := handler.SSEServer()
|
||||||
|
|
||||||
|
router.GET(basePath+"/sse", bunrouter.HTTPHandler(h))
|
||||||
|
router.POST(basePath+"/message", bunrouter.HTTPHandler(h))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSSEServer returns an http.Handler that serves MCP over SSE.
// If Config.BasePath is set it is used directly; otherwise the base path is
// detected from each incoming request (by stripping the "/sse" or "/message" suffix).
//
// Thin wrapper over Handler.SSEServer for callers mounting the handler manually:
//
//	h := resolvemcp.NewSSEServer(handler)
//	http.Handle("/api/mcp/", h)
func NewSSEServer(handler *Handler) http.Handler {
	return handler.SSEServer()
}
|
||||||
692
pkg/resolvemcp/tools.go
Normal file
692
pkg/resolvemcp/tools.go
Normal file
@@ -0,0 +1,692 @@
|
|||||||
|
package resolvemcp
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/mark3labs/mcp-go/mcp"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/reflection"
|
||||||
|
)
|
||||||
|
|
||||||
|
// toolName builds the MCP tool name for a given operation and model,
// joining operation, schema, and entity with underscores and omitting
// the schema segment when it is empty (e.g. "read_public_users", "read_users").
func toolName(operation, schema, entity string) string {
	parts := []string{operation, entity}
	if schema != "" {
		parts = []string{operation, schema, entity}
	}
	return strings.Join(parts, "_")
}
|
||||||
|
|
||||||
|
// registerModelTools registers the four CRUD tools and resource for a model.
|
||||||
|
func registerModelTools(h *Handler, schema, entity string, model interface{}) {
|
||||||
|
info := buildModelInfo(schema, entity, model)
|
||||||
|
registerReadTool(h, schema, entity, info)
|
||||||
|
registerCreateTool(h, schema, entity, info)
|
||||||
|
registerUpdateTool(h, schema, entity, info)
|
||||||
|
registerDeleteTool(h, schema, entity, info)
|
||||||
|
registerModelResource(h, schema, entity, info)
|
||||||
|
|
||||||
|
logger.Info("[resolvemcp] Registered MCP tools for %s", info.fullName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Model introspection
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// modelInfo holds pre-computed metadata for a model used in tool descriptions.
// It is built once per model by buildModelInfo and shared by all five
// tool/resource registrations.
type modelInfo struct {
	fullName string // e.g. "public.users"
	pkName   string // e.g. "id"
	columns  []columnInfo // scalar columns only; relation fields are excluded
	relationNames []string // JSON names of preloadable relation fields
	schemaDoc string // formatted multi-line schema listing (see buildSchemaDoc)
}
|
||||||
|
|
||||||
|
// columnInfo describes one scalar column of a model, as derived from
// reflection in buildModelInfo.
type columnInfo struct {
	jsonName string // name from the `json` struct tag (falls back to field name)
	sqlName  string // SQL column name; falls back to jsonName when unknown
	goType   string // Go type name, pointer-unwrapped
	sqlType  string // SQL data type from the model tags, may be empty
	isPrimary bool // primary key (tag- or name-based detection)
	isUnique  bool
	isFK      bool
	nullable  bool
}
|
||||||
|
|
||||||
|
// buildModelInfo extracts column metadata and pre-builds the schema documentation string.
// Relation fields (slices and user-defined structs) are collected into
// relationNames instead of columns; scalar fields become columnInfo entries.
// If model is not (a pointer/slice chain ending in) a struct, only fullName
// and pkName are populated.
func buildModelInfo(schema, entity string, model interface{}) modelInfo {
	info := modelInfo{
		fullName: buildModelName(schema, entity),
		pkName:   reflection.GetPrimaryKeyName(model),
	}

	// Unwrap to base struct type (handles *T, []T, *[]*T, etc.)
	modelType := reflect.TypeOf(model)
	for modelType != nil && (modelType.Kind() == reflect.Ptr || modelType.Kind() == reflect.Slice) {
		modelType = modelType.Elem()
	}
	if modelType == nil || modelType.Kind() != reflect.Struct {
		return info
	}

	details := reflection.GetModelColumnDetail(reflect.New(modelType).Elem())

	for _, d := range details {
		// Derive the JSON name from the struct field; "-" means excluded from JSON.
		jsonName := fieldJSONName(modelType, d.Name)
		if jsonName == "" || jsonName == "-" {
			continue
		}

		// Skip relation fields (slice or user-defined struct that isn't time.Time).
		// NOTE(review): the time.Time check matches any struct named "Time" —
		// a user type named Time would be treated as scalar; confirm acceptable.
		fieldType, found := modelType.FieldByName(d.Name)
		if found {
			ft := fieldType.Type
			if ft.Kind() == reflect.Ptr {
				ft = ft.Elem()
			}
			isUserStruct := ft.Kind() == reflect.Struct && ft.Name() != "Time" && ft.PkgPath() != ""
			if ft.Kind() == reflect.Slice || isUserStruct {
				info.relationNames = append(info.relationNames, jsonName)
				continue
			}
		}

		sqlName := d.SQLName
		if sqlName == "" {
			sqlName = jsonName
		}

		// Derive Go type name, unwrapping pointer if needed.
		goType := d.DataType
		if goType == "" && found {
			ft := fieldType.Type
			for ft.Kind() == reflect.Ptr {
				ft = ft.Elem()
			}
			goType = ft.Name()
		}

		// isPrimary: use both the GORM-tag detection and a name comparison against
		// the known primary key (handles camelCase "primaryKey" tags correctly).
		isPrimary := d.SQLKey == "primary_key" ||
			(info.pkName != "" && (sqlName == info.pkName || jsonName == info.pkName))

		ci := columnInfo{
			jsonName:  jsonName,
			sqlName:   sqlName,
			goType:    goType,
			sqlType:   d.SQLDataType,
			isPrimary: isPrimary,
			isUnique:  d.SQLKey == "unique" || d.SQLKey == "uniqueindex",
			isFK:      d.SQLKey == "foreign_key",
			nullable:  d.Nullable,
		}
		info.columns = append(info.columns, ci)
	}

	info.schemaDoc = buildSchemaDoc(info)
	return info
}
|
||||||
|
|
||||||
|
// fieldJSONName returns the JSON tag name for a struct field, falling back to the field name.
|
||||||
|
func fieldJSONName(modelType reflect.Type, fieldName string) string {
|
||||||
|
field, ok := modelType.FieldByName(fieldName)
|
||||||
|
if !ok {
|
||||||
|
return fieldName
|
||||||
|
}
|
||||||
|
tag := field.Tag.Get("json")
|
||||||
|
if tag == "" {
|
||||||
|
return fieldName
|
||||||
|
}
|
||||||
|
parts := strings.SplitN(tag, ",", 2)
|
||||||
|
if parts[0] == "" {
|
||||||
|
return fieldName
|
||||||
|
}
|
||||||
|
return parts[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildSchemaDoc builds a human-readable column listing for inclusion in tool descriptions.
|
||||||
|
func buildSchemaDoc(info modelInfo) string {
|
||||||
|
if len(info.columns) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var sb strings.Builder
|
||||||
|
sb.WriteString("Columns:\n")
|
||||||
|
for _, c := range info.columns {
|
||||||
|
line := fmt.Sprintf(" • %s", c.jsonName)
|
||||||
|
|
||||||
|
typeDesc := c.goType
|
||||||
|
if c.sqlType != "" {
|
||||||
|
typeDesc = c.sqlType
|
||||||
|
}
|
||||||
|
if typeDesc != "" {
|
||||||
|
line += fmt.Sprintf(" (%s)", typeDesc)
|
||||||
|
}
|
||||||
|
|
||||||
|
var flags []string
|
||||||
|
if c.isPrimary {
|
||||||
|
flags = append(flags, "primary key")
|
||||||
|
}
|
||||||
|
if c.isUnique {
|
||||||
|
flags = append(flags, "unique")
|
||||||
|
}
|
||||||
|
if c.isFK {
|
||||||
|
flags = append(flags, "foreign key")
|
||||||
|
}
|
||||||
|
if !c.nullable && !c.isPrimary {
|
||||||
|
flags = append(flags, "not null")
|
||||||
|
} else if c.nullable {
|
||||||
|
flags = append(flags, "nullable")
|
||||||
|
}
|
||||||
|
if len(flags) > 0 {
|
||||||
|
line += " — " + strings.Join(flags, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.WriteString(line + "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(info.relationNames) > 0 {
|
||||||
|
sb.WriteString("Relations (preloadable): " + strings.Join(info.relationNames, ", ") + "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// columnNameList returns a comma-separated list of JSON column names (for descriptions).
|
||||||
|
func columnNameList(cols []columnInfo) string {
|
||||||
|
names := make([]string, len(cols))
|
||||||
|
for i, c := range cols {
|
||||||
|
names[i] = c.jsonName
|
||||||
|
}
|
||||||
|
return strings.Join(names, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// writableColumnNames returns JSON names for all non-primary-key columns.
|
||||||
|
func writableColumnNames(cols []columnInfo) []string {
|
||||||
|
var names []string
|
||||||
|
for _, c := range cols {
|
||||||
|
if !c.isPrimary {
|
||||||
|
names = append(names, c.jsonName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Read tool
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func registerReadTool(h *Handler, schema, entity string, info modelInfo) {
|
||||||
|
name := toolName("read", schema, entity)
|
||||||
|
|
||||||
|
var descParts []string
|
||||||
|
descParts = append(descParts, fmt.Sprintf("Read records from the '%s' database table.", info.fullName))
|
||||||
|
if info.pkName != "" {
|
||||||
|
descParts = append(descParts, fmt.Sprintf("Primary key: '%s'. Pass it via 'id' to fetch a single record.", info.pkName))
|
||||||
|
}
|
||||||
|
if info.schemaDoc != "" {
|
||||||
|
descParts = append(descParts, info.schemaDoc)
|
||||||
|
}
|
||||||
|
descParts = append(descParts,
|
||||||
|
"Pagination: use 'limit'/'offset' for offset-based paging, or 'cursor_forward'/'cursor_backward' (pass the primary key value of the last/first record on the current page) for cursor-based paging.",
|
||||||
|
"Filtering: each filter object requires 'column' (JSON field name) and 'operator'. Supported operators: = != > < >= <= like ilike in is_null is_not_null. Combine with 'logic_operator': AND (default) or OR.",
|
||||||
|
"Sorting: each sort object requires 'column' and 'direction' (asc or desc).",
|
||||||
|
)
|
||||||
|
if len(info.relationNames) > 0 {
|
||||||
|
descParts = append(descParts, fmt.Sprintf("Preloadable relations: %s. Pass relation name in 'preloads'.", strings.Join(info.relationNames, ", ")))
|
||||||
|
}
|
||||||
|
|
||||||
|
description := strings.Join(descParts, "\n\n")
|
||||||
|
|
||||||
|
filterDesc := `Array of filter objects. Example: [{"column":"status","operator":"=","value":"active"},{"column":"age","operator":">","value":18,"logic_operator":"AND"}]`
|
||||||
|
if len(info.columns) > 0 {
|
||||||
|
filterDesc += fmt.Sprintf(" Available columns: %s.", columnNameList(info.columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
sortDesc := `Array of sort objects. Example: [{"column":"created_at","direction":"desc"}]`
|
||||||
|
if len(info.columns) > 0 {
|
||||||
|
sortDesc += fmt.Sprintf(" Available columns: %s.", columnNameList(info.columns))
|
||||||
|
}
|
||||||
|
|
||||||
|
tool := mcp.NewTool(name,
|
||||||
|
mcp.WithDescription(description),
|
||||||
|
mcp.WithString("id",
|
||||||
|
mcp.Description(fmt.Sprintf("Primary key (%s) of a single record to fetch. Omit to return multiple records.", info.pkName)),
|
||||||
|
),
|
||||||
|
mcp.WithNumber("limit",
|
||||||
|
mcp.Description("Maximum number of records to return per page. Recommended: 10–100."),
|
||||||
|
),
|
||||||
|
mcp.WithNumber("offset",
|
||||||
|
mcp.Description("Number of records to skip (for offset-based pagination). Use with 'limit'."),
|
||||||
|
),
|
||||||
|
mcp.WithString("cursor_forward",
|
||||||
|
mcp.Description(fmt.Sprintf("Cursor for the next page: pass the '%s' value of the last record on the current page. Requires 'sort' to be set.", info.pkName)),
|
||||||
|
),
|
||||||
|
mcp.WithString("cursor_backward",
|
||||||
|
mcp.Description(fmt.Sprintf("Cursor for the previous page: pass the '%s' value of the first record on the current page. Requires 'sort' to be set.", info.pkName)),
|
||||||
|
),
|
||||||
|
mcp.WithArray("columns",
|
||||||
|
mcp.Description(fmt.Sprintf("Columns to include in the result. Omit to return all columns. Available: %s.", columnNameList(info.columns))),
|
||||||
|
),
|
||||||
|
mcp.WithArray("omit_columns",
|
||||||
|
mcp.Description(fmt.Sprintf("Columns to exclude from the result. Available: %s.", columnNameList(info.columns))),
|
||||||
|
),
|
||||||
|
mcp.WithArray("filters",
|
||||||
|
mcp.Description(filterDesc),
|
||||||
|
),
|
||||||
|
mcp.WithArray("sort",
|
||||||
|
mcp.Description(sortDesc),
|
||||||
|
),
|
||||||
|
mcp.WithArray("preloads",
|
||||||
|
mcp.Description(buildPreloadDesc(info)),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
h.mcpServer.AddTool(tool, func(ctx context.Context, req mcp.CallToolRequest) (*mcp.CallToolResult, error) {
|
||||||
|
args := req.GetArguments()
|
||||||
|
id, _ := args["id"].(string)
|
||||||
|
options := parseRequestOptions(args)
|
||||||
|
|
||||||
|
data, metadata, err := h.executeRead(ctx, schema, entity, id, options)
|
||||||
|
if err != nil {
|
||||||
|
return mcp.NewToolResultError(err.Error()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalResult(map[string]interface{}{
|
||||||
|
"success": true,
|
||||||
|
"data": data,
|
||||||
|
"metadata": metadata,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildPreloadDesc(info modelInfo) string {
|
||||||
|
if len(info.relationNames) == 0 {
|
||||||
|
return `Array of relation preload objects. Each object: {"relation":"RelationName"}. No relations defined on this model.`
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(
|
||||||
|
`Array of relation preload objects. Each object: {"relation":"RelationName","columns":["col1","col2"]}. Available relations: %s.`,
|
||||||
|
strings.Join(info.relationNames, ", "),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Create tool
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// registerCreateTool registers the "create_{schema}_{entity}" MCP tool.
// 'data' accepts either a single object or an array of objects; the
// description documents writable (non-primary-key) fields and embeds the
// model's column schema.
func registerCreateTool(h *Handler, schema, entity string, info modelInfo) {
	name := toolName("create", schema, entity)

	writable := writableColumnNames(info.columns)

	// Compose the multi-paragraph tool description.
	var descParts []string
	descParts = append(descParts, fmt.Sprintf("Create one or more new records in the '%s' table.", info.fullName))
	if len(writable) > 0 {
		descParts = append(descParts, fmt.Sprintf("Writable fields: %s.", strings.Join(writable, ", ")))
	}
	if info.pkName != "" {
		descParts = append(descParts, fmt.Sprintf("The primary key ('%s') is typically auto-generated — omit it unless you need to supply it explicitly.", info.pkName))
	}
	descParts = append(descParts,
		"Pass a single JSON object to 'data' to create one record. Pass an array of objects to create multiple records in a single transaction (all succeed or all fail).",
	)
	if info.schemaDoc != "" {
		descParts = append(descParts, info.schemaDoc)
	}

	description := strings.Join(descParts, "\n\n")

	dataDesc := "Record fields to create."
	if len(writable) > 0 {
		dataDesc += fmt.Sprintf(" Writable fields: %s.", strings.Join(writable, ", "))
	}
	dataDesc += " Pass a single object or an array of objects."

	tool := mcp.NewTool(name,
		mcp.WithDescription(description),
		mcp.WithObject("data",
			mcp.Description(dataDesc),
			mcp.Required(),
		),
	)

	// Tool handler: forward 'data' (object or array) unchanged to executeCreate.
	h.mcpServer.AddTool(tool, func(ctx context.Context, req mcp.CallToolRequest) (*mcp.CallToolResult, error) {
		args := req.GetArguments()
		data, ok := args["data"]
		if !ok {
			return mcp.NewToolResultError("missing required argument: data"), nil
		}

		result, err := h.executeCreate(ctx, schema, entity, data)
		if err != nil {
			return mcp.NewToolResultError(err.Error()), nil
		}

		return marshalResult(map[string]interface{}{
			"success": true,
			"data":    result,
		})
	})
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Update tool
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// registerUpdateTool registers the "update_{schema}_{entity}" MCP tool.
// The target record is identified via 'id' or via the primary key inside
// 'data'; unlike create, 'data' must be a single object (not an array).
func registerUpdateTool(h *Handler, schema, entity string, info modelInfo) {
	name := toolName("update", schema, entity)

	writable := writableColumnNames(info.columns)

	// Compose the multi-paragraph tool description.
	var descParts []string
	descParts = append(descParts, fmt.Sprintf("Update an existing record in the '%s' table.", info.fullName))
	if info.pkName != "" {
		descParts = append(descParts, fmt.Sprintf("Identify the record by its primary key ('%s') via the 'id' argument or by including '%s' inside 'data'.", info.pkName, info.pkName))
	}
	if len(writable) > 0 {
		descParts = append(descParts, fmt.Sprintf("Updatable fields: %s.", strings.Join(writable, ", ")))
	}
	descParts = append(descParts,
		"Only non-null, non-empty fields in 'data' are applied — existing values are preserved for fields you omit. Returns the merged record as stored.",
	)
	if info.schemaDoc != "" {
		descParts = append(descParts, info.schemaDoc)
	}

	description := strings.Join(descParts, "\n\n")

	idDesc := fmt.Sprintf("Primary key ('%s') of the record to update. Can also be included inside 'data'.", info.pkName)

	dataDesc := "Fields to update (non-null, non-empty values are merged into the existing record)."
	if len(writable) > 0 {
		dataDesc += fmt.Sprintf(" Updatable fields: %s.", strings.Join(writable, ", "))
	}

	tool := mcp.NewTool(name,
		mcp.WithDescription(description),
		mcp.WithString("id",
			mcp.Description(idDesc),
		),
		mcp.WithObject("data",
			mcp.Description(dataDesc),
			mcp.Required(),
		),
	)

	// Tool handler: 'data' must decode to a JSON object; both missing and
	// mistyped 'data' are reported as MCP tool errors.
	h.mcpServer.AddTool(tool, func(ctx context.Context, req mcp.CallToolRequest) (*mcp.CallToolResult, error) {
		args := req.GetArguments()
		id, _ := args["id"].(string)

		data, ok := args["data"]
		if !ok {
			return mcp.NewToolResultError("missing required argument: data"), nil
		}
		dataMap, ok := data.(map[string]interface{})
		if !ok {
			return mcp.NewToolResultError("data must be an object"), nil
		}

		result, err := h.executeUpdate(ctx, schema, entity, id, dataMap)
		if err != nil {
			return mcp.NewToolResultError(err.Error()), nil
		}

		return marshalResult(map[string]interface{}{
			"success": true,
			"data":    result,
		})
	})
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Delete tool
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// registerDeleteTool registers the "delete_{schema}_{entity}" MCP tool.
// Deletion is by primary key only; the 'id' argument is required.
func registerDeleteTool(h *Handler, schema, entity string, info modelInfo) {
	name := toolName("delete", schema, entity)

	// Single-paragraph description (sentences joined by spaces, unlike the
	// multi-paragraph read/create/update descriptions).
	descParts := []string{
		fmt.Sprintf("Delete a record from the '%s' table by its primary key.", info.fullName),
	}
	if info.pkName != "" {
		descParts = append(descParts, fmt.Sprintf("Pass the '%s' value of the record to delete via the 'id' argument.", info.pkName))
	}
	descParts = append(descParts, "Returns the deleted record. This operation is irreversible.")

	description := strings.Join(descParts, " ")

	tool := mcp.NewTool(name,
		mcp.WithDescription(description),
		mcp.WithString("id",
			mcp.Description(fmt.Sprintf("Primary key ('%s') of the record to delete.", info.pkName)),
			mcp.Required(),
		),
	)

	// Tool handler: delegate to executeDelete; errors become MCP tool errors.
	h.mcpServer.AddTool(tool, func(ctx context.Context, req mcp.CallToolRequest) (*mcp.CallToolResult, error) {
		args := req.GetArguments()
		id, _ := args["id"].(string)

		result, err := h.executeDelete(ctx, schema, entity, id)
		if err != nil {
			return mcp.NewToolResultError(err.Error()), nil
		}

		return marshalResult(map[string]interface{}{
			"success": true,
			"data":    result,
		})
	})
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Resource registration
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// registerModelResource registers an MCP resource for the model whose URI is
// the model's full name (e.g. "public.users"). Reading the resource returns
// the first 100 records plus metadata as JSON.
func registerModelResource(h *Handler, schema, entity string, info modelInfo) {
	resourceURI := info.fullName

	// Resource description: table name, primary key, and the column schema.
	var resourceDesc strings.Builder
	fmt.Fprintf(&resourceDesc, "Database table: %s", info.fullName)
	if info.pkName != "" {
		fmt.Fprintf(&resourceDesc, " (primary key: %s)", info.pkName)
	}
	if info.schemaDoc != "" {
		resourceDesc.WriteString("\n\n")
		resourceDesc.WriteString(info.schemaDoc)
	}

	resource := mcp.NewResource(
		resourceURI,
		entity,
		mcp.WithResourceDescription(resourceDesc.String()),
		mcp.WithMIMEType("application/json"),
	)

	// Resource reader: fixed page of 100 records, no filters or sorting.
	h.mcpServer.AddResource(resource, func(ctx context.Context, req mcp.ReadResourceRequest) ([]mcp.ResourceContents, error) {
		limit := 100
		options := common.RequestOptions{Limit: &limit}

		data, metadata, err := h.executeRead(ctx, schema, entity, "", options)
		if err != nil {
			return nil, err
		}

		payload := map[string]interface{}{
			"data":     data,
			"metadata": metadata,
		}
		jsonBytes, err := json.Marshal(payload)
		if err != nil {
			return nil, fmt.Errorf("error marshaling resource: %w", err)
		}

		return []mcp.ResourceContents{
			mcp.TextResourceContents{
				URI:      req.Params.URI,
				MIMEType: "application/json",
				Text:     string(jsonBytes),
			},
		}, nil
	})
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Argument parsing helpers
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// parseRequestOptions converts raw MCP tool arguments into common.RequestOptions.
|
||||||
|
func parseRequestOptions(args map[string]interface{}) common.RequestOptions {
|
||||||
|
options := common.RequestOptions{}
|
||||||
|
|
||||||
|
if v, ok := args["limit"]; ok {
|
||||||
|
switch n := v.(type) {
|
||||||
|
case float64:
|
||||||
|
limit := int(n)
|
||||||
|
options.Limit = &limit
|
||||||
|
case int:
|
||||||
|
options.Limit = &n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if v, ok := args["offset"]; ok {
|
||||||
|
switch n := v.(type) {
|
||||||
|
case float64:
|
||||||
|
offset := int(n)
|
||||||
|
options.Offset = &offset
|
||||||
|
case int:
|
||||||
|
options.Offset = &n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if v, ok := args["cursor_forward"].(string); ok {
|
||||||
|
options.CursorForward = v
|
||||||
|
}
|
||||||
|
if v, ok := args["cursor_backward"].(string); ok {
|
||||||
|
options.CursorBackward = v
|
||||||
|
}
|
||||||
|
|
||||||
|
options.Columns = parseStringArray(args["columns"])
|
||||||
|
options.OmitColumns = parseStringArray(args["omit_columns"])
|
||||||
|
options.Filters = parseFilters(args["filters"])
|
||||||
|
options.Sort = parseSortOptions(args["sort"])
|
||||||
|
options.Preload = parsePreloadOptions(args["preloads"])
|
||||||
|
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseStringArray converts a JSON-decoded array into a []string, silently
// dropping non-string elements. Returns nil when raw is nil or not an array;
// returns an empty (non-nil) slice for an empty array.
func parseStringArray(raw interface{}) []string {
	items, ok := raw.([]interface{})
	if !ok {
		return nil
	}
	result := make([]string, 0, len(items))
	for _, item := range items {
		str, isString := item.(string)
		if isString {
			result = append(result, str)
		}
	}
	return result
}
|
||||||
|
|
||||||
|
func parseFilters(raw interface{}) []common.FilterOption {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
items, ok := raw.([]interface{})
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
result := make([]common.FilterOption, 0, len(items))
|
||||||
|
for _, item := range items {
|
||||||
|
b, err := json.Marshal(item)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var f common.FilterOption
|
||||||
|
if err := json.Unmarshal(b, &f); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if f.Column == "" || f.Operator == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.EqualFold(f.LogicOperator, "or") {
|
||||||
|
f.LogicOperator = "OR"
|
||||||
|
} else {
|
||||||
|
f.LogicOperator = "AND"
|
||||||
|
}
|
||||||
|
result = append(result, f)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseSortOptions(raw interface{}) []common.SortOption {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
items, ok := raw.([]interface{})
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
result := make([]common.SortOption, 0, len(items))
|
||||||
|
for _, item := range items {
|
||||||
|
b, err := json.Marshal(item)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var s common.SortOption
|
||||||
|
if err := json.Unmarshal(b, &s); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if s.Column == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result = append(result, s)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func parsePreloadOptions(raw interface{}) []common.PreloadOption {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
items, ok := raw.([]interface{})
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
result := make([]common.PreloadOption, 0, len(items))
|
||||||
|
for _, item := range items {
|
||||||
|
b, err := json.Marshal(item)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var p common.PreloadOption
|
||||||
|
if err := json.Unmarshal(b, &p); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if p.Relation == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result = append(result, p)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalResult marshals a value to JSON and returns it as an MCP text result.
|
||||||
|
func marshalResult(v interface{}) (*mcp.CallToolResult, error) {
|
||||||
|
b, err := json.Marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
return mcp.NewToolResultError(fmt.Sprintf("error marshaling result: %v", err)), nil
|
||||||
|
}
|
||||||
|
return mcp.NewToolResultText(string(b)), nil
|
||||||
|
}
|
||||||
572
pkg/resolvespec/EXAMPLES.md
Normal file
572
pkg/resolvespec/EXAMPLES.md
Normal file
@@ -0,0 +1,572 @@
|
|||||||
|
# ResolveSpec Query Features Examples
|
||||||
|
|
||||||
|
This document provides examples of using the advanced query features in ResolveSpec, including OR logic filters, Custom Operators, and FetchRowNumber.
|
||||||
|
|
||||||
|
## OR Logic in Filters (SearchOr)
|
||||||
|
|
||||||
|
### Basic OR Filter Example
|
||||||
|
|
||||||
|
Find all users with status "active" OR "pending":
|
||||||
|
|
||||||
|
```json
|
||||||
|
POST /users
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "pending",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Combined AND/OR Filters
|
||||||
|
|
||||||
|
Find users with (status="active" OR status="pending") AND age >= 18:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "pending",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "age",
|
||||||
|
"operator": "gte",
|
||||||
|
"value": 18
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL Generated:** `WHERE (status = 'active' OR status = 'pending') AND age >= 18`
|
||||||
|
|
||||||
|
**Important Notes:**
|
||||||
|
- By default, filters use AND logic
|
||||||
|
- Consecutive filters with `"logic_operator": "OR"` are automatically grouped with parentheses
|
||||||
|
- This grouping ensures OR conditions don't interfere with AND conditions
|
||||||
|
- You don't need to specify `"logic_operator": "AND"` as it's the default
|
||||||
|
|
||||||
|
### Multiple OR Groups
|
||||||
|
|
||||||
|
You can have multiple separate OR groups:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "pending",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "priority",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "high"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "priority",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "urgent",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL Generated:** `WHERE (status = 'active' OR status = 'pending') AND (priority = 'high' OR priority = 'urgent')`
|
||||||
|
|
||||||
|
## Custom Operators
|
||||||
|
|
||||||
|
### Simple Custom SQL Condition
|
||||||
|
|
||||||
|
Filter by email domain using custom SQL:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "company_emails",
|
||||||
|
"sql": "email LIKE '%@company.com'"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Multiple Custom Operators
|
||||||
|
|
||||||
|
Combine multiple custom SQL conditions:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "recent_active",
|
||||||
|
"sql": "last_login > NOW() - INTERVAL '30 days'"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "high_score",
|
||||||
|
"sql": "score > 1000"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Complex Custom Operator
|
||||||
|
|
||||||
|
Use complex SQL expressions:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "priority_users",
|
||||||
|
"sql": "(subscription = 'premium' AND points > 500) OR (subscription = 'enterprise')"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Combining Custom Operators with Regular Filters
|
||||||
|
|
||||||
|
Mix custom operators with standard filters:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "country",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "USA"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "active_last_month",
|
||||||
|
"sql": "last_activity > NOW() - INTERVAL '1 month'"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Row Numbers
|
||||||
|
|
||||||
|
### Two Ways to Get Row Numbers
|
||||||
|
|
||||||
|
There are two different features for row numbers:
|
||||||
|
|
||||||
|
1. **`fetch_row_number`** - Get the position of ONE specific record in a sorted/filtered set
|
||||||
|
2. **`RowNumber` field in models** - Automatically number all records in the response
|
||||||
|
|
||||||
|
### 1. FetchRowNumber - Get Position of Specific Record
|
||||||
|
|
||||||
|
Get the rank/position of a specific user in a leaderboard. **Important:** When `fetch_row_number` is specified, the response contains **ONLY that specific record**, not all records.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"direction": "desc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"fetch_row_number": "12345"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response - Contains ONLY the specified user:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": {
|
||||||
|
"id": 12345,
|
||||||
|
"name": "Alice Smith",
|
||||||
|
"score": 9850,
|
||||||
|
"level": 42
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"total": 10000,
|
||||||
|
"count": 1,
|
||||||
|
"filtered": 10000,
|
||||||
|
"row_number": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:** User "12345" is ranked #42 out of 10,000 users. The response includes only Alice's data, not the other 9,999 users.
|
||||||
|
|
||||||
|
### Row Number with Filters
|
||||||
|
|
||||||
|
Find position within a filtered subset (e.g., "What's my rank in my country?"):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "country",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "USA"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"direction": "desc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"fetch_row_number": "12345"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": {
|
||||||
|
"id": 12345,
|
||||||
|
"name": "Bob Johnson",
|
||||||
|
"country": "USA",
|
||||||
|
"score": 7200,
|
||||||
|
"status": "active"
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"total": 2500,
|
||||||
|
"count": 1,
|
||||||
|
"filtered": 2500,
|
||||||
|
"row_number": 156
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:** Bob is ranked #156 out of 2,500 active USA users. Only Bob's record is returned.
|
||||||
|
|
||||||
|
### 2. RowNumber Field - Auto-Number All Records
|
||||||
|
|
||||||
|
If your model has a `RowNumber int64` field, the handler will automatically populate it for paginated results (this works the same way in resolvespec, restheadspec, and websocketspec).
|
||||||
|
|
||||||
|
**Model Definition:**
|
||||||
|
```go
|
||||||
|
type Player struct {
|
||||||
|
ID int64 `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Score int64 `json:"score"`
|
||||||
|
RowNumber int64 `json:"row_number"` // Will be auto-populated
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Request (with pagination):**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"sort": [{"column": "score", "direction": "desc"}],
|
||||||
|
"limit": 10,
|
||||||
|
"offset": 20
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response - RowNumber automatically set:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": [
|
||||||
|
{
|
||||||
|
"id": 456,
|
||||||
|
"name": "Player21",
|
||||||
|
"score": 8900,
|
||||||
|
"row_number": 21
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 789,
|
||||||
|
"name": "Player22",
|
||||||
|
"score": 8850,
|
||||||
|
"row_number": 22
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 123,
|
||||||
|
"name": "Player23",
|
||||||
|
"score": 8800,
|
||||||
|
"row_number": 23
|
||||||
|
}
|
||||||
|
// ... records 24-30 ...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**How It Works:**
|
||||||
|
- `row_number = offset + index + 1` (1-based)
|
||||||
|
- With offset=20, first record gets row_number=21
|
||||||
|
- With offset=20, second record gets row_number=22
|
||||||
|
- Perfect for displaying "Rank" in paginated tables
|
||||||
|
|
||||||
|
**Use Case:** Displaying leaderboards with rank numbers:
|
||||||
|
```
|
||||||
|
Rank | Player | Score
|
||||||
|
-----|-----------|-------
|
||||||
|
21 | Player21 | 8900
|
||||||
|
22 | Player22 | 8850
|
||||||
|
23 | Player23 | 8800
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** This feature is available in all three packages: resolvespec, restheadspec, and websocketspec.
|
||||||
|
|
||||||
|
### When to Use Each Feature
|
||||||
|
|
||||||
|
| Feature | Use Case | Returns | Performance |
|
||||||
|
|---------|----------|---------|-------------|
|
||||||
|
| `fetch_row_number` | "What's my rank?" | 1 record with position | Fast - 1 record |
|
||||||
|
| `RowNumber` field | "Show top 10 with ranks" | Many records numbered | Fast - simple math |
|
||||||
|
|
||||||
|
**Combined Example - Full Leaderboard UI:**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Request 1: Get current user's rank
|
||||||
|
const userRank = await api.read({
|
||||||
|
fetch_row_number: currentUserId,
|
||||||
|
sort: [{column: "score", direction: "desc"}]
|
||||||
|
});
|
||||||
|
// Returns: {id: 123, name: "You", score: 7500, row_number: 156}
|
||||||
|
|
||||||
|
// Request 2: Get top 10 with rank numbers
|
||||||
|
const top10 = await api.read({
|
||||||
|
sort: [{column: "score", direction: "desc"}],
|
||||||
|
limit: 10,
|
||||||
|
offset: 0
|
||||||
|
});
|
||||||
|
// Returns: [{row_number: 1, ...}, {row_number: 2, ...}, ...]
|
||||||
|
|
||||||
|
// Display:
|
||||||
|
// "Your Rank: #156"
|
||||||
|
// "Top Players:"
|
||||||
|
// "#1 - Alice - 9999"
|
||||||
|
// "#2 - Bob - 9876"
|
||||||
|
// ...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Complete Example: Advanced Query
|
||||||
|
|
||||||
|
Combine all features for a complex query:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"columns": ["id", "name", "email", "score", "status"],
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "trial",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"operator": "gte",
|
||||||
|
"value": 100
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "recent_activity",
|
||||||
|
"sql": "last_login > NOW() - INTERVAL '7 days'"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "verified_email",
|
||||||
|
"sql": "email_verified = true"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"direction": "desc"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "created_at",
|
||||||
|
"direction": "asc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"fetch_row_number": "12345",
|
||||||
|
"limit": 50,
|
||||||
|
"offset": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This query:
|
||||||
|
- Selects specific columns
|
||||||
|
- Filters for users with status "active" OR "trial"
|
||||||
|
- AND score >= 100
|
||||||
|
- Applies custom SQL conditions for recent activity and verified emails
|
||||||
|
- Sorts by score (descending) then creation date (ascending)
|
||||||
|
- Returns the row number of user "12345" in this filtered/sorted set
|
||||||
|
- Returns 50 records starting from the first one
|
||||||
|
|
||||||
|
## Use Cases
|
||||||
|
|
||||||
|
### 1. Leaderboards - Get Current User's Rank
|
||||||
|
|
||||||
|
Get the current user's position and data (returns only their record):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "game_id",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "game123"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"direction": "desc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"fetch_row_number": "current_user_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Tip:** For full leaderboards, make two requests:
|
||||||
|
1. One with `fetch_row_number` to get user's rank
|
||||||
|
2. One with `limit` and `offset` to get top players list
|
||||||
|
|
||||||
|
### 2. Multi-Status Search
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "order_status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "pending"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "order_status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "processing",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "order_status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "shipped",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Advanced Date Filtering
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "this_month",
|
||||||
|
"sql": "created_at >= DATE_TRUNC('month', CURRENT_DATE)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "business_hours",
|
||||||
|
"sql": "EXTRACT(HOUR FROM created_at) BETWEEN 9 AND 17"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Considerations
|
||||||
|
|
||||||
|
**Warning:** Custom operators allow raw SQL, which can be a security risk if not properly handled:
|
||||||
|
|
||||||
|
1. **Never** directly interpolate user input into custom operator SQL
|
||||||
|
2. Always validate and sanitize custom operator SQL on the backend
|
||||||
|
3. Consider using a whitelist of allowed custom operators
|
||||||
|
4. Use prepared statements or parameterized queries when possible
|
||||||
|
5. Implement proper authorization checks before executing queries
|
||||||
|
|
||||||
|
Example of safe custom operator handling in Go:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Whitelist of allowed custom operators
|
||||||
|
allowedOperators := map[string]string{
|
||||||
|
"recent_week": "created_at > NOW() - INTERVAL '7 days'",
|
||||||
|
"active_users": "status = 'active' AND last_login > NOW() - INTERVAL '30 days'",
|
||||||
|
"premium_only": "subscription_level = 'premium'",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate custom operators from request
|
||||||
|
for _, op := range req.Options.CustomOperators {
|
||||||
|
if sql, ok := allowedOperators[op.Name]; ok {
|
||||||
|
op.SQL = sql // Use whitelisted SQL
|
||||||
|
} else {
|
||||||
|
return errors.New("custom operator not allowed: " + op.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
@@ -214,6 +214,146 @@ Content-Type: application/json
|
|||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "pending",
|
||||||
|
"logic_operator": "OR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "age",
|
||||||
|
"operator": "gte",
|
||||||
|
"value": 18
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Produces: `WHERE (status = 'active' OR status = 'pending') AND age >= 18`
|
||||||
|
|
||||||
|
This grouping ensures OR conditions don't interfere with other AND conditions in the query.
|
||||||
|
|
||||||
|
### Custom Operators
|
||||||
|
|
||||||
|
Add custom SQL conditions when needed:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"customOperators": [
|
||||||
|
{
|
||||||
|
"name": "email_domain_filter",
|
||||||
|
"sql": "LOWER(email) LIKE '%@example.com'"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "recent_records",
|
||||||
|
"sql": "created_at > NOW() - INTERVAL '7 days'"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Custom operators are applied as additional WHERE conditions to your query.
|
||||||
|
|
||||||
|
### Fetch Row Number
|
||||||
|
|
||||||
|
Get the row number (position) of a specific record in the filtered and sorted result set. **When `fetch_row_number` is specified, only that specific record is returned** (not all records).
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "active"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "score",
|
||||||
|
"direction": "desc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"fetch_row_number": "12345"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response - Returns ONLY the specified record with its position:**
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": {
|
||||||
|
"id": 12345,
|
||||||
|
"name": "John Doe",
|
||||||
|
"score": 850,
|
||||||
|
"status": "active"
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"total": 1000,
|
||||||
|
"count": 1,
|
||||||
|
"filtered": 1000,
|
||||||
|
"row_number": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Use Case:** Perfect for "Show me this user and their ranking" - you get just that one user with their position in the leaderboard.
|
||||||
|
|
||||||
|
**Note:** This is different from the `RowNumber` field feature, which automatically numbers all records in a paginated response based on offset. That feature uses simple math (`offset + index + 1`), while `fetch_row_number` uses SQL window functions to calculate the actual position in a sorted/filtered set. To use the `RowNumber` field feature, simply add a `RowNumber int64` field to your model - it will be automatically populated with the row position based on pagination.
|
||||||
|
|
||||||
|
## Preloading
|
||||||
|
|
||||||
|
Load related entities with custom configuration:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"operation": "read",
|
||||||
|
"options": {
|
||||||
|
"columns": ["id", "name", "email"],
|
||||||
|
"preload": [
|
||||||
|
{
|
||||||
|
"relation": "posts",
|
||||||
|
"columns": ["id", "title", "created_at"],
|
||||||
|
"filters": [
|
||||||
|
{
|
||||||
|
"column": "status",
|
||||||
|
"operator": "eq",
|
||||||
|
"value": "published"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
{
|
||||||
|
"column": "created_at",
|
||||||
|
"direction": "desc"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"limit": 5
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"relation": "profile",
|
||||||
|
"columns": ["bio", "website"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cursor Pagination
|
||||||
|
|
||||||
|
Efficient pagination for large datasets:
|
||||||
|
|
||||||
### First Request (No Cursor)
|
### First Request (No Cursor)
|
||||||
|
|
||||||
```json
|
```json
|
||||||
@@ -427,7 +567,7 @@ Define virtual columns using SQL expressions:
|
|||||||
// Check permissions
|
// Check permissions
|
||||||
if !userHasPermission(ctx.Context, ctx.Entity) {
|
if !userHasPermission(ctx.Context, ctx.Entity) {
|
||||||
return fmt.Errorf("unauthorized access to %s", ctx.Entity)
|
return fmt.Errorf("unauthorized access to %s", ctx.Entity)
|
||||||
return nil
|
}
|
||||||
|
|
||||||
// Modify query options
|
// Modify query options
|
||||||
if ctx.Options.Limit == nil || *ctx.Options.Limit > 100 {
|
if ctx.Options.Limit == nil || *ctx.Options.Limit > 100 {
|
||||||
@@ -435,17 +575,24 @@ Add custom SQL conditions when needed:
|
|||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
users[i].Email = maskEmail(users[i].Email)
|
})
|
||||||
}
|
|
||||||
// Register an after-read hook (e.g., for data transformation)
|
// Register an after-read hook (e.g., for data transformation)
|
||||||
handler.Hooks().Register(resolvespec.AfterRead, func(ctx *resolvespec.HookContext) error {
|
handler.Hooks().Register(resolvespec.AfterRead, func(ctx *resolvespec.HookContext) error {
|
||||||
})
|
// Transform or filter results
|
||||||
|
if users, ok := ctx.Result.([]User); ok {
|
||||||
|
for i := range users {
|
||||||
|
users[i].Email = maskEmail(users[i].Email)
|
||||||
|
}
|
||||||
|
}
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
// Register a before-create hook (e.g., for validation)
|
// Register a before-create hook (e.g., for validation)
|
||||||
handler.Hooks().Register(resolvespec.BeforeCreate, func(ctx *resolvespec.HookContext) error {
|
handler.Hooks().Register(resolvespec.BeforeCreate, func(ctx *resolvespec.HookContext) error {
|
||||||
// Validate data
|
// Validate data
|
||||||
|
if user, ok := ctx.Data.(*User); ok {
|
||||||
|
if user.Email == "" {
|
||||||
return fmt.Errorf("email is required")
|
return fmt.Errorf("email is required")
|
||||||
}
|
}
|
||||||
// Add timestamps
|
// Add timestamps
|
||||||
@@ -497,6 +644,7 @@ handler.Hooks().Register(resolvespec.BeforeCreate, func(ctx *resolvespec.HookCon
|
|||||||
CreatedAt time.Time `json:"created_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
Tags []Tag `json:"tags,omitempty" gorm:"many2many:post_tags"`
|
Tags []Tag `json:"tags,omitempty" gorm:"many2many:post_tags"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// Schema.Table format
|
// Schema.Table format
|
||||||
handler.registry.RegisterModel("core.users", &User{})
|
handler.registry.RegisterModel("core.users", &User{})
|
||||||
handler.registry.RegisterModel("core.posts", &Post{})
|
handler.registry.RegisterModel("core.posts", &Post{})
|
||||||
@@ -507,11 +655,13 @@ handler.Hooks().Register(resolvespec.BeforeCreate, func(ctx *resolvespec.HookCon
|
|||||||
```go
|
```go
|
||||||
package main
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/resolvespec"
|
"github.com/bitechdev/ResolveSpec/pkg/resolvespec"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
|
"gorm.io/driver/postgres"
|
||||||
"gorm.io/gorm"
|
"gorm.io/gorm"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ const (
|
|||||||
// - pkName: primary key column (e.g. "id")
|
// - pkName: primary key column (e.g. "id")
|
||||||
// - modelColumns: optional list of valid main-table columns (for validation). Pass nil to skip.
|
// - modelColumns: optional list of valid main-table columns (for validation). Pass nil to skip.
|
||||||
// - options: the request options containing sort and cursor information
|
// - options: the request options containing sort and cursor information
|
||||||
|
// - expandJoins: optional map[alias]string of JOIN clauses for join-column sort support
|
||||||
//
|
//
|
||||||
// Returns SQL snippet to embed in WHERE clause.
|
// Returns SQL snippet to embed in WHERE clause.
|
||||||
func GetCursorFilter(
|
func GetCursorFilter(
|
||||||
@@ -31,8 +32,10 @@ func GetCursorFilter(
|
|||||||
pkName string,
|
pkName string,
|
||||||
modelColumns []string,
|
modelColumns []string,
|
||||||
options common.RequestOptions,
|
options common.RequestOptions,
|
||||||
|
expandJoins map[string]string,
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
// Remove schema prefix if present
|
// Separate schema prefix from bare table name
|
||||||
|
fullTableName := tableName
|
||||||
if strings.Contains(tableName, ".") {
|
if strings.Contains(tableName, ".") {
|
||||||
tableName = strings.SplitN(tableName, ".", 2)[1]
|
tableName = strings.SplitN(tableName, ".", 2)[1]
|
||||||
}
|
}
|
||||||
@@ -57,18 +60,19 @@ func GetCursorFilter(
|
|||||||
// 3. Prepare
|
// 3. Prepare
|
||||||
// --------------------------------------------------------------------- //
|
// --------------------------------------------------------------------- //
|
||||||
var whereClauses []string
|
var whereClauses []string
|
||||||
|
joinSQL := ""
|
||||||
reverse := direction < 0
|
reverse := direction < 0
|
||||||
|
|
||||||
// --------------------------------------------------------------------- //
|
// --------------------------------------------------------------------- //
|
||||||
// 4. Process each sort column
|
// 4. Process each sort column
|
||||||
// --------------------------------------------------------------------- //
|
// --------------------------------------------------------------------- //
|
||||||
for _, s := range sortItems {
|
for _, s := range sortItems {
|
||||||
col := strings.TrimSpace(s.Column)
|
col := strings.Trim(strings.TrimSpace(s.Column), "()")
|
||||||
if col == "" {
|
if col == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse: "created_at", "user.name", etc.
|
// Parse: "created_at", "user.name", "fn.sortorder", etc.
|
||||||
parts := strings.Split(col, ".")
|
parts := strings.Split(col, ".")
|
||||||
field := strings.TrimSpace(parts[len(parts)-1])
|
field := strings.TrimSpace(parts[len(parts)-1])
|
||||||
prefix := strings.Join(parts[:len(parts)-1], ".")
|
prefix := strings.Join(parts[:len(parts)-1], ".")
|
||||||
@@ -81,7 +85,7 @@ func GetCursorFilter(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resolve column
|
// Resolve column
|
||||||
cursorCol, targetCol, err := resolveColumn(
|
cursorCol, targetCol, isJoin, err := resolveColumn(
|
||||||
field, prefix, tableName, modelColumns,
|
field, prefix, tableName, modelColumns,
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -89,6 +93,22 @@ func GetCursorFilter(
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handle joins
|
||||||
|
if isJoin {
|
||||||
|
if expandJoins != nil {
|
||||||
|
if joinClause, ok := expandJoins[prefix]; ok {
|
||||||
|
jSQL, cRef := rewriteJoin(joinClause, tableName, prefix)
|
||||||
|
joinSQL = jSQL
|
||||||
|
cursorCol = cRef + "." + field
|
||||||
|
targetCol = prefix + "." + field
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if cursorCol == "" {
|
||||||
|
logger.Warn("Skipping cursor sort column %q: join alias %q not in expandJoins", col, prefix)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Build inequality
|
// Build inequality
|
||||||
op := "<"
|
op := "<"
|
||||||
if desc {
|
if desc {
|
||||||
@@ -112,10 +132,12 @@ func GetCursorFilter(
|
|||||||
query := fmt.Sprintf(`EXISTS (
|
query := fmt.Sprintf(`EXISTS (
|
||||||
SELECT 1
|
SELECT 1
|
||||||
FROM %s cursor_select
|
FROM %s cursor_select
|
||||||
|
%s
|
||||||
WHERE cursor_select.%s = %s
|
WHERE cursor_select.%s = %s
|
||||||
AND (%s)
|
AND (%s)
|
||||||
)`,
|
)`,
|
||||||
tableName,
|
fullTableName,
|
||||||
|
joinSQL,
|
||||||
pkName,
|
pkName,
|
||||||
cursorID,
|
cursorID,
|
||||||
orSQL,
|
orSQL,
|
||||||
@@ -136,35 +158,44 @@ func getActiveCursor(options common.RequestOptions) (id string, direction Cursor
|
|||||||
return "", 0
|
return "", 0
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper: resolve column (main table only for now)
|
// Helper: resolve column (main table or join)
|
||||||
func resolveColumn(
|
func resolveColumn(
|
||||||
field, prefix, tableName string,
|
field, prefix, tableName string,
|
||||||
modelColumns []string,
|
modelColumns []string,
|
||||||
) (cursorCol, targetCol string, err error) {
|
) (cursorCol, targetCol string, isJoin bool, err error) {
|
||||||
|
|
||||||
// JSON field
|
// JSON field
|
||||||
if strings.Contains(field, "->") {
|
if strings.Contains(field, "->") {
|
||||||
return "cursor_select." + field, tableName + "." + field, nil
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Main table column
|
// Main table column
|
||||||
if modelColumns != nil {
|
if modelColumns != nil {
|
||||||
for _, col := range modelColumns {
|
for _, col := range modelColumns {
|
||||||
if strings.EqualFold(col, field) {
|
if strings.EqualFold(col, field) {
|
||||||
return "cursor_select." + field, tableName + "." + field, nil
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// No validation → allow all main-table fields
|
// No validation → allow all main-table fields
|
||||||
return "cursor_select." + field, tableName + "." + field, nil
|
return "cursor_select." + field, tableName + "." + field, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Joined column (not supported in resolvespec yet)
|
// Joined column
|
||||||
if prefix != "" && prefix != tableName {
|
if prefix != "" && prefix != tableName {
|
||||||
return "", "", fmt.Errorf("joined columns not supported in cursor pagination: %s", field)
|
return "", "", true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return "", "", fmt.Errorf("invalid column: %s", field)
|
return "", "", false, fmt.Errorf("invalid column: %s", field)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper: rewrite JOIN clause for cursor subquery
|
||||||
|
func rewriteJoin(joinClause, mainTable, alias string) (joinSQL, cursorAlias string) {
|
||||||
|
joinSQL = strings.ReplaceAll(joinClause, mainTable+".", "cursor_select.")
|
||||||
|
cursorAlias = "cursor_select_" + alias
|
||||||
|
joinSQL = strings.ReplaceAll(joinSQL, " "+alias+" ", " "+cursorAlias+" ")
|
||||||
|
joinSQL = strings.ReplaceAll(joinSQL, " "+alias+".", " "+cursorAlias+".")
|
||||||
|
return joinSQL, cursorAlias
|
||||||
}
|
}
|
||||||
|
|
||||||
// ------------------------------------------------------------------------- //
|
// ------------------------------------------------------------------------- //
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ func TestGetCursorFilter_Forward(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "title", "created_at", "user_id"}
|
modelColumns := []string{"id", "title", "created_at", "user_id"}
|
||||||
|
|
||||||
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
@@ -65,7 +65,7 @@ func TestGetCursorFilter_Backward(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "title", "created_at", "user_id"}
|
modelColumns := []string{"id", "title", "created_at", "user_id"}
|
||||||
|
|
||||||
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
@@ -96,7 +96,7 @@ func TestGetCursorFilter_NoCursor(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "title", "created_at"}
|
modelColumns := []string{"id", "title", "created_at"}
|
||||||
|
|
||||||
_, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
_, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Error("Expected error when no cursor is provided")
|
t.Error("Expected error when no cursor is provided")
|
||||||
}
|
}
|
||||||
@@ -116,7 +116,7 @@ func TestGetCursorFilter_NoSort(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "title"}
|
modelColumns := []string{"id", "title"}
|
||||||
|
|
||||||
_, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
_, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Error("Expected error when no sort columns are defined")
|
t.Error("Expected error when no sort columns are defined")
|
||||||
}
|
}
|
||||||
@@ -140,7 +140,7 @@ func TestGetCursorFilter_MultiColumnSort(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "title", "priority", "created_at"}
|
modelColumns := []string{"id", "title", "priority", "created_at"}
|
||||||
|
|
||||||
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
@@ -170,19 +170,50 @@ func TestGetCursorFilter_WithSchemaPrefix(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "name", "email"}
|
modelColumns := []string{"id", "name", "email"}
|
||||||
|
|
||||||
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Should handle schema prefix properly
|
// Should include full schema-qualified name in FROM clause
|
||||||
if !strings.Contains(filter, "users") {
|
if !strings.Contains(filter, "public.users") {
|
||||||
t.Errorf("Filter should reference table name users, got: %s", filter)
|
t.Errorf("Filter FROM clause should use schema-qualified name public.users, got: %s", filter)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Logf("Generated cursor filter with schema: %s", filter)
|
t.Logf("Generated cursor filter with schema: %s", filter)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestGetCursorFilter_LateralJoin(t *testing.T) {
|
||||||
|
lateralJoin := "inner join lateral (\nselect string_agg(a.name, '.') as sortorder\nfrom tree(account.rid_account) r\ninner join account a on a.id = r.id\n) fn on true"
|
||||||
|
|
||||||
|
options := common.RequestOptions{
|
||||||
|
Sort: []common.SortOption{{Column: "fn.sortorder", Direction: "ASC"}},
|
||||||
|
CursorForward: "8975",
|
||||||
|
}
|
||||||
|
|
||||||
|
tableName := "core.account"
|
||||||
|
pkName := "rid_account"
|
||||||
|
modelColumns := []string{"rid_account", "description", "pastelno"}
|
||||||
|
expandJoins := map[string]string{"fn": lateralJoin}
|
||||||
|
|
||||||
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, expandJoins)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Generated lateral cursor filter: %s", filter)
|
||||||
|
|
||||||
|
if !strings.Contains(filter, "cursor_select_fn") {
|
||||||
|
t.Errorf("Filter should reference cursor_select_fn alias, got: %s", filter)
|
||||||
|
}
|
||||||
|
if !strings.Contains(filter, "sortorder") {
|
||||||
|
t.Errorf("Filter should reference sortorder column, got: %s", filter)
|
||||||
|
}
|
||||||
|
if strings.Contains(filter, " < ") || strings.Contains(filter, " > ") {
|
||||||
|
t.Errorf("Filter should not contain empty comparison operators, got: %s", filter)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestGetActiveCursor(t *testing.T) {
|
func TestGetActiveCursor(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -288,18 +319,19 @@ func TestResolveColumn(t *testing.T) {
|
|||||||
wantErr: false,
|
wantErr: false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Joined column (not supported)",
|
name: "Joined column (isJoin=true, no error)",
|
||||||
field: "name",
|
field: "name",
|
||||||
prefix: "user",
|
prefix: "user",
|
||||||
tableName: "posts",
|
tableName: "posts",
|
||||||
modelColumns: []string{"id", "title"},
|
modelColumns: []string{"id", "title"},
|
||||||
wantErr: true,
|
wantErr: false,
|
||||||
|
// cursorCol and targetCol are empty when isJoin=true; handled by caller
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
cursor, target, err := resolveColumn(tt.field, tt.prefix, tt.tableName, tt.modelColumns)
|
cursor, target, isJoin, err := resolveColumn(tt.field, tt.prefix, tt.tableName, tt.modelColumns)
|
||||||
|
|
||||||
if tt.wantErr {
|
if tt.wantErr {
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@@ -312,6 +344,14 @@ func TestResolveColumn(t *testing.T) {
|
|||||||
t.Fatalf("Unexpected error: %v", err)
|
t.Fatalf("Unexpected error: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For join columns, cursor/target are empty and isJoin=true
|
||||||
|
if isJoin {
|
||||||
|
if cursor != "" || target != "" {
|
||||||
|
t.Errorf("Expected empty cursor/target for join column, got %q / %q", cursor, target)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if cursor != tt.wantCursor {
|
if cursor != tt.wantCursor {
|
||||||
t.Errorf("Expected cursor %q, got %q", tt.wantCursor, cursor)
|
t.Errorf("Expected cursor %q, got %q", tt.wantCursor, cursor)
|
||||||
}
|
}
|
||||||
@@ -362,7 +402,7 @@ func TestCursorFilter_SQL_Safety(t *testing.T) {
|
|||||||
pkName := "id"
|
pkName := "id"
|
||||||
modelColumns := []string{"id", "created_at"}
|
modelColumns := []string{"id", "created_at"}
|
||||||
|
|
||||||
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
filter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|||||||
143
pkg/resolvespec/filter_test.go
Normal file
143
pkg/resolvespec/filter_test.go
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
package resolvespec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestBuildFilterCondition tests the filter condition builder
|
||||||
|
func TestBuildFilterCondition(t *testing.T) {
|
||||||
|
h := &Handler{}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
filter common.FilterOption
|
||||||
|
expectedCondition string
|
||||||
|
expectedArgsCount int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Equal operator",
|
||||||
|
filter: common.FilterOption{
|
||||||
|
Column: "status",
|
||||||
|
Operator: "eq",
|
||||||
|
Value: "active",
|
||||||
|
},
|
||||||
|
expectedCondition: "status = ?",
|
||||||
|
expectedArgsCount: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Greater than operator",
|
||||||
|
filter: common.FilterOption{
|
||||||
|
Column: "age",
|
||||||
|
Operator: "gt",
|
||||||
|
Value: 18,
|
||||||
|
},
|
||||||
|
expectedCondition: "age > ?",
|
||||||
|
expectedArgsCount: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "IN operator",
|
||||||
|
filter: common.FilterOption{
|
||||||
|
Column: "status",
|
||||||
|
Operator: "in",
|
||||||
|
Value: []string{"active", "pending"},
|
||||||
|
},
|
||||||
|
expectedCondition: "status IN (?,?)",
|
||||||
|
expectedArgsCount: 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "LIKE operator",
|
||||||
|
filter: common.FilterOption{
|
||||||
|
Column: "email",
|
||||||
|
Operator: "like",
|
||||||
|
Value: "%@example.com",
|
||||||
|
},
|
||||||
|
expectedCondition: "email LIKE ?",
|
||||||
|
expectedArgsCount: 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
condition, args := h.buildFilterCondition(tt.filter)
|
||||||
|
|
||||||
|
if condition != tt.expectedCondition {
|
||||||
|
t.Errorf("Expected condition '%s', got '%s'", tt.expectedCondition, condition)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(args) != tt.expectedArgsCount {
|
||||||
|
t.Errorf("Expected %d args, got %d", tt.expectedArgsCount, len(args))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Skip value comparison for slices as they can't be compared with ==
|
||||||
|
// The important part is that args are populated correctly
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestORGrouping tests that consecutive OR filters are properly grouped
|
||||||
|
func TestORGrouping(t *testing.T) {
|
||||||
|
// This is a conceptual test - in practice we'd need a mock SelectQuery
|
||||||
|
// to verify the actual SQL grouping behavior
|
||||||
|
t.Run("Consecutive OR filters should be grouped", func(t *testing.T) {
|
||||||
|
filters := []common.FilterOption{
|
||||||
|
{Column: "status", Operator: "eq", Value: "active"},
|
||||||
|
{Column: "status", Operator: "eq", Value: "pending", LogicOperator: "OR"},
|
||||||
|
{Column: "status", Operator: "eq", Value: "trial", LogicOperator: "OR"},
|
||||||
|
{Column: "age", Operator: "gte", Value: 18},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expected behavior: (status='active' OR status='pending' OR status='trial') AND age>=18
|
||||||
|
// The first three filters should be grouped together
|
||||||
|
// The fourth filter should be separate with AND
|
||||||
|
|
||||||
|
// Count OR groups
|
||||||
|
orGroupCount := 0
|
||||||
|
inORGroup := false
|
||||||
|
|
||||||
|
for i := 1; i < len(filters); i++ {
|
||||||
|
if strings.EqualFold(filters[i].LogicOperator, "OR") && !inORGroup {
|
||||||
|
orGroupCount++
|
||||||
|
inORGroup = true
|
||||||
|
} else if !strings.EqualFold(filters[i].LogicOperator, "OR") {
|
||||||
|
inORGroup = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// We should have detected one OR group
|
||||||
|
if orGroupCount != 1 {
|
||||||
|
t.Errorf("Expected 1 OR group, detected %d", orGroupCount)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Multiple OR groups should be handled correctly", func(t *testing.T) {
|
||||||
|
filters := []common.FilterOption{
|
||||||
|
{Column: "status", Operator: "eq", Value: "active"},
|
||||||
|
{Column: "status", Operator: "eq", Value: "pending", LogicOperator: "OR"},
|
||||||
|
{Column: "priority", Operator: "eq", Value: "high"},
|
||||||
|
{Column: "priority", Operator: "eq", Value: "urgent", LogicOperator: "OR"},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expected: (status='active' OR status='pending') AND (priority='high' OR priority='urgent')
|
||||||
|
// Should have two OR groups
|
||||||
|
|
||||||
|
orGroupCount := 0
|
||||||
|
inORGroup := false
|
||||||
|
|
||||||
|
for i := 1; i < len(filters); i++ {
|
||||||
|
if strings.EqualFold(filters[i].LogicOperator, "OR") && !inORGroup {
|
||||||
|
orGroupCount++
|
||||||
|
inORGroup = true
|
||||||
|
} else if !strings.EqualFold(filters[i].LogicOperator, "OR") {
|
||||||
|
inORGroup = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// We should have detected two OR groups
|
||||||
|
if orGroupCount != 2 {
|
||||||
|
t.Errorf("Expected 2 OR groups, detected %d", orGroupCount)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -138,6 +138,26 @@ func (h *Handler) Handle(w common.ResponseWriter, r common.Request, params map[s
|
|||||||
validator := common.NewColumnValidator(model)
|
validator := common.NewColumnValidator(model)
|
||||||
req.Options = validator.FilterRequestOptions(req.Options)
|
req.Options = validator.FilterRequestOptions(req.Options)
|
||||||
|
|
||||||
|
// Execute BeforeHandle hook - auth check fires here, after model resolution
|
||||||
|
beforeCtx := &HookContext{
|
||||||
|
Context: ctx,
|
||||||
|
Handler: h,
|
||||||
|
Schema: schema,
|
||||||
|
Entity: entity,
|
||||||
|
Model: model,
|
||||||
|
Writer: w,
|
||||||
|
Request: r,
|
||||||
|
Operation: req.Operation,
|
||||||
|
}
|
||||||
|
if err := h.hooks.Execute(BeforeHandle, beforeCtx); err != nil {
|
||||||
|
code := http.StatusUnauthorized
|
||||||
|
if beforeCtx.AbortCode != 0 {
|
||||||
|
code = beforeCtx.AbortCode
|
||||||
|
}
|
||||||
|
h.sendError(w, code, "unauthorized", beforeCtx.AbortMessage, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
switch req.Operation {
|
switch req.Operation {
|
||||||
case "read":
|
case "read":
|
||||||
h.handleRead(ctx, w, id, req.Options)
|
h.handleRead(ctx, w, id, req.Options)
|
||||||
@@ -280,10 +300,13 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Apply filters
|
// Apply filters with proper grouping for OR logic
|
||||||
for _, filter := range options.Filters {
|
query = h.applyFilters(query, options.Filters)
|
||||||
logger.Debug("Applying filter: %s %s %v", filter.Column, filter.Operator, filter.Value)
|
|
||||||
query = h.applyFilter(query, filter)
|
// Apply custom operators
|
||||||
|
for _, customOp := range options.CustomOperators {
|
||||||
|
logger.Debug("Applying custom operator: %s - %s", customOp.Name, customOp.SQL)
|
||||||
|
query = query.Where(customOp.SQL)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Apply sorting
|
// Apply sorting
|
||||||
@@ -306,8 +329,13 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
// Extract model columns for validation
|
// Extract model columns for validation
|
||||||
modelColumns := reflection.GetModelColumns(model)
|
modelColumns := reflection.GetModelColumns(model)
|
||||||
|
|
||||||
// Get cursor filter SQL
|
// Default sort to primary key when none provided
|
||||||
cursorFilter, err := GetCursorFilter(tableName, pkName, modelColumns, options)
|
if len(options.Sort) == 0 {
|
||||||
|
options.Sort = []common.SortOption{{Column: pkName, Direction: "ASC"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get cursor filter SQL (expandJoins is empty for resolvespec — no custom SQL join support yet)
|
||||||
|
cursorFilter, err := GetCursorFilter(tableName, pkName, modelColumns, options, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("Error building cursor filter: %v", err)
|
logger.Error("Error building cursor filter: %v", err)
|
||||||
h.sendError(w, http.StatusBadRequest, "cursor_error", "Invalid cursor pagination", err)
|
h.sendError(w, http.StatusBadRequest, "cursor_error", "Invalid cursor pagination", err)
|
||||||
@@ -381,7 +409,77 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Apply pagination
|
// Handle FetchRowNumber if requested
|
||||||
|
var rowNumber *int64
|
||||||
|
if options.FetchRowNumber != nil && *options.FetchRowNumber != "" {
|
||||||
|
logger.Debug("Fetching row number for ID: %s", *options.FetchRowNumber)
|
||||||
|
pkName := reflection.GetPrimaryKeyName(model)
|
||||||
|
|
||||||
|
// Build ROW_NUMBER window function SQL
|
||||||
|
rowNumberSQL := "ROW_NUMBER() OVER ("
|
||||||
|
if len(options.Sort) > 0 {
|
||||||
|
rowNumberSQL += "ORDER BY "
|
||||||
|
for i, sort := range options.Sort {
|
||||||
|
if i > 0 {
|
||||||
|
rowNumberSQL += ", "
|
||||||
|
}
|
||||||
|
direction := "ASC"
|
||||||
|
if strings.EqualFold(sort.Direction, "desc") {
|
||||||
|
direction = "DESC"
|
||||||
|
}
|
||||||
|
rowNumberSQL += fmt.Sprintf("%s %s", sort.Column, direction)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
rowNumberSQL += ")"
|
||||||
|
|
||||||
|
// Create a query to fetch the row number using a subquery approach
|
||||||
|
// We'll select the PK and row_number, then filter by the target ID
|
||||||
|
type RowNumResult struct {
|
||||||
|
RowNum int64 `bun:"row_num"`
|
||||||
|
}
|
||||||
|
|
||||||
|
rowNumQuery := h.db.NewSelect().Table(tableName).
|
||||||
|
ColumnExpr(fmt.Sprintf("%s AS row_num", rowNumberSQL)).
|
||||||
|
Column(pkName)
|
||||||
|
|
||||||
|
// Apply the same filters as the main query
|
||||||
|
for _, filter := range options.Filters {
|
||||||
|
rowNumQuery = h.applyFilter(rowNumQuery, filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply custom operators
|
||||||
|
for _, customOp := range options.CustomOperators {
|
||||||
|
rowNumQuery = rowNumQuery.Where(customOp.SQL)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter for the specific ID we want the row number for
|
||||||
|
rowNumQuery = rowNumQuery.Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), *options.FetchRowNumber)
|
||||||
|
|
||||||
|
// Execute query to get row number
|
||||||
|
var result RowNumResult
|
||||||
|
if err := rowNumQuery.Scan(ctx, &result); err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
// Build filter description for error message
|
||||||
|
filterInfo := fmt.Sprintf("filters: %d", len(options.Filters))
|
||||||
|
if len(options.CustomOperators) > 0 {
|
||||||
|
customOps := make([]string, 0, len(options.CustomOperators))
|
||||||
|
for _, op := range options.CustomOperators {
|
||||||
|
customOps = append(customOps, op.SQL)
|
||||||
|
}
|
||||||
|
filterInfo += fmt.Sprintf(", custom operators: [%s]", strings.Join(customOps, "; "))
|
||||||
|
}
|
||||||
|
logger.Warn("No row found for primary key %s=%s with %s", pkName, *options.FetchRowNumber, filterInfo)
|
||||||
|
} else {
|
||||||
|
logger.Warn("Error fetching row number: %v", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
rowNumber = &result.RowNum
|
||||||
|
logger.Debug("Found row number: %d", *rowNumber)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply pagination (skip if FetchRowNumber is set - we want only that record)
|
||||||
|
if options.FetchRowNumber == nil || *options.FetchRowNumber == "" {
|
||||||
if options.Limit != nil && *options.Limit > 0 {
|
if options.Limit != nil && *options.Limit > 0 {
|
||||||
logger.Debug("Applying limit: %d", *options.Limit)
|
logger.Debug("Applying limit: %d", *options.Limit)
|
||||||
query = query.Limit(*options.Limit)
|
query = query.Limit(*options.Limit)
|
||||||
@@ -390,15 +488,26 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
logger.Debug("Applying offset: %d", *options.Offset)
|
logger.Debug("Applying offset: %d", *options.Offset)
|
||||||
query = query.Offset(*options.Offset)
|
query = query.Offset(*options.Offset)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Execute query
|
// Execute query
|
||||||
var result interface{}
|
var result interface{}
|
||||||
|
if id != "" || (options.FetchRowNumber != nil && *options.FetchRowNumber != "") {
|
||||||
|
// Single record query - either by URL ID or FetchRowNumber
|
||||||
|
var targetID string
|
||||||
if id != "" {
|
if id != "" {
|
||||||
logger.Debug("Querying single record with ID: %s", id)
|
targetID = id
|
||||||
|
logger.Debug("Querying single record with URL ID: %s", id)
|
||||||
|
} else {
|
||||||
|
targetID = *options.FetchRowNumber
|
||||||
|
logger.Debug("Querying single record with FetchRowNumber ID: %s", targetID)
|
||||||
|
}
|
||||||
|
|
||||||
// For single record, create a new pointer to the struct type
|
// For single record, create a new pointer to the struct type
|
||||||
singleResult := reflect.New(modelType).Interface()
|
singleResult := reflect.New(modelType).Interface()
|
||||||
|
pkName := reflection.GetPrimaryKeyName(singleResult)
|
||||||
|
|
||||||
query = query.Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(singleResult))), id)
|
query = query.Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), targetID)
|
||||||
if err := query.Scan(ctx, singleResult); err != nil {
|
if err := query.Scan(ctx, singleResult); err != nil {
|
||||||
logger.Error("Error querying record: %v", err)
|
logger.Error("Error querying record: %v", err)
|
||||||
h.sendError(w, http.StatusInternalServerError, "query_error", "Error executing query", err)
|
h.sendError(w, http.StatusInternalServerError, "query_error", "Error executing query", err)
|
||||||
@@ -418,20 +527,39 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
|
|
||||||
logger.Info("Successfully retrieved records")
|
logger.Info("Successfully retrieved records")
|
||||||
|
|
||||||
|
// Build metadata
|
||||||
limit := 0
|
limit := 0
|
||||||
|
offset := 0
|
||||||
|
count := int64(total)
|
||||||
|
|
||||||
|
// When FetchRowNumber is used, we only return 1 record
|
||||||
|
if options.FetchRowNumber != nil && *options.FetchRowNumber != "" {
|
||||||
|
count = 1
|
||||||
|
// Set the fetched row number on the record
|
||||||
|
if rowNumber != nil {
|
||||||
|
logger.Debug("FetchRowNumber: Setting row number %d on record", *rowNumber)
|
||||||
|
h.setRowNumbersOnRecords(result, int(*rowNumber-1)) // -1 because setRowNumbersOnRecords adds 1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
if options.Limit != nil {
|
if options.Limit != nil {
|
||||||
limit = *options.Limit
|
limit = *options.Limit
|
||||||
}
|
}
|
||||||
offset := 0
|
|
||||||
if options.Offset != nil {
|
if options.Offset != nil {
|
||||||
offset = *options.Offset
|
offset = *options.Offset
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set row numbers on records if RowNumber field exists
|
||||||
|
// Only for multiple records (not when fetching single record)
|
||||||
|
h.setRowNumbersOnRecords(result, offset)
|
||||||
|
}
|
||||||
|
|
||||||
h.sendResponse(w, result, &common.Metadata{
|
h.sendResponse(w, result, &common.Metadata{
|
||||||
Total: int64(total),
|
Total: int64(total),
|
||||||
Filtered: int64(total),
|
Filtered: int64(total),
|
||||||
|
Count: count,
|
||||||
Limit: limit,
|
Limit: limit,
|
||||||
Offset: offset,
|
Offset: offset,
|
||||||
|
RowNumber: rowNumber,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1133,6 +1261,24 @@ func (h *Handler) handleDelete(ctx context.Context, w common.ResponseWriter, id
|
|||||||
|
|
||||||
logger.Info("Deleting records from %s.%s", schema, entity)
|
logger.Info("Deleting records from %s.%s", schema, entity)
|
||||||
|
|
||||||
|
// Execute BeforeDelete hooks (covers model-rule checks before any deletion)
|
||||||
|
hookCtx := &HookContext{
|
||||||
|
Context: ctx,
|
||||||
|
Handler: h,
|
||||||
|
Schema: schema,
|
||||||
|
Entity: entity,
|
||||||
|
Model: model,
|
||||||
|
ID: id,
|
||||||
|
Data: data,
|
||||||
|
Writer: w,
|
||||||
|
Tx: h.db,
|
||||||
|
}
|
||||||
|
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
||||||
|
logger.Error("BeforeDelete hook failed: %v", err)
|
||||||
|
h.sendError(w, http.StatusForbidden, "delete_forbidden", "Delete operation not allowed", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Handle batch delete from request data
|
// Handle batch delete from request data
|
||||||
if data != nil {
|
if data != nil {
|
||||||
switch v := data.(type) {
|
switch v := data.(type) {
|
||||||
@@ -1303,29 +1449,165 @@ func (h *Handler) handleDelete(ctx context.Context, w common.ResponseWriter, id
|
|||||||
h.sendResponse(w, recordToDelete, nil)
|
h.sendResponse(w, recordToDelete, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handler) applyFilter(query common.SelectQuery, filter common.FilterOption) common.SelectQuery {
|
// applyFilters applies all filters with proper grouping for OR logic
|
||||||
|
// Groups consecutive OR filters together to ensure proper query precedence
|
||||||
|
// Example: [A, B(OR), C(OR), D(AND)] => WHERE (A OR B OR C) AND D
|
||||||
|
func (h *Handler) applyFilters(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
|
||||||
|
if len(filters) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
for i < len(filters) {
|
||||||
|
// Check if this starts an OR group (current or next filter has OR logic)
|
||||||
|
startORGroup := i+1 < len(filters) && strings.EqualFold(filters[i+1].LogicOperator, "OR")
|
||||||
|
|
||||||
|
if startORGroup {
|
||||||
|
// Collect all consecutive filters that are OR'd together
|
||||||
|
orGroup := []common.FilterOption{filters[i]}
|
||||||
|
j := i + 1
|
||||||
|
for j < len(filters) && strings.EqualFold(filters[j].LogicOperator, "OR") {
|
||||||
|
orGroup = append(orGroup, filters[j])
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply the OR group as a single grouped WHERE clause
|
||||||
|
query = h.applyFilterGroup(query, orGroup)
|
||||||
|
i = j
|
||||||
|
} else {
|
||||||
|
// Single filter with AND logic (or first filter)
|
||||||
|
condition, args := h.buildFilterCondition(filters[i])
|
||||||
|
if condition != "" {
|
||||||
|
query = query.Where(condition, args...)
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// applyFilterGroup applies a group of filters that should be OR'd together
|
||||||
|
// Always wraps them in parentheses and applies as a single WHERE clause
|
||||||
|
func (h *Handler) applyFilterGroup(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
|
||||||
|
if len(filters) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build all conditions and collect args
|
||||||
|
var conditions []string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
for _, filter := range filters {
|
||||||
|
condition, filterArgs := h.buildFilterCondition(filter)
|
||||||
|
if condition != "" {
|
||||||
|
conditions = append(conditions, condition)
|
||||||
|
args = append(args, filterArgs...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(conditions) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// Single filter - no need for grouping
|
||||||
|
if len(conditions) == 1 {
|
||||||
|
return query.Where(conditions[0], args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Multiple conditions - group with parentheses and OR
|
||||||
|
groupedCondition := "(" + strings.Join(conditions, " OR ") + ")"
|
||||||
|
return query.Where(groupedCondition, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildFilterCondition builds a filter condition and returns it with args
|
||||||
|
func (h *Handler) buildFilterCondition(filter common.FilterOption) (conditionString string, conditionArgs []interface{}) {
|
||||||
|
var condition string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
switch filter.Operator {
|
switch filter.Operator {
|
||||||
case "eq":
|
case "eq", "=":
|
||||||
return query.Where(fmt.Sprintf("%s = ?", filter.Column), filter.Value)
|
condition = fmt.Sprintf("%s = ?", filter.Column)
|
||||||
case "neq":
|
args = []interface{}{filter.Value}
|
||||||
return query.Where(fmt.Sprintf("%s != ?", filter.Column), filter.Value)
|
case "neq", "!=", "<>":
|
||||||
case "gt":
|
condition = fmt.Sprintf("%s != ?", filter.Column)
|
||||||
return query.Where(fmt.Sprintf("%s > ?", filter.Column), filter.Value)
|
args = []interface{}{filter.Value}
|
||||||
case "gte":
|
case "gt", ">":
|
||||||
return query.Where(fmt.Sprintf("%s >= ?", filter.Column), filter.Value)
|
condition = fmt.Sprintf("%s > ?", filter.Column)
|
||||||
case "lt":
|
args = []interface{}{filter.Value}
|
||||||
return query.Where(fmt.Sprintf("%s < ?", filter.Column), filter.Value)
|
case "gte", ">=":
|
||||||
case "lte":
|
condition = fmt.Sprintf("%s >= ?", filter.Column)
|
||||||
return query.Where(fmt.Sprintf("%s <= ?", filter.Column), filter.Value)
|
args = []interface{}{filter.Value}
|
||||||
|
case "lt", "<":
|
||||||
|
condition = fmt.Sprintf("%s < ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "lte", "<=":
|
||||||
|
condition = fmt.Sprintf("%s <= ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
case "like":
|
case "like":
|
||||||
return query.Where(fmt.Sprintf("%s LIKE ?", filter.Column), filter.Value)
|
condition = fmt.Sprintf("%s LIKE ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
case "ilike":
|
case "ilike":
|
||||||
return query.Where(fmt.Sprintf("%s ILIKE ?", filter.Column), filter.Value)
|
condition = fmt.Sprintf("%s ILIKE ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
case "in":
|
case "in":
|
||||||
return query.Where(fmt.Sprintf("%s IN (?)", filter.Column), filter.Value)
|
condition, args = common.BuildInCondition(filter.Column, filter.Value)
|
||||||
|
if condition == "" {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return condition, args
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) applyFilter(query common.SelectQuery, filter common.FilterOption) common.SelectQuery {
|
||||||
|
// Determine which method to use based on LogicOperator
|
||||||
|
useOrLogic := strings.EqualFold(filter.LogicOperator, "OR")
|
||||||
|
|
||||||
|
var condition string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
switch filter.Operator {
|
||||||
|
case "eq", "=":
|
||||||
|
condition = fmt.Sprintf("%s = ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "neq", "!=", "<>":
|
||||||
|
condition = fmt.Sprintf("%s != ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "gt", ">":
|
||||||
|
condition = fmt.Sprintf("%s > ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "gte", ">=":
|
||||||
|
condition = fmt.Sprintf("%s >= ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "lt", "<":
|
||||||
|
condition = fmt.Sprintf("%s < ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "lte", "<=":
|
||||||
|
condition = fmt.Sprintf("%s <= ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "like":
|
||||||
|
condition = fmt.Sprintf("%s LIKE ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "ilike":
|
||||||
|
condition = fmt.Sprintf("%s ILIKE ?", filter.Column)
|
||||||
|
args = []interface{}{filter.Value}
|
||||||
|
case "in":
|
||||||
|
condition, args = common.BuildInCondition(filter.Column, filter.Value)
|
||||||
|
if condition == "" {
|
||||||
|
return query
|
||||||
|
}
|
||||||
default:
|
default:
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Apply filter with appropriate logic operator
|
||||||
|
if useOrLogic {
|
||||||
|
return query.WhereOr(condition, args...)
|
||||||
|
}
|
||||||
|
return query.Where(condition, args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// parseTableName splits a table name that may contain schema into separate schema and table
|
// parseTableName splits a table name that may contain schema into separate schema and table
|
||||||
@@ -1380,10 +1662,16 @@ func (h *Handler) getSchemaAndTable(defaultSchema, entity string, model interfac
|
|||||||
return schema, entity
|
return schema, entity
|
||||||
}
|
}
|
||||||
|
|
||||||
// getTableName returns the full table name including schema (schema.table)
|
// getTableName returns the full table name including schema.
|
||||||
|
// For most drivers the result is "schema.table". For SQLite, which does not
|
||||||
|
// support schema-qualified names, the schema and table are joined with an
|
||||||
|
// underscore: "schema_table".
|
||||||
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
||||||
schemaName, tableName := h.getSchemaAndTable(schema, entity, model)
|
schemaName, tableName := h.getSchemaAndTable(schema, entity, model)
|
||||||
if schemaName != "" {
|
if schemaName != "" {
|
||||||
|
if h.db.DriverName() == "sqlite" {
|
||||||
|
return fmt.Sprintf("%s_%s", schemaName, tableName)
|
||||||
|
}
|
||||||
return fmt.Sprintf("%s.%s", schemaName, tableName)
|
return fmt.Sprintf("%s.%s", schemaName, tableName)
|
||||||
}
|
}
|
||||||
return tableName
|
return tableName
|
||||||
@@ -1703,6 +1991,51 @@ func toSnakeCase(s string) string {
|
|||||||
return strings.ToLower(result.String())
|
return strings.ToLower(result.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// setRowNumbersOnRecords sets the RowNumber field on each record if it exists
|
||||||
|
// The row number is calculated as offset + index + 1 (1-based)
|
||||||
|
func (h *Handler) setRowNumbersOnRecords(records interface{}, offset int) {
|
||||||
|
// Get the reflect value of the records
|
||||||
|
recordsValue := reflect.ValueOf(records)
|
||||||
|
if recordsValue.Kind() == reflect.Ptr {
|
||||||
|
recordsValue = recordsValue.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure it's a slice
|
||||||
|
if recordsValue.Kind() != reflect.Slice {
|
||||||
|
logger.Debug("setRowNumbersOnRecords: records is not a slice, skipping")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Iterate through each record
|
||||||
|
for i := 0; i < recordsValue.Len(); i++ {
|
||||||
|
record := recordsValue.Index(i)
|
||||||
|
|
||||||
|
// Dereference if it's a pointer
|
||||||
|
if record.Kind() == reflect.Ptr {
|
||||||
|
if record.IsNil() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
record = record.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure it's a struct
|
||||||
|
if record.Kind() != reflect.Struct {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to find and set the RowNumber field
|
||||||
|
rowNumberField := record.FieldByName("RowNumber")
|
||||||
|
if rowNumberField.IsValid() && rowNumberField.CanSet() {
|
||||||
|
// Check if the field is of type int64
|
||||||
|
if rowNumberField.Kind() == reflect.Int64 {
|
||||||
|
rowNum := int64(offset + i + 1)
|
||||||
|
rowNumberField.SetInt(rowNum)
|
||||||
|
logger.Debug("Set RowNumber=%d for record index %d", rowNum, i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// HandleOpenAPI generates and returns the OpenAPI specification
|
// HandleOpenAPI generates and returns the OpenAPI specification
|
||||||
func (h *Handler) HandleOpenAPI(w common.ResponseWriter, r common.Request) {
|
func (h *Handler) HandleOpenAPI(w common.ResponseWriter, r common.Request) {
|
||||||
if h.openAPIGenerator == nil {
|
if h.openAPIGenerator == nil {
|
||||||
|
|||||||
@@ -12,6 +12,10 @@ import (
|
|||||||
type HookType string
|
type HookType string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
// BeforeHandle fires after model resolution, before operation dispatch.
|
||||||
|
// Use this for auth checks that need model rules and user context simultaneously.
|
||||||
|
BeforeHandle HookType = "before_handle"
|
||||||
|
|
||||||
// Read operation hooks
|
// Read operation hooks
|
||||||
BeforeRead HookType = "before_read"
|
BeforeRead HookType = "before_read"
|
||||||
AfterRead HookType = "after_read"
|
AfterRead HookType = "after_read"
|
||||||
@@ -43,6 +47,9 @@ type HookContext struct {
|
|||||||
Writer common.ResponseWriter
|
Writer common.ResponseWriter
|
||||||
Request common.Request
|
Request common.Request
|
||||||
|
|
||||||
|
// Operation being dispatched (e.g. "read", "create", "update", "delete")
|
||||||
|
Operation string
|
||||||
|
|
||||||
// Operation-specific fields
|
// Operation-specific fields
|
||||||
ID string
|
ID string
|
||||||
Data interface{} // For create/update operations
|
Data interface{} // For create/update operations
|
||||||
|
|||||||
@@ -70,17 +70,17 @@ func SetupMuxRoutes(muxRouter *mux.Router, handler *Handler, authMiddleware Midd
|
|||||||
entityWithIDPath := buildRoutePath(schema, entity) + "/{id}"
|
entityWithIDPath := buildRoutePath(schema, entity) + "/{id}"
|
||||||
|
|
||||||
// Create handler functions for this specific entity
|
// Create handler functions for this specific entity
|
||||||
postEntityHandler := createMuxHandler(handler, schema, entity, "")
|
var postEntityHandler http.Handler = createMuxHandler(handler, schema, entity, "")
|
||||||
postEntityWithIDHandler := createMuxHandler(handler, schema, entity, "id")
|
var postEntityWithIDHandler http.Handler = createMuxHandler(handler, schema, entity, "id")
|
||||||
getEntityHandler := createMuxGetHandler(handler, schema, entity, "")
|
var getEntityHandler http.Handler = createMuxGetHandler(handler, schema, entity, "")
|
||||||
optionsEntityHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "POST", "OPTIONS"})
|
optionsEntityHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "POST", "OPTIONS"})
|
||||||
optionsEntityWithIDHandler := createMuxOptionsHandler(handler, schema, entity, []string{"POST", "OPTIONS"})
|
optionsEntityWithIDHandler := createMuxOptionsHandler(handler, schema, entity, []string{"POST", "OPTIONS"})
|
||||||
|
|
||||||
// Apply authentication middleware if provided
|
// Apply authentication middleware if provided
|
||||||
if authMiddleware != nil {
|
if authMiddleware != nil {
|
||||||
postEntityHandler = authMiddleware(postEntityHandler).(http.HandlerFunc)
|
postEntityHandler = authMiddleware(postEntityHandler)
|
||||||
postEntityWithIDHandler = authMiddleware(postEntityWithIDHandler).(http.HandlerFunc)
|
postEntityWithIDHandler = authMiddleware(postEntityWithIDHandler)
|
||||||
getEntityHandler = authMiddleware(getEntityHandler).(http.HandlerFunc)
|
getEntityHandler = authMiddleware(getEntityHandler)
|
||||||
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -216,9 +216,34 @@ type BunRouterHandler interface {
|
|||||||
Handle(method, path string, handler bunrouter.HandlerFunc)
|
Handle(method, path string, handler bunrouter.HandlerFunc)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// wrapBunRouterHandler wraps a bunrouter handler with auth middleware if provided
|
||||||
|
func wrapBunRouterHandler(handler bunrouter.HandlerFunc, authMiddleware MiddlewareFunc) bunrouter.HandlerFunc {
|
||||||
|
if authMiddleware == nil {
|
||||||
|
return handler
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
|
// Create an http.Handler that calls the bunrouter handler
|
||||||
|
httpHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Replace the embedded *http.Request with the middleware-enriched one
|
||||||
|
// so that auth context (user ID, etc.) is visible to the handler.
|
||||||
|
enrichedReq := req
|
||||||
|
enrichedReq.Request = r
|
||||||
|
_ = handler(w, enrichedReq)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Wrap with auth middleware and execute
|
||||||
|
wrappedHandler := authMiddleware(httpHandler)
|
||||||
|
wrappedHandler.ServeHTTP(w, req.Request)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// SetupBunRouterRoutes sets up bunrouter routes for the ResolveSpec API
|
// SetupBunRouterRoutes sets up bunrouter routes for the ResolveSpec API
|
||||||
// Accepts bunrouter.Router or bunrouter.Group
|
// Accepts bunrouter.Router or bunrouter.Group
|
||||||
func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
// authMiddleware is optional - if provided, routes will be protected with the middleware
|
||||||
|
func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler, authMiddleware MiddlewareFunc) {
|
||||||
|
|
||||||
// CORS config
|
// CORS config
|
||||||
corsConfig := common.DefaultCORSConfig()
|
corsConfig := common.DefaultCORSConfig()
|
||||||
@@ -256,7 +281,7 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
currentEntity := entity
|
currentEntity := entity
|
||||||
|
|
||||||
// POST route without ID
|
// POST route without ID
|
||||||
r.Handle("POST", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
postEntityHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -267,10 +292,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("POST", entityPath, wrapBunRouterHandler(postEntityHandler, authMiddleware))
|
||||||
|
|
||||||
// POST route with ID
|
// POST route with ID
|
||||||
r.Handle("POST", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
postEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -282,10 +308,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("POST", entityWithIDPath, wrapBunRouterHandler(postEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
// GET route without ID
|
// GET route without ID
|
||||||
r.Handle("GET", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
getEntityHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -296,10 +323,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.HandleGet(respAdapter, reqAdapter, params)
|
handler.HandleGet(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("GET", entityPath, wrapBunRouterHandler(getEntityHandler, authMiddleware))
|
||||||
|
|
||||||
// GET route with ID
|
// GET route with ID
|
||||||
r.Handle("GET", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
getEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -311,9 +339,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.HandleGet(respAdapter, reqAdapter, params)
|
handler.HandleGet(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("GET", entityWithIDPath, wrapBunRouterHandler(getEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
// OPTIONS route without ID (returns metadata)
|
// OPTIONS route without ID (returns metadata)
|
||||||
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
r.Handle("OPTIONS", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
r.Handle("OPTIONS", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
@@ -330,6 +360,7 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
// OPTIONS route with ID (returns metadata)
|
// OPTIONS route with ID (returns metadata)
|
||||||
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
r.Handle("OPTIONS", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
r.Handle("OPTIONS", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewHTTPRequest(req.Request)
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
@@ -355,8 +386,8 @@ func ExampleWithBunRouter(bunDB *bun.DB) {
|
|||||||
// Create bunrouter
|
// Create bunrouter
|
||||||
bunRouter := bunrouter.New()
|
bunRouter := bunrouter.New()
|
||||||
|
|
||||||
// Setup ResolveSpec routes with bunrouter
|
// Setup ResolveSpec routes with bunrouter without authentication
|
||||||
SetupBunRouterRoutes(bunRouter, handler)
|
SetupBunRouterRoutes(bunRouter, handler, nil)
|
||||||
|
|
||||||
// Start server
|
// Start server
|
||||||
// http.ListenAndServe(":8080", bunRouter)
|
// http.ListenAndServe(":8080", bunRouter)
|
||||||
@@ -377,8 +408,8 @@ func ExampleBunRouterWithBunDB(bunDB *bun.DB) {
|
|||||||
// Create bunrouter
|
// Create bunrouter
|
||||||
bunRouter := bunrouter.New()
|
bunRouter := bunrouter.New()
|
||||||
|
|
||||||
// Setup ResolveSpec routes
|
// Setup ResolveSpec routes without authentication
|
||||||
SetupBunRouterRoutes(bunRouter, handler)
|
SetupBunRouterRoutes(bunRouter, handler, nil)
|
||||||
|
|
||||||
// This gives you the full uptrace stack: bunrouter + Bun ORM
|
// This gives you the full uptrace stack: bunrouter + Bun ORM
|
||||||
// http.ListenAndServe(":8080", bunRouter)
|
// http.ListenAndServe(":8080", bunRouter)
|
||||||
@@ -396,8 +427,87 @@ func ExampleBunRouterWithGroup(bunDB *bun.DB) {
|
|||||||
apiGroup := bunRouter.NewGroup("/api")
|
apiGroup := bunRouter.NewGroup("/api")
|
||||||
|
|
||||||
// Setup ResolveSpec routes on the group - routes will be under /api
|
// Setup ResolveSpec routes on the group - routes will be under /api
|
||||||
SetupBunRouterRoutes(apiGroup, handler)
|
SetupBunRouterRoutes(apiGroup, handler, nil)
|
||||||
|
|
||||||
// Start server
|
// Start server
|
||||||
// http.ListenAndServe(":8080", bunRouter)
|
// http.ListenAndServe(":8080", bunRouter)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ExampleWithGORMAndAuth shows how to use ResolveSpec with GORM and authentication
|
||||||
|
func ExampleWithGORMAndAuth(db *gorm.DB) {
|
||||||
|
// Create handler using GORM
|
||||||
|
_ = NewHandlerWithGORM(db)
|
||||||
|
|
||||||
|
// Create auth middleware
|
||||||
|
// import "github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
// secList := security.NewSecurityList(myProvider)
|
||||||
|
// authMiddleware := func(h http.Handler) http.Handler {
|
||||||
|
// return security.NewAuthHandler(secList, h)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// Setup router with authentication
|
||||||
|
_ = mux.NewRouter()
|
||||||
|
// SetupMuxRoutes(muxRouter, handler, authMiddleware)
|
||||||
|
|
||||||
|
// Register models
|
||||||
|
// handler.RegisterModel("public", "users", &User{})
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
// http.ListenAndServe(":8080", muxRouter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExampleWithBunAndAuth shows how to use ResolveSpec with Bun and authentication
|
||||||
|
func ExampleWithBunAndAuth(bunDB *bun.DB) {
|
||||||
|
// Create Bun adapter
|
||||||
|
dbAdapter := database.NewBunAdapter(bunDB)
|
||||||
|
|
||||||
|
// Create model registry
|
||||||
|
registry := modelregistry.NewModelRegistry()
|
||||||
|
// registry.RegisterModel("public.users", &User{})
|
||||||
|
|
||||||
|
// Create handler
|
||||||
|
_ = NewHandler(dbAdapter, registry)
|
||||||
|
|
||||||
|
// Create auth middleware
|
||||||
|
// import "github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
// secList := security.NewSecurityList(myProvider)
|
||||||
|
// authMiddleware := func(h http.Handler) http.Handler {
|
||||||
|
// return security.NewAuthHandler(secList, h)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// Setup routes with authentication
|
||||||
|
_ = mux.NewRouter()
|
||||||
|
// SetupMuxRoutes(muxRouter, handler, authMiddleware)
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
// http.ListenAndServe(":8080", muxRouter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExampleBunRouterWithBunDBAndAuth shows the full uptrace stack with authentication
|
||||||
|
func ExampleBunRouterWithBunDBAndAuth(bunDB *bun.DB) {
|
||||||
|
// Create Bun database adapter
|
||||||
|
dbAdapter := database.NewBunAdapter(bunDB)
|
||||||
|
|
||||||
|
// Create model registry
|
||||||
|
registry := modelregistry.NewModelRegistry()
|
||||||
|
// registry.RegisterModel("public.users", &User{})
|
||||||
|
|
||||||
|
// Create handler with Bun
|
||||||
|
_ = NewHandler(dbAdapter, registry)
|
||||||
|
|
||||||
|
// Create auth middleware
|
||||||
|
// import "github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
// secList := security.NewSecurityList(myProvider)
|
||||||
|
// authMiddleware := func(h http.Handler) http.Handler {
|
||||||
|
// return security.NewAuthHandler(secList, h)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// Create bunrouter
|
||||||
|
_ = bunrouter.New()
|
||||||
|
|
||||||
|
// Setup ResolveSpec routes with authentication
|
||||||
|
// SetupBunRouterRoutes(bunRouter, handler, authMiddleware)
|
||||||
|
|
||||||
|
// This gives you the full uptrace stack: bunrouter + Bun ORM with authentication
|
||||||
|
// http.ListenAndServe(":8080", bunRouter)
|
||||||
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package resolvespec
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/common"
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
@@ -10,6 +11,17 @@ import (
|
|||||||
|
|
||||||
// RegisterSecurityHooks registers all security-related hooks with the handler
|
// RegisterSecurityHooks registers all security-related hooks with the handler
|
||||||
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
||||||
|
// Hook 0: BeforeHandle - enforce auth after model resolution
|
||||||
|
handler.Hooks().Register(BeforeHandle, func(hookCtx *HookContext) error {
|
||||||
|
if err := security.CheckModelAuthAllowed(newSecurityContext(hookCtx), hookCtx.Operation); err != nil {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = err.Error()
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
// Hook 1: BeforeRead - Load security rules
|
// Hook 1: BeforeRead - Load security rules
|
||||||
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
||||||
secCtx := newSecurityContext(hookCtx)
|
secCtx := newSecurityContext(hookCtx)
|
||||||
@@ -34,6 +46,18 @@ func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList
|
|||||||
return security.LogDataAccess(secCtx)
|
return security.LogDataAccess(secCtx)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Hook 5: BeforeUpdate - enforce CanUpdate rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeUpdate, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelUpdateAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 6: BeforeDelete - enforce CanDelete rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeDelete, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelDeleteAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
logger.Info("Security hooks registered for resolvespec handler")
|
logger.Info("Security hooks registered for resolvespec handler")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -147,6 +147,7 @@ handler.Hooks.Register(restheadspec.BeforeCreate, func(ctx *restheadspec.HookCon
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Available Hook Types**:
|
**Available Hook Types**:
|
||||||
|
* `BeforeHandle` — fires after model resolution, before operation dispatch (auth checks)
|
||||||
* `BeforeRead`, `AfterRead`
|
* `BeforeRead`, `AfterRead`
|
||||||
* `BeforeCreate`, `AfterCreate`
|
* `BeforeCreate`, `AfterCreate`
|
||||||
* `BeforeUpdate`, `AfterUpdate`
|
* `BeforeUpdate`, `AfterUpdate`
|
||||||
@@ -157,11 +158,13 @@ handler.Hooks.Register(restheadspec.BeforeCreate, func(ctx *restheadspec.HookCon
|
|||||||
* `Handler`: Access to handler, database, and registry
|
* `Handler`: Access to handler, database, and registry
|
||||||
* `Schema`, `Entity`, `TableName`: Request info
|
* `Schema`, `Entity`, `TableName`: Request info
|
||||||
* `Model`: The registered model type
|
* `Model`: The registered model type
|
||||||
|
* `Operation`: Current operation string (`"read"`, `"create"`, `"update"`, `"delete"`)
|
||||||
* `Options`: Parsed request options (filters, sorting, etc.)
|
* `Options`: Parsed request options (filters, sorting, etc.)
|
||||||
* `ID`: Record ID (for single-record operations)
|
* `ID`: Record ID (for single-record operations)
|
||||||
* `Data`: Request data (for create/update)
|
* `Data`: Request data (for create/update)
|
||||||
* `Result`: Operation result (for after hooks)
|
* `Result`: Operation result (for after hooks)
|
||||||
* `Writer`: Response writer (allows hooks to modify response)
|
* `Writer`: Response writer (allows hooks to modify response)
|
||||||
|
* `Abort`, `AbortMessage`, `AbortCode`: Set in hook to abort with an error response
|
||||||
|
|
||||||
## Cursor Pagination
|
## Cursor Pagination
|
||||||
|
|
||||||
|
|||||||
@@ -32,6 +32,8 @@ func (opts *ExtendedRequestOptions) GetCursorFilter(
|
|||||||
modelColumns []string, // optional: for validation
|
modelColumns []string, // optional: for validation
|
||||||
expandJoins map[string]string, // optional: alias → JOIN SQL
|
expandJoins map[string]string, // optional: alias → JOIN SQL
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
|
// Separate schema prefix from bare table name
|
||||||
|
fullTableName := tableName
|
||||||
if strings.Contains(tableName, ".") {
|
if strings.Contains(tableName, ".") {
|
||||||
tableName = strings.SplitN(tableName, ".", 2)[1]
|
tableName = strings.SplitN(tableName, ".", 2)[1]
|
||||||
}
|
}
|
||||||
@@ -62,7 +64,7 @@ func (opts *ExtendedRequestOptions) GetCursorFilter(
|
|||||||
// 4. Process each sort column
|
// 4. Process each sort column
|
||||||
// --------------------------------------------------------------------- //
|
// --------------------------------------------------------------------- //
|
||||||
for _, s := range sortItems {
|
for _, s := range sortItems {
|
||||||
col := strings.TrimSpace(s.Column)
|
col := strings.Trim(strings.TrimSpace(s.Column), "()")
|
||||||
if col == "" {
|
if col == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@@ -91,7 +93,8 @@ func (opts *ExtendedRequestOptions) GetCursorFilter(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Handle joins
|
// Handle joins
|
||||||
if isJoin && expandJoins != nil {
|
if isJoin {
|
||||||
|
if expandJoins != nil {
|
||||||
if joinClause, ok := expandJoins[prefix]; ok {
|
if joinClause, ok := expandJoins[prefix]; ok {
|
||||||
jSQL, cRef := rewriteJoin(joinClause, tableName, prefix)
|
jSQL, cRef := rewriteJoin(joinClause, tableName, prefix)
|
||||||
joinSQL = jSQL
|
joinSQL = jSQL
|
||||||
@@ -99,6 +102,11 @@ func (opts *ExtendedRequestOptions) GetCursorFilter(
|
|||||||
targetCol = prefix + "." + field
|
targetCol = prefix + "." + field
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if cursorCol == "" {
|
||||||
|
logger.Warn("Skipping cursor sort column %q: join alias %q not in expandJoins", col, prefix)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Build inequality
|
// Build inequality
|
||||||
op := "<"
|
op := "<"
|
||||||
@@ -127,7 +135,7 @@ func (opts *ExtendedRequestOptions) GetCursorFilter(
|
|||||||
WHERE cursor_select.%s = %s
|
WHERE cursor_select.%s = %s
|
||||||
AND (%s)
|
AND (%s)
|
||||||
)`,
|
)`,
|
||||||
tableName,
|
fullTableName,
|
||||||
joinSQL,
|
joinSQL,
|
||||||
pkName,
|
pkName,
|
||||||
cursorID,
|
cursorID,
|
||||||
|
|||||||
@@ -187,9 +187,9 @@ func TestGetCursorFilter_WithSchemaPrefix(t *testing.T) {
|
|||||||
t.Fatalf("GetCursorFilter failed: %v", err)
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Should handle schema prefix properly
|
// Should include full schema-qualified name in FROM clause
|
||||||
if !strings.Contains(filter, "users") {
|
if !strings.Contains(filter, "public.users") {
|
||||||
t.Errorf("Filter should reference table name users, got: %s", filter)
|
t.Errorf("Filter FROM clause should use schema-qualified name public.users, got: %s", filter)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Logf("Generated cursor filter with schema: %s", filter)
|
t.Logf("Generated cursor filter with schema: %s", filter)
|
||||||
@@ -278,6 +278,47 @@ func TestCleanSortField(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestGetCursorFilter_LateralJoin(t *testing.T) {
|
||||||
|
lateralJoin := "inner join lateral (\nselect string_agg(a.name, '.') as sortorder\nfrom tree(account.rid_account) r\ninner join account a on a.id = r.id\n) fn on true"
|
||||||
|
|
||||||
|
opts := &ExtendedRequestOptions{
|
||||||
|
RequestOptions: common.RequestOptions{
|
||||||
|
Sort: []common.SortOption{
|
||||||
|
{Column: "fn.sortorder", Direction: "ASC"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
opts.CursorForward = "8975"
|
||||||
|
|
||||||
|
tableName := "core.account"
|
||||||
|
pkName := "rid_account"
|
||||||
|
// modelColumns does not contain "sortorder" - it's a lateral join computed column
|
||||||
|
modelColumns := []string{"rid_account", "description", "pastelno"}
|
||||||
|
expandJoins := map[string]string{"fn": lateralJoin}
|
||||||
|
|
||||||
|
filter, err := opts.GetCursorFilter(tableName, pkName, modelColumns, expandJoins)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("GetCursorFilter failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Generated lateral cursor filter: %s", filter)
|
||||||
|
|
||||||
|
// Should contain the rewritten lateral join inside the EXISTS subquery
|
||||||
|
if !strings.Contains(filter, "cursor_select_fn") {
|
||||||
|
t.Errorf("Filter should reference cursor_select_fn alias, got: %s", filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should compare fn.sortorder values
|
||||||
|
if !strings.Contains(filter, "sortorder") {
|
||||||
|
t.Errorf("Filter should reference sortorder column, got: %s", filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should NOT contain empty comparison like "< "
|
||||||
|
if strings.Contains(filter, " < ") || strings.Contains(filter, " > ") {
|
||||||
|
t.Errorf("Filter should not contain empty comparison operators, got: %s", filter)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestBuildPriorityChain(t *testing.T) {
|
func TestBuildPriorityChain(t *testing.T) {
|
||||||
clauses := []string{
|
clauses := []string{
|
||||||
"cursor_select.priority > posts.priority",
|
"cursor_select.priority > posts.priority",
|
||||||
|
|||||||
@@ -133,6 +133,41 @@ func (h *Handler) Handle(w common.ResponseWriter, r common.Request, params map[s
|
|||||||
// Add request-scoped data to context (including options)
|
// Add request-scoped data to context (including options)
|
||||||
ctx = WithRequestData(ctx, schema, entity, tableName, model, modelPtr, options)
|
ctx = WithRequestData(ctx, schema, entity, tableName, model, modelPtr, options)
|
||||||
|
|
||||||
|
// Derive operation for auth check
|
||||||
|
var operation string
|
||||||
|
switch method {
|
||||||
|
case "GET":
|
||||||
|
operation = "read"
|
||||||
|
case "POST":
|
||||||
|
operation = "create"
|
||||||
|
case "PUT", "PATCH":
|
||||||
|
operation = "update"
|
||||||
|
case "DELETE":
|
||||||
|
operation = "delete"
|
||||||
|
default:
|
||||||
|
operation = "read"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute BeforeHandle hook - auth check fires here, after model resolution
|
||||||
|
beforeCtx := &HookContext{
|
||||||
|
Context: ctx,
|
||||||
|
Handler: h,
|
||||||
|
Schema: schema,
|
||||||
|
Entity: entity,
|
||||||
|
Model: model,
|
||||||
|
Writer: w,
|
||||||
|
Request: r,
|
||||||
|
Operation: operation,
|
||||||
|
}
|
||||||
|
if err := h.hooks.Execute(BeforeHandle, beforeCtx); err != nil {
|
||||||
|
code := http.StatusUnauthorized
|
||||||
|
if beforeCtx.AbortCode != 0 {
|
||||||
|
code = beforeCtx.AbortCode
|
||||||
|
}
|
||||||
|
h.sendError(w, code, "unauthorized", beforeCtx.AbortMessage, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
switch method {
|
switch method {
|
||||||
case "GET":
|
case "GET":
|
||||||
if id != "" {
|
if id != "" {
|
||||||
@@ -549,8 +584,30 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If ID is provided, filter by ID
|
// Handle FetchRowNumber before applying ID filter
|
||||||
if id != "" {
|
// This must happen before the query to get the row position, then filter by PK
|
||||||
|
var fetchedRowNumber *int64
|
||||||
|
var fetchRowNumberPKValue string
|
||||||
|
if options.FetchRowNumber != nil && *options.FetchRowNumber != "" {
|
||||||
|
pkName := reflection.GetPrimaryKeyName(model)
|
||||||
|
fetchRowNumberPKValue = *options.FetchRowNumber
|
||||||
|
|
||||||
|
logger.Debug("FetchRowNumber: Fetching row number for PK %s = %s", pkName, fetchRowNumberPKValue)
|
||||||
|
|
||||||
|
rowNum, err := h.FetchRowNumber(ctx, tableName, pkName, fetchRowNumberPKValue, options, model)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to fetch row number: %v", err)
|
||||||
|
h.sendError(w, http.StatusBadRequest, "fetch_rownumber_error", "Failed to fetch row number", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchedRowNumber = &rowNum
|
||||||
|
logger.Debug("FetchRowNumber: Row number %d for PK %s = %s", rowNum, pkName, fetchRowNumberPKValue)
|
||||||
|
|
||||||
|
// Now filter the main query to this specific primary key
|
||||||
|
query = query.Where(fmt.Sprintf("%s = ?", common.QuoteIdent(pkName)), fetchRowNumberPKValue)
|
||||||
|
} else if id != "" {
|
||||||
|
// If ID is provided (and not FetchRowNumber), filter by ID
|
||||||
pkName := reflection.GetPrimaryKeyName(model)
|
pkName := reflection.GetPrimaryKeyName(model)
|
||||||
logger.Debug("Filtering by ID=%s: %s", pkName, id)
|
logger.Debug("Filtering by ID=%s: %s", pkName, id)
|
||||||
|
|
||||||
@@ -666,12 +723,19 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
// Extract model columns for validation using the generic database function
|
// Extract model columns for validation using the generic database function
|
||||||
modelColumns := reflection.GetModelColumns(model)
|
modelColumns := reflection.GetModelColumns(model)
|
||||||
|
|
||||||
// Build expand joins map (if needed in future)
|
// Build expand joins map: custom SQL joins are available in cursor subquery
|
||||||
var expandJoins map[string]string
|
expandJoins := make(map[string]string)
|
||||||
if len(options.Expand) > 0 {
|
for _, joinClause := range options.CustomSQLJoin {
|
||||||
expandJoins = make(map[string]string)
|
alias := extractJoinAlias(joinClause)
|
||||||
// TODO: Build actual JOIN SQL for each expand relation
|
if alias != "" {
|
||||||
// For now, pass empty map as joins are handled via Preload
|
expandJoins[alias] = joinClause
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// TODO: also add Expand relation JOINs when those are built as SQL rather than Preload
|
||||||
|
|
||||||
|
// Default sort to primary key when none provided
|
||||||
|
if len(options.Sort) == 0 {
|
||||||
|
options.Sort = []common.SortOption{{Column: pkName, Direction: "ASC"}}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get cursor filter SQL
|
// Get cursor filter SQL
|
||||||
@@ -730,7 +794,14 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Set row numbers on each record if the model has a RowNumber field
|
// Set row numbers on each record if the model has a RowNumber field
|
||||||
|
// If FetchRowNumber was used, set the fetched row number instead of offset-based
|
||||||
|
if fetchedRowNumber != nil {
|
||||||
|
// FetchRowNumber: set the actual row position on the record
|
||||||
|
logger.Debug("FetchRowNumber: Setting row number %d on record", *fetchedRowNumber)
|
||||||
|
h.setRowNumbersOnRecords(modelPtr, int(*fetchedRowNumber-1)) // -1 because setRowNumbersOnRecords adds 1
|
||||||
|
} else {
|
||||||
h.setRowNumbersOnRecords(modelPtr, offset)
|
h.setRowNumbersOnRecords(modelPtr, offset)
|
||||||
|
}
|
||||||
|
|
||||||
metadata := &common.Metadata{
|
metadata := &common.Metadata{
|
||||||
Total: int64(total),
|
Total: int64(total),
|
||||||
@@ -740,21 +811,10 @@ func (h *Handler) handleRead(ctx context.Context, w common.ResponseWriter, id st
|
|||||||
Offset: offset,
|
Offset: offset,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fetch row number for a specific record if requested
|
// If FetchRowNumber was used, also set it in metadata
|
||||||
if options.FetchRowNumber != nil && *options.FetchRowNumber != "" {
|
if fetchedRowNumber != nil {
|
||||||
pkName := reflection.GetPrimaryKeyName(model)
|
metadata.RowNumber = fetchedRowNumber
|
||||||
pkValue := *options.FetchRowNumber
|
logger.Debug("FetchRowNumber: Row number %d set in metadata", *fetchedRowNumber)
|
||||||
|
|
||||||
logger.Debug("Fetching row number for specific PK %s = %s", pkName, pkValue)
|
|
||||||
|
|
||||||
rowNum, err := h.FetchRowNumber(ctx, tableName, pkName, pkValue, options, model)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warn("Failed to fetch row number: %v", err)
|
|
||||||
// Don't fail the entire request, just log the warning
|
|
||||||
} else {
|
|
||||||
metadata.RowNumber = &rowNum
|
|
||||||
logger.Debug("Row number for PK %s: %d", pkValue, rowNum)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute AfterRead hooks
|
// Execute AfterRead hooks
|
||||||
@@ -1480,8 +1540,8 @@ func (h *Handler) handleDelete(ctx context.Context, w common.ResponseWriter, id
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
||||||
logger.Warn("BeforeDelete hook failed for ID %s: %v", itemID, err)
|
logger.Error("BeforeDelete hook failed for ID %s: %v", itemID, err)
|
||||||
continue
|
return fmt.Errorf("delete not allowed for ID %s: %w", itemID, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
||||||
@@ -1554,8 +1614,8 @@ func (h *Handler) handleDelete(ctx context.Context, w common.ResponseWriter, id
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
||||||
logger.Warn("BeforeDelete hook failed for ID %v: %v", itemID, err)
|
logger.Error("BeforeDelete hook failed for ID %v: %v", itemID, err)
|
||||||
continue
|
return fmt.Errorf("delete not allowed for ID %v: %w", itemID, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
||||||
@@ -1612,8 +1672,8 @@ func (h *Handler) handleDelete(ctx context.Context, w common.ResponseWriter, id
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
if err := h.hooks.Execute(BeforeDelete, hookCtx); err != nil {
|
||||||
logger.Warn("BeforeDelete hook failed for ID %v: %v", itemID, err)
|
logger.Error("BeforeDelete hook failed for ID %v: %v", itemID, err)
|
||||||
continue
|
return fmt.Errorf("delete not allowed for ID %v: %w", itemID, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
query := tx.NewDelete().Table(tableName).Where(fmt.Sprintf("%s = ?", common.QuoteIdent(reflection.GetPrimaryKeyName(model))), itemID)
|
||||||
@@ -2015,11 +2075,18 @@ func (h *Handler) processChildRelationsForField(
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// getTableNameForRelatedModel gets the table name for a related model
|
// getTableNameForRelatedModel gets the table name for a related model.
|
||||||
|
// If the model's TableName() is schema-qualified (e.g. "public.users") the
|
||||||
|
// separator is adjusted for the active driver: underscore for SQLite, dot otherwise.
|
||||||
func (h *Handler) getTableNameForRelatedModel(model interface{}, defaultName string) string {
|
func (h *Handler) getTableNameForRelatedModel(model interface{}, defaultName string) string {
|
||||||
if provider, ok := model.(common.TableNameProvider); ok {
|
if provider, ok := model.(common.TableNameProvider); ok {
|
||||||
tableName := provider.TableName()
|
tableName := provider.TableName()
|
||||||
if tableName != "" {
|
if tableName != "" {
|
||||||
|
if schema, table := h.parseTableName(tableName); schema != "" {
|
||||||
|
if h.db.DriverName() == "sqlite" {
|
||||||
|
return fmt.Sprintf("%s_%s", schema, table)
|
||||||
|
}
|
||||||
|
}
|
||||||
return tableName
|
return tableName
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2086,7 +2153,11 @@ func (h *Handler) applyFilter(query common.SelectQuery, filter common.FilterOpti
|
|||||||
// Column is already cast to TEXT if needed
|
// Column is already cast to TEXT if needed
|
||||||
return applyWhere(fmt.Sprintf("%s ILIKE ?", qualifiedColumn), filter.Value)
|
return applyWhere(fmt.Sprintf("%s ILIKE ?", qualifiedColumn), filter.Value)
|
||||||
case "in":
|
case "in":
|
||||||
return applyWhere(fmt.Sprintf("%s IN (?)", qualifiedColumn), filter.Value)
|
cond, inArgs := common.BuildInCondition(qualifiedColumn, filter.Value)
|
||||||
|
if cond == "" {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
return applyWhere(cond, inArgs...)
|
||||||
case "between":
|
case "between":
|
||||||
// Handle between operator - exclusive (> val1 AND < val2)
|
// Handle between operator - exclusive (> val1 AND < val2)
|
||||||
if values, ok := filter.Value.([]interface{}); ok && len(values) == 2 {
|
if values, ok := filter.Value.([]interface{}); ok && len(values) == 2 {
|
||||||
@@ -2162,24 +2233,25 @@ func (h *Handler) applyOrFilterGroup(query common.SelectQuery, filters []*common
|
|||||||
// buildFilterCondition builds a single filter condition and returns the condition string and args
|
// buildFilterCondition builds a single filter condition and returns the condition string and args
|
||||||
func (h *Handler) buildFilterCondition(qualifiedColumn string, filter *common.FilterOption, tableName string) (filterStr string, filterInterface []interface{}) {
|
func (h *Handler) buildFilterCondition(qualifiedColumn string, filter *common.FilterOption, tableName string) (filterStr string, filterInterface []interface{}) {
|
||||||
switch strings.ToLower(filter.Operator) {
|
switch strings.ToLower(filter.Operator) {
|
||||||
case "eq", "equals":
|
case "eq", "equals", "=":
|
||||||
return fmt.Sprintf("%s = ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s = ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "neq", "not_equals", "ne":
|
case "neq", "not_equals", "ne", "!=", "<>":
|
||||||
return fmt.Sprintf("%s != ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s != ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "gt", "greater_than":
|
case "gt", "greater_than", ">":
|
||||||
return fmt.Sprintf("%s > ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s > ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "gte", "greater_than_equals", "ge":
|
case "gte", "greater_than_equals", "ge", ">=":
|
||||||
return fmt.Sprintf("%s >= ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s >= ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "lt", "less_than":
|
case "lt", "less_than", "<":
|
||||||
return fmt.Sprintf("%s < ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s < ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "lte", "less_than_equals", "le":
|
case "lte", "less_than_equals", "le", "<=":
|
||||||
return fmt.Sprintf("%s <= ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s <= ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "like":
|
case "like":
|
||||||
return fmt.Sprintf("%s LIKE ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s LIKE ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "ilike":
|
case "ilike":
|
||||||
return fmt.Sprintf("%s ILIKE ?", qualifiedColumn), []interface{}{filter.Value}
|
return fmt.Sprintf("%s ILIKE ?", qualifiedColumn), []interface{}{filter.Value}
|
||||||
case "in":
|
case "in":
|
||||||
return fmt.Sprintf("%s IN (?)", qualifiedColumn), []interface{}{filter.Value}
|
cond, inArgs := common.BuildInCondition(qualifiedColumn, filter.Value)
|
||||||
|
return cond, inArgs
|
||||||
case "between":
|
case "between":
|
||||||
// Handle between operator - exclusive (> val1 AND < val2)
|
// Handle between operator - exclusive (> val1 AND < val2)
|
||||||
if values, ok := filter.Value.([]interface{}); ok && len(values) == 2 {
|
if values, ok := filter.Value.([]interface{}); ok && len(values) == 2 {
|
||||||
@@ -2264,10 +2336,16 @@ func (h *Handler) getSchemaAndTable(defaultSchema, entity string, model interfac
|
|||||||
return schema, entity
|
return schema, entity
|
||||||
}
|
}
|
||||||
|
|
||||||
// getTableName returns the full table name including schema (schema.table)
|
// getTableName returns the full table name including schema.
|
||||||
|
// For most drivers the result is "schema.table". For SQLite, which does not
|
||||||
|
// support schema-qualified names, the schema and table are joined with an
|
||||||
|
// underscore: "schema_table".
|
||||||
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
||||||
schemaName, tableName := h.getSchemaAndTable(schema, entity, model)
|
schemaName, tableName := h.getSchemaAndTable(schema, entity, model)
|
||||||
if schemaName != "" {
|
if schemaName != "" {
|
||||||
|
if h.db.DriverName() == "sqlite" {
|
||||||
|
return fmt.Sprintf("%s_%s", schemaName, tableName)
|
||||||
|
}
|
||||||
return fmt.Sprintf("%s.%s", schemaName, tableName)
|
return fmt.Sprintf("%s.%s", schemaName, tableName)
|
||||||
}
|
}
|
||||||
return tableName
|
return tableName
|
||||||
@@ -2589,21 +2667,8 @@ func (h *Handler) FetchRowNumber(ctx context.Context, tableName string, pkName s
|
|||||||
sortSQL = fmt.Sprintf("%s.%s ASC", tableName, pkName)
|
sortSQL = fmt.Sprintf("%s.%s ASC", tableName, pkName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build WHERE clauses from filters
|
// Build WHERE clause from filters with proper OR grouping
|
||||||
whereClauses := make([]string, 0)
|
whereSQL := h.buildWhereClauseWithORGrouping(options.Filters, tableName)
|
||||||
for i := range options.Filters {
|
|
||||||
filter := &options.Filters[i]
|
|
||||||
whereClause := h.buildFilterSQL(filter, tableName)
|
|
||||||
if whereClause != "" {
|
|
||||||
whereClauses = append(whereClauses, fmt.Sprintf("(%s)", whereClause))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Combine WHERE clauses
|
|
||||||
whereSQL := ""
|
|
||||||
if len(whereClauses) > 0 {
|
|
||||||
whereSQL = "WHERE " + strings.Join(whereClauses, " AND ")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add custom SQL WHERE if provided
|
// Add custom SQL WHERE if provided
|
||||||
if options.CustomSQLWhere != "" {
|
if options.CustomSQLWhere != "" {
|
||||||
@@ -2651,19 +2716,86 @@ func (h *Handler) FetchRowNumber(ctx context.Context, tableName string, pkName s
|
|||||||
var result []struct {
|
var result []struct {
|
||||||
RN int64 `bun:"rn"`
|
RN int64 `bun:"rn"`
|
||||||
}
|
}
|
||||||
|
logger.Debug("[FetchRowNumber] BEFORE Query call - about to execute raw query")
|
||||||
err := h.db.Query(ctx, &result, queryStr, pkValue)
|
err := h.db.Query(ctx, &result, queryStr, pkValue)
|
||||||
|
logger.Debug("[FetchRowNumber] AFTER Query call - query completed with %d results, err: %v", len(result), err)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, fmt.Errorf("failed to fetch row number: %w", err)
|
return 0, fmt.Errorf("failed to fetch row number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(result) == 0 {
|
if len(result) == 0 {
|
||||||
return 0, fmt.Errorf("no row found for primary key %s", pkValue)
|
whereInfo := "none"
|
||||||
|
if whereSQL != "" {
|
||||||
|
whereInfo = whereSQL
|
||||||
|
}
|
||||||
|
return 0, fmt.Errorf("no row found for primary key %s=%s with active filters: %s", pkName, pkValue, whereInfo)
|
||||||
}
|
}
|
||||||
|
|
||||||
return result[0].RN, nil
|
return result[0].RN, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildFilterSQL converts a filter to SQL WHERE clause string
|
// buildFilterSQL converts a filter to SQL WHERE clause string
|
||||||
|
// buildWhereClauseWithORGrouping builds a WHERE clause from filters with proper OR grouping
|
||||||
|
// Groups consecutive OR filters together to ensure proper SQL precedence
|
||||||
|
// Example: [A, B(OR), C(OR), D(AND)] => WHERE (A OR B OR C) AND D
|
||||||
|
func (h *Handler) buildWhereClauseWithORGrouping(filters []common.FilterOption, tableName string) string {
|
||||||
|
if len(filters) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var groups []string
|
||||||
|
i := 0
|
||||||
|
|
||||||
|
for i < len(filters) {
|
||||||
|
// Check if this starts an OR group (next filter has OR logic)
|
||||||
|
startORGroup := i+1 < len(filters) && strings.EqualFold(filters[i+1].LogicOperator, "OR")
|
||||||
|
|
||||||
|
if startORGroup {
|
||||||
|
// Collect all consecutive filters that are OR'd together
|
||||||
|
orGroup := []string{}
|
||||||
|
|
||||||
|
// Add current filter
|
||||||
|
filterSQL := h.buildFilterSQL(&filters[i], tableName)
|
||||||
|
if filterSQL != "" {
|
||||||
|
orGroup = append(orGroup, filterSQL)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect remaining OR filters
|
||||||
|
j := i + 1
|
||||||
|
for j < len(filters) && strings.EqualFold(filters[j].LogicOperator, "OR") {
|
||||||
|
filterSQL := h.buildFilterSQL(&filters[j], tableName)
|
||||||
|
if filterSQL != "" {
|
||||||
|
orGroup = append(orGroup, filterSQL)
|
||||||
|
}
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Group OR filters with parentheses
|
||||||
|
if len(orGroup) > 0 {
|
||||||
|
if len(orGroup) == 1 {
|
||||||
|
groups = append(groups, orGroup[0])
|
||||||
|
} else {
|
||||||
|
groups = append(groups, "("+strings.Join(orGroup, " OR ")+")")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i = j
|
||||||
|
} else {
|
||||||
|
// Single filter with AND logic (or first filter)
|
||||||
|
filterSQL := h.buildFilterSQL(&filters[i], tableName)
|
||||||
|
if filterSQL != "" {
|
||||||
|
groups = append(groups, filterSQL)
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(groups) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return "WHERE " + strings.Join(groups, " AND ")
|
||||||
|
}
|
||||||
|
|
||||||
func (h *Handler) buildFilterSQL(filter *common.FilterOption, tableName string) string {
|
func (h *Handler) buildFilterSQL(filter *common.FilterOption, tableName string) string {
|
||||||
qualifiedColumn := h.qualifyColumnName(filter.Column, tableName)
|
qualifiedColumn := h.qualifyColumnName(filter.Column, tableName)
|
||||||
|
|
||||||
@@ -2754,6 +2886,8 @@ func (h *Handler) filterExtendedOptions(validator *common.ColumnValidator, optio
|
|||||||
|
|
||||||
// Filter base RequestOptions
|
// Filter base RequestOptions
|
||||||
filtered.RequestOptions = validator.FilterRequestOptions(options.RequestOptions)
|
filtered.RequestOptions = validator.FilterRequestOptions(options.RequestOptions)
|
||||||
|
// Restore JoinAliases cleared by FilterRequestOptions — still needed for SanitizeWhereClause
|
||||||
|
filtered.RequestOptions.JoinAliases = options.JoinAliases
|
||||||
|
|
||||||
// Filter SearchColumns
|
// Filter SearchColumns
|
||||||
filtered.SearchColumns = validator.FilterValidColumns(options.SearchColumns)
|
filtered.SearchColumns = validator.FilterValidColumns(options.SearchColumns)
|
||||||
|
|||||||
@@ -274,9 +274,11 @@ func (h *Handler) parseOptionsFromHeaders(r common.Request, model interface{}) E
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve relation names (convert table names to field names) if model is provided
|
// Resolve relation names (convert table names/prefixes to actual model field names) if model is provided.
|
||||||
// Skip resolution if X-Files header was provided, as XFiles uses Prefix which already contains the correct field names
|
// This runs for both regular headers and X-Files, because XFile prefixes don't always match model
|
||||||
if model != nil && !options.XFilesPresent {
|
// field names (e.g., prefix "HUB" vs field "HUB_RID_HUB"). RelatedKey/ForeignKey are used to
|
||||||
|
// disambiguate when multiple fields point to the same related type.
|
||||||
|
if model != nil {
|
||||||
h.resolveRelationNamesInOptions(&options, model)
|
h.resolveRelationNamesInOptions(&options, model)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -550,10 +552,8 @@ func (h *Handler) parseCustomSQLJoin(options *ExtendedRequestOptions, value stri
|
|||||||
// - "LEFT JOIN departments d ON ..." -> "d"
|
// - "LEFT JOIN departments d ON ..." -> "d"
|
||||||
// - "INNER JOIN users AS u ON ..." -> "u"
|
// - "INNER JOIN users AS u ON ..." -> "u"
|
||||||
// - "JOIN roles r ON ..." -> "r"
|
// - "JOIN roles r ON ..." -> "r"
|
||||||
|
// - "INNER JOIN LATERAL (...) fn ON true" -> "fn"
|
||||||
func extractJoinAlias(joinClause string) string {
|
func extractJoinAlias(joinClause string) string {
|
||||||
// Pattern: JOIN table_name [AS] alias ON ...
|
|
||||||
// We need to extract the alias (word before ON)
|
|
||||||
|
|
||||||
upperJoin := strings.ToUpper(joinClause)
|
upperJoin := strings.ToUpper(joinClause)
|
||||||
|
|
||||||
// Find the "JOIN" keyword position
|
// Find the "JOIN" keyword position
|
||||||
@@ -562,7 +562,20 @@ func extractJoinAlias(joinClause string) string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find the "ON" keyword position
|
// Lateral joins: alias is the word after the closing ) and before ON
|
||||||
|
if strings.Contains(upperJoin, "LATERAL") {
|
||||||
|
lastClose := strings.LastIndex(joinClause, ")")
|
||||||
|
if lastClose != -1 {
|
||||||
|
words := strings.Fields(joinClause[lastClose+1:])
|
||||||
|
// words should be like ["fn", "on", "true"] or ["on", "true"]
|
||||||
|
if len(words) >= 1 && !strings.EqualFold(words[0], "on") {
|
||||||
|
return words[0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Regular joins: find the "ON" keyword position (first occurrence)
|
||||||
onIdx := strings.Index(upperJoin, " ON ")
|
onIdx := strings.Index(upperJoin, " ON ")
|
||||||
if onIdx == -1 {
|
if onIdx == -1 {
|
||||||
return ""
|
return ""
|
||||||
@@ -863,8 +876,21 @@ func (h *Handler) resolveRelationNamesInOptions(options *ExtendedRequestOptions,
|
|||||||
|
|
||||||
// Resolve each part of the path
|
// Resolve each part of the path
|
||||||
currentModel := model
|
currentModel := model
|
||||||
for _, part := range parts {
|
for partIdx, part := range parts {
|
||||||
resolvedPart := h.resolveRelationName(currentModel, part)
|
isLast := partIdx == len(parts)-1
|
||||||
|
var resolvedPart string
|
||||||
|
if isLast {
|
||||||
|
// For the final part, use join-key-aware resolution to disambiguate when
|
||||||
|
// multiple fields point to the same type (e.g., HUB_RID_HUB vs HUB_RID_ASSIGNEDTO).
|
||||||
|
// RelatedKey = parent's local column linking to child; ForeignKey = local column linking to parent.
|
||||||
|
localKey := preload.RelatedKey
|
||||||
|
if localKey == "" {
|
||||||
|
localKey = preload.ForeignKey
|
||||||
|
}
|
||||||
|
resolvedPart = h.resolveRelationNameWithJoinKey(currentModel, part, localKey)
|
||||||
|
} else {
|
||||||
|
resolvedPart = h.resolveRelationName(currentModel, part)
|
||||||
|
}
|
||||||
resolvedParts = append(resolvedParts, resolvedPart)
|
resolvedParts = append(resolvedParts, resolvedPart)
|
||||||
|
|
||||||
// Try to get the model type for the next level
|
// Try to get the model type for the next level
|
||||||
@@ -980,6 +1006,101 @@ func (h *Handler) resolveRelationName(model interface{}, nameOrTable string) str
|
|||||||
return nameOrTable
|
return nameOrTable
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// resolveRelationNameWithJoinKey resolves a relation name like resolveRelationName, but when
|
||||||
|
// multiple fields point to the same related type, uses localKey to pick the one whose bun join
|
||||||
|
// tag starts with "join:localKey=". Falls back to resolveRelationName if no key match is found.
|
||||||
|
func (h *Handler) resolveRelationNameWithJoinKey(model interface{}, nameOrTable string, localKey string) string {
|
||||||
|
if localKey == "" {
|
||||||
|
return h.resolveRelationName(model, nameOrTable)
|
||||||
|
}
|
||||||
|
|
||||||
|
modelType := reflect.TypeOf(model)
|
||||||
|
if modelType == nil {
|
||||||
|
return nameOrTable
|
||||||
|
}
|
||||||
|
if modelType.Kind() == reflect.Ptr {
|
||||||
|
modelType = modelType.Elem()
|
||||||
|
}
|
||||||
|
if modelType == nil || modelType.Kind() != reflect.Struct {
|
||||||
|
return nameOrTable
|
||||||
|
}
|
||||||
|
|
||||||
|
// If it's already a direct field name, return as-is (no ambiguity).
|
||||||
|
for i := 0; i < modelType.NumField(); i++ {
|
||||||
|
if modelType.Field(i).Name == nameOrTable {
|
||||||
|
return nameOrTable
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
normalizedInput := strings.ToLower(strings.ReplaceAll(nameOrTable, "_", ""))
|
||||||
|
localKeyLower := strings.ToLower(localKey)
|
||||||
|
|
||||||
|
// Find all fields whose related type matches nameOrTable, then pick the one
|
||||||
|
// whose bun join tag local key matches localKey.
|
||||||
|
var fallbackField string
|
||||||
|
for i := 0; i < modelType.NumField(); i++ {
|
||||||
|
field := modelType.Field(i)
|
||||||
|
fieldType := field.Type
|
||||||
|
|
||||||
|
var targetType reflect.Type
|
||||||
|
if fieldType.Kind() == reflect.Slice {
|
||||||
|
targetType = fieldType.Elem()
|
||||||
|
} else if fieldType.Kind() == reflect.Ptr {
|
||||||
|
targetType = fieldType.Elem()
|
||||||
|
}
|
||||||
|
if targetType != nil && targetType.Kind() == reflect.Ptr {
|
||||||
|
targetType = targetType.Elem()
|
||||||
|
}
|
||||||
|
if targetType == nil || targetType.Kind() != reflect.Struct {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
normalizedTypeName := strings.ToLower(targetType.Name())
|
||||||
|
normalizedTypeName = strings.TrimPrefix(normalizedTypeName, "modelcore")
|
||||||
|
normalizedTypeName = strings.TrimPrefix(normalizedTypeName, "model")
|
||||||
|
if normalizedTypeName != normalizedInput {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Type name matches; record as fallback.
|
||||||
|
if fallbackField == "" {
|
||||||
|
fallbackField = field.Name
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check bun join tag: "join:localKey=foreignKey"
|
||||||
|
bunTag := field.Tag.Get("bun")
|
||||||
|
for _, tagPart := range strings.Split(bunTag, ",") {
|
||||||
|
tagPart = strings.TrimSpace(tagPart)
|
||||||
|
if !strings.HasPrefix(tagPart, "join:") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
joinSpec := strings.TrimPrefix(tagPart, "join:")
|
||||||
|
// joinSpec can be "col1=col2" or "col1=col2 col3=col4" (multi-col joins)
|
||||||
|
joinCols := strings.Fields(joinSpec)
|
||||||
|
if len(joinCols) == 0 {
|
||||||
|
joinCols = []string{joinSpec}
|
||||||
|
}
|
||||||
|
for _, joinCol := range joinCols {
|
||||||
|
eqIdx := strings.Index(joinCol, "=")
|
||||||
|
if eqIdx < 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
joinLocalKey := strings.ToLower(joinCol[:eqIdx])
|
||||||
|
if joinLocalKey == localKeyLower {
|
||||||
|
logger.Debug("Resolved '%s' (localKey: %s) -> field '%s'", nameOrTable, localKey, field.Name)
|
||||||
|
return field.Name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if fallbackField != "" {
|
||||||
|
logger.Debug("No join key match for '%s' (localKey: %s), using first type match: '%s'", nameOrTable, localKey, fallbackField)
|
||||||
|
return fallbackField
|
||||||
|
}
|
||||||
|
return h.resolveRelationName(model, nameOrTable)
|
||||||
|
}
|
||||||
|
|
||||||
// addXFilesPreload converts an XFiles relation into a PreloadOption
|
// addXFilesPreload converts an XFiles relation into a PreloadOption
|
||||||
// and recursively processes its children
|
// and recursively processes its children
|
||||||
func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOptions, basePath string) {
|
func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOptions, basePath string) {
|
||||||
@@ -1061,15 +1182,42 @@ func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOption
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Transfer SqlJoins from XFiles to PreloadOption first, so aliases are available for WHERE sanitization
|
||||||
|
if len(xfile.SqlJoins) > 0 {
|
||||||
|
preloadOpt.SqlJoins = make([]string, 0, len(xfile.SqlJoins))
|
||||||
|
preloadOpt.JoinAliases = make([]string, 0, len(xfile.SqlJoins))
|
||||||
|
|
||||||
|
for _, joinClause := range xfile.SqlJoins {
|
||||||
|
// Sanitize the join clause
|
||||||
|
sanitizedJoin := common.SanitizeWhereClause(joinClause, "", nil)
|
||||||
|
if sanitizedJoin == "" {
|
||||||
|
logger.Warn("X-Files: SqlJoin failed sanitization for %s: %s", relationPath, joinClause)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
preloadOpt.SqlJoins = append(preloadOpt.SqlJoins, sanitizedJoin)
|
||||||
|
|
||||||
|
// Extract join alias for validation
|
||||||
|
alias := extractJoinAlias(sanitizedJoin)
|
||||||
|
if alias != "" {
|
||||||
|
preloadOpt.JoinAliases = append(preloadOpt.JoinAliases, alias)
|
||||||
|
logger.Debug("X-Files: Extracted join alias for %s: %s", relationPath, alias)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Debug("X-Files: Added %d SQL joins to preload %s", len(preloadOpt.SqlJoins), relationPath)
|
||||||
|
}
|
||||||
|
|
||||||
// Add WHERE clause if SQL conditions specified
|
// Add WHERE clause if SQL conditions specified
|
||||||
|
// SqlJoins must be processed first so join aliases are known and not incorrectly replaced
|
||||||
whereConditions := make([]string, 0)
|
whereConditions := make([]string, 0)
|
||||||
if len(xfile.SqlAnd) > 0 {
|
if len(xfile.SqlAnd) > 0 {
|
||||||
// Process each SQL condition
|
var sqlAndOpts *common.RequestOptions
|
||||||
// Note: We don't add table prefixes here because they're only needed for JOINs
|
if len(preloadOpt.JoinAliases) > 0 {
|
||||||
// The handler will add prefixes later if SqlJoins are present
|
sqlAndOpts = &common.RequestOptions{JoinAliases: preloadOpt.JoinAliases}
|
||||||
|
}
|
||||||
for _, sqlCond := range xfile.SqlAnd {
|
for _, sqlCond := range xfile.SqlAnd {
|
||||||
// Sanitize the condition without adding prefixes
|
sanitizedCond := common.SanitizeWhereClause(sqlCond, xfile.TableName, sqlAndOpts)
|
||||||
sanitizedCond := common.SanitizeWhereClause(sqlCond, xfile.TableName)
|
|
||||||
if sanitizedCond != "" {
|
if sanitizedCond != "" {
|
||||||
whereConditions = append(whereConditions, sanitizedCond)
|
whereConditions = append(whereConditions, sanitizedCond)
|
||||||
}
|
}
|
||||||
@@ -1114,32 +1262,6 @@ func (h *Handler) addXFilesPreload(xfile *XFiles, options *ExtendedRequestOption
|
|||||||
logger.Debug("X-Files: Set foreign key for %s: %s", relationPath, xfile.ForeignKey)
|
logger.Debug("X-Files: Set foreign key for %s: %s", relationPath, xfile.ForeignKey)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Transfer SqlJoins from XFiles to PreloadOption
|
|
||||||
if len(xfile.SqlJoins) > 0 {
|
|
||||||
preloadOpt.SqlJoins = make([]string, 0, len(xfile.SqlJoins))
|
|
||||||
preloadOpt.JoinAliases = make([]string, 0, len(xfile.SqlJoins))
|
|
||||||
|
|
||||||
for _, joinClause := range xfile.SqlJoins {
|
|
||||||
// Sanitize the join clause
|
|
||||||
sanitizedJoin := common.SanitizeWhereClause(joinClause, "", nil)
|
|
||||||
if sanitizedJoin == "" {
|
|
||||||
logger.Warn("X-Files: SqlJoin failed sanitization for %s: %s", relationPath, joinClause)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
preloadOpt.SqlJoins = append(preloadOpt.SqlJoins, sanitizedJoin)
|
|
||||||
|
|
||||||
// Extract join alias for validation
|
|
||||||
alias := extractJoinAlias(sanitizedJoin)
|
|
||||||
if alias != "" {
|
|
||||||
preloadOpt.JoinAliases = append(preloadOpt.JoinAliases, alias)
|
|
||||||
logger.Debug("X-Files: Extracted join alias for %s: %s", relationPath, alias)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.Debug("X-Files: Added %d SQL joins to preload %s", len(preloadOpt.SqlJoins), relationPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if this table has a recursive child - if so, mark THIS preload as recursive
|
// Check if this table has a recursive child - if so, mark THIS preload as recursive
|
||||||
// and store the recursive child's RelatedKey for recursion generation
|
// and store the recursive child's RelatedKey for recursion generation
|
||||||
hasRecursiveChild := false
|
hasRecursiveChild := false
|
||||||
|
|||||||
@@ -142,6 +142,16 @@ func TestExtractJoinAlias(t *testing.T) {
|
|||||||
joinClause: "LEFT JOIN departments",
|
joinClause: "LEFT JOIN departments",
|
||||||
expected: "",
|
expected: "",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: "LATERAL join with alias",
|
||||||
|
joinClause: "inner join lateral (select sortorder from compute_fn(t.id)) fn on true",
|
||||||
|
expected: "fn",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "LATERAL join with multiline subquery containing inner ON",
|
||||||
|
joinClause: "inner join lateral (\nselect string_agg(a.name, '.') as sortorder\nfrom tree(t.id) r\ninner join account a on a.id = r.id\n) fn on true",
|
||||||
|
expected: "fn",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
|
|||||||
@@ -12,6 +12,10 @@ import (
|
|||||||
type HookType string
|
type HookType string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
// BeforeHandle fires after model resolution, before operation dispatch.
|
||||||
|
// Use this for auth checks that need model rules and user context simultaneously.
|
||||||
|
BeforeHandle HookType = "before_handle"
|
||||||
|
|
||||||
// Read operation hooks
|
// Read operation hooks
|
||||||
BeforeRead HookType = "before_read"
|
BeforeRead HookType = "before_read"
|
||||||
AfterRead HookType = "after_read"
|
AfterRead HookType = "after_read"
|
||||||
@@ -42,6 +46,9 @@ type HookContext struct {
|
|||||||
Model interface{}
|
Model interface{}
|
||||||
Options ExtendedRequestOptions
|
Options ExtendedRequestOptions
|
||||||
|
|
||||||
|
// Operation being dispatched (e.g. "read", "create", "update", "delete")
|
||||||
|
Operation string
|
||||||
|
|
||||||
// Operation-specific fields
|
// Operation-specific fields
|
||||||
ID string
|
ID string
|
||||||
Data interface{} // For create/update operations
|
Data interface{} // For create/update operations
|
||||||
@@ -56,6 +63,14 @@ type HookContext struct {
|
|||||||
// Response writer - allows hooks to modify response
|
// Response writer - allows hooks to modify response
|
||||||
Writer common.ResponseWriter
|
Writer common.ResponseWriter
|
||||||
|
|
||||||
|
// Request - the original HTTP request
|
||||||
|
Request common.Request
|
||||||
|
|
||||||
|
// Allow hooks to abort the operation
|
||||||
|
Abort bool // If set to true, the operation will be aborted
|
||||||
|
AbortMessage string // Message to return if aborted
|
||||||
|
AbortCode int // HTTP status code if aborted
|
||||||
|
|
||||||
// Tx provides access to the database/transaction for executing additional SQL
|
// Tx provides access to the database/transaction for executing additional SQL
|
||||||
// This allows hooks to run custom queries in addition to the main Query chain
|
// This allows hooks to run custom queries in addition to the main Query chain
|
||||||
Tx common.Database
|
Tx common.Database
|
||||||
@@ -110,6 +125,12 @@ func (r *HookRegistry) Execute(hookType HookType, ctx *HookContext) error {
|
|||||||
logger.Error("Hook %d for %s failed: %v", i+1, hookType, err)
|
logger.Error("Hook %d for %s failed: %v", i+1, hookType, err)
|
||||||
return fmt.Errorf("hook execution failed: %w", err)
|
return fmt.Errorf("hook execution failed: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if hook requested abort
|
||||||
|
if ctx.Abort {
|
||||||
|
logger.Warn("Hook %d for %s requested abort: %s", i+1, hookType, ctx.AbortMessage)
|
||||||
|
return fmt.Errorf("operation aborted by hook: %s", ctx.AbortMessage)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// logger.Debug("All hooks for %s executed successfully", hookType)
|
// logger.Debug("All hooks for %s executed successfully", hookType)
|
||||||
|
|||||||
@@ -125,17 +125,17 @@ func SetupMuxRoutes(muxRouter *mux.Router, handler *Handler, authMiddleware Midd
|
|||||||
metadataPath := buildRoutePath(schema, entity) + "/metadata"
|
metadataPath := buildRoutePath(schema, entity) + "/metadata"
|
||||||
|
|
||||||
// Create handler functions for this specific entity
|
// Create handler functions for this specific entity
|
||||||
entityHandler := createMuxHandler(handler, schema, entity, "")
|
var entityHandler http.Handler = createMuxHandler(handler, schema, entity, "")
|
||||||
entityWithIDHandler := createMuxHandler(handler, schema, entity, "id")
|
var entityWithIDHandler http.Handler = createMuxHandler(handler, schema, entity, "id")
|
||||||
metadataHandler := createMuxGetHandler(handler, schema, entity, "")
|
var metadataHandler http.Handler = createMuxGetHandler(handler, schema, entity, "")
|
||||||
optionsEntityHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "POST", "OPTIONS"})
|
optionsEntityHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "POST", "OPTIONS"})
|
||||||
optionsEntityWithIDHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "PUT", "PATCH", "DELETE", "POST", "OPTIONS"})
|
optionsEntityWithIDHandler := createMuxOptionsHandler(handler, schema, entity, []string{"GET", "PUT", "PATCH", "DELETE", "POST", "OPTIONS"})
|
||||||
|
|
||||||
// Apply authentication middleware if provided
|
// Apply authentication middleware if provided
|
||||||
if authMiddleware != nil {
|
if authMiddleware != nil {
|
||||||
entityHandler = authMiddleware(entityHandler).(http.HandlerFunc)
|
entityHandler = authMiddleware(entityHandler)
|
||||||
entityWithIDHandler = authMiddleware(entityWithIDHandler).(http.HandlerFunc)
|
entityWithIDHandler = authMiddleware(entityWithIDHandler)
|
||||||
metadataHandler = authMiddleware(metadataHandler).(http.HandlerFunc)
|
metadataHandler = authMiddleware(metadataHandler)
|
||||||
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -280,9 +280,34 @@ type BunRouterHandler interface {
|
|||||||
Handle(method, path string, handler bunrouter.HandlerFunc)
|
Handle(method, path string, handler bunrouter.HandlerFunc)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// wrapBunRouterHandler wraps a bunrouter handler with auth middleware if provided
|
||||||
|
func wrapBunRouterHandler(handler bunrouter.HandlerFunc, authMiddleware MiddlewareFunc) bunrouter.HandlerFunc {
|
||||||
|
if authMiddleware == nil {
|
||||||
|
return handler
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
|
// Create an http.Handler that calls the bunrouter handler
|
||||||
|
httpHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Replace the embedded *http.Request with the middleware-enriched one
|
||||||
|
// so that auth context (user ID, etc.) is visible to the handler.
|
||||||
|
enrichedReq := req
|
||||||
|
enrichedReq.Request = r
|
||||||
|
_ = handler(w, enrichedReq)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Wrap with auth middleware and execute
|
||||||
|
wrappedHandler := authMiddleware(httpHandler)
|
||||||
|
wrappedHandler.ServeHTTP(w, req.Request)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// SetupBunRouterRoutes sets up bunrouter routes for the RestHeadSpec API
|
// SetupBunRouterRoutes sets up bunrouter routes for the RestHeadSpec API
|
||||||
// Accepts bunrouter.Router or bunrouter.Group
|
// Accepts bunrouter.Router or bunrouter.Group
|
||||||
func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
// authMiddleware is optional - if provided, routes will be protected with the middleware
|
||||||
|
func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler, authMiddleware MiddlewareFunc) {
|
||||||
|
|
||||||
// CORS config
|
// CORS config
|
||||||
corsConfig := common.DefaultCORSConfig()
|
corsConfig := common.DefaultCORSConfig()
|
||||||
@@ -292,6 +317,14 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
|
handler.HandleOpenAPI(respAdapter, reqAdapter)
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
r.Handle("OPTIONS", "/openapi", func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
|
reqAdapter := router.NewHTTPRequest(req.Request)
|
||||||
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -313,7 +346,7 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
currentEntity := entity
|
currentEntity := entity
|
||||||
|
|
||||||
// GET and POST for /{schema}/{entity}
|
// GET and POST for /{schema}/{entity}
|
||||||
r.Handle("GET", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
getEntityHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -324,9 +357,10 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("GET", entityPath, wrapBunRouterHandler(getEntityHandler, authMiddleware))
|
||||||
|
|
||||||
r.Handle("POST", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
postEntityHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -337,10 +371,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("POST", entityPath, wrapBunRouterHandler(postEntityHandler, authMiddleware))
|
||||||
|
|
||||||
// GET, POST, PUT, PATCH, DELETE for /{schema}/{entity}/:id
|
// GET, POST, PUT, PATCH, DELETE for /{schema}/{entity}/:id
|
||||||
r.Handle("GET", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
getEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -352,9 +387,10 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("GET", entityWithIDPath, wrapBunRouterHandler(getEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
r.Handle("POST", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
postEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -366,9 +402,10 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("POST", entityWithIDPath, wrapBunRouterHandler(postEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
r.Handle("PUT", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
putEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -380,9 +417,10 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("PUT", entityWithIDPath, wrapBunRouterHandler(putEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
r.Handle("PATCH", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
patchEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -394,9 +432,10 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("PATCH", entityWithIDPath, wrapBunRouterHandler(patchEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
r.Handle("DELETE", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
deleteEntityWithIDHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -408,10 +447,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.Handle(respAdapter, reqAdapter, params)
|
handler.Handle(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("DELETE", entityWithIDPath, wrapBunRouterHandler(deleteEntityWithIDHandler, authMiddleware))
|
||||||
|
|
||||||
// Metadata endpoint
|
// Metadata endpoint
|
||||||
r.Handle("GET", metadataPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
metadataHandler := func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
common.SetCORSHeaders(respAdapter, reqAdapter, corsConfig)
|
||||||
@@ -422,9 +462,11 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
|
|
||||||
handler.HandleGet(respAdapter, reqAdapter, params)
|
handler.HandleGet(respAdapter, reqAdapter, params)
|
||||||
return nil
|
return nil
|
||||||
})
|
}
|
||||||
|
r.Handle("GET", metadataPath, wrapBunRouterHandler(metadataHandler, authMiddleware))
|
||||||
|
|
||||||
// OPTIONS route without ID (returns metadata)
|
// OPTIONS route without ID (returns metadata)
|
||||||
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
r.Handle("OPTIONS", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
r.Handle("OPTIONS", entityPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
@@ -441,6 +483,7 @@ func SetupBunRouterRoutes(r BunRouterHandler, handler *Handler) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
// OPTIONS route with ID (returns metadata)
|
// OPTIONS route with ID (returns metadata)
|
||||||
|
// Don't apply auth middleware to OPTIONS - CORS preflight must not require auth
|
||||||
r.Handle("OPTIONS", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
r.Handle("OPTIONS", entityWithIDPath, func(w http.ResponseWriter, req bunrouter.Request) error {
|
||||||
respAdapter := router.NewHTTPResponseWriter(w)
|
respAdapter := router.NewHTTPResponseWriter(w)
|
||||||
reqAdapter := router.NewBunRouterRequest(req)
|
reqAdapter := router.NewBunRouterRequest(req)
|
||||||
@@ -466,8 +509,8 @@ func ExampleBunRouterWithBunDB(bunDB *bun.DB) {
|
|||||||
// Create bunrouter
|
// Create bunrouter
|
||||||
bunRouter := bunrouter.New()
|
bunRouter := bunrouter.New()
|
||||||
|
|
||||||
// Setup routes
|
// Setup routes without authentication
|
||||||
SetupBunRouterRoutes(bunRouter, handler)
|
SetupBunRouterRoutes(bunRouter, handler, nil)
|
||||||
|
|
||||||
// Start server
|
// Start server
|
||||||
if err := http.ListenAndServe(":8080", bunRouter); err != nil {
|
if err := http.ListenAndServe(":8080", bunRouter); err != nil {
|
||||||
@@ -487,7 +530,7 @@ func ExampleBunRouterWithGroup(bunDB *bun.DB) {
|
|||||||
apiGroup := bunRouter.NewGroup("/api")
|
apiGroup := bunRouter.NewGroup("/api")
|
||||||
|
|
||||||
// Setup RestHeadSpec routes on the group - routes will be under /api
|
// Setup RestHeadSpec routes on the group - routes will be under /api
|
||||||
SetupBunRouterRoutes(apiGroup, handler)
|
SetupBunRouterRoutes(apiGroup, handler, nil)
|
||||||
|
|
||||||
// Start server
|
// Start server
|
||||||
if err := http.ListenAndServe(":8080", bunRouter); err != nil {
|
if err := http.ListenAndServe(":8080", bunRouter); err != nil {
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package restheadspec
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/security"
|
"github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
@@ -9,6 +10,17 @@ import (
|
|||||||
|
|
||||||
// RegisterSecurityHooks registers all security-related hooks with the handler
|
// RegisterSecurityHooks registers all security-related hooks with the handler
|
||||||
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
||||||
|
// Hook 0: BeforeHandle - enforce auth after model resolution
|
||||||
|
handler.Hooks().Register(BeforeHandle, func(hookCtx *HookContext) error {
|
||||||
|
if err := security.CheckModelAuthAllowed(newSecurityContext(hookCtx), hookCtx.Operation); err != nil {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = err.Error()
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
// Hook 1: BeforeRead - Load security rules
|
// Hook 1: BeforeRead - Load security rules
|
||||||
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
||||||
secCtx := newSecurityContext(hookCtx)
|
secCtx := newSecurityContext(hookCtx)
|
||||||
@@ -33,6 +45,18 @@ func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList
|
|||||||
return security.LogDataAccess(secCtx)
|
return security.LogDataAccess(secCtx)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Hook 5: BeforeUpdate - enforce CanUpdate rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeUpdate, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelUpdateAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 6: BeforeDelete - enforce CanDelete rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeDelete, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelDeleteAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
logger.Info("Security hooks registered for restheadspec handler")
|
logger.Info("Security hooks registered for restheadspec handler")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -405,11 +405,16 @@ assert.Equal(t, "user_id = {UserID}", row.Template)
|
|||||||
```
|
```
|
||||||
HTTP Request
|
HTTP Request
|
||||||
↓
|
↓
|
||||||
NewAuthMiddleware → calls provider.Authenticate()
|
NewOptionalAuthMiddleware → calls provider.Authenticate()
|
||||||
↓ (adds UserContext to context)
|
↓ (adds UserContext or guest context; never 401)
|
||||||
SetSecurityMiddleware → adds SecurityList to context
|
SetSecurityMiddleware → adds SecurityList to context
|
||||||
↓
|
↓
|
||||||
Handler.Handle()
|
Handler.Handle() → resolves model
|
||||||
|
↓
|
||||||
|
BeforeHandle Hook → CheckModelAuthAllowed(secCtx, operation)
|
||||||
|
├─ SecurityDisabled → allow
|
||||||
|
├─ CanPublicRead/Create/Update/Delete → allow unauthenticated
|
||||||
|
└─ UserID == 0 → abort 401
|
||||||
↓
|
↓
|
||||||
BeforeRead Hook → calls provider.GetColumnSecurity() + GetRowSecurity()
|
BeforeRead Hook → calls provider.GetColumnSecurity() + GetRowSecurity()
|
||||||
↓
|
↓
|
||||||
@@ -693,15 +698,30 @@ http.Handle("/api/protected", authHandler)
|
|||||||
optionalHandler := security.NewOptionalAuthHandler(securityList, myHandler)
|
optionalHandler := security.NewOptionalAuthHandler(securityList, myHandler)
|
||||||
http.Handle("/home", optionalHandler)
|
http.Handle("/home", optionalHandler)
|
||||||
|
|
||||||
// Example handler
|
// NewOptionalAuthMiddleware - For spec routes; auth enforcement deferred to BeforeHandle
|
||||||
func myHandler(w http.ResponseWriter, r *http.Request) {
|
apiRouter.Use(security.NewOptionalAuthMiddleware(securityList))
|
||||||
userCtx, _ := security.GetUserContext(r.Context())
|
apiRouter.Use(security.SetSecurityMiddleware(securityList))
|
||||||
if userCtx.UserID == 0 {
|
restheadspec.RegisterSecurityHooks(handler, securityList) // includes BeforeHandle
|
||||||
// Guest user
|
```
|
||||||
} else {
|
|
||||||
// Authenticated user
|
---
|
||||||
}
|
|
||||||
}
|
## Model-Level Access Control
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Register model with rules (pkg/modelregistry)
|
||||||
|
modelregistry.RegisterModelWithRules("public.products", &Product{}, modelregistry.ModelRules{
|
||||||
|
SecurityDisabled: false, // skip all auth when true
|
||||||
|
CanPublicRead: true, // unauthenticated reads allowed
|
||||||
|
CanPublicCreate: false, // requires auth
|
||||||
|
CanPublicUpdate: false, // requires auth
|
||||||
|
CanPublicDelete: false, // requires auth
|
||||||
|
CanUpdate: true, // authenticated can update
|
||||||
|
CanDelete: false, // authenticated cannot delete (enforced in BeforeDelete)
|
||||||
|
})
|
||||||
|
|
||||||
|
// CheckModelAuthAllowed used automatically in BeforeHandle hook
|
||||||
|
// No code needed — call RegisterSecurityHooks and it's applied
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@@ -751,14 +751,25 @@ resolvespec.RegisterSecurityHooks(resolveHandler, securityList)
|
|||||||
```
|
```
|
||||||
HTTP Request
|
HTTP Request
|
||||||
↓
|
↓
|
||||||
NewAuthMiddleware (security package)
|
NewOptionalAuthMiddleware (security package) ← recommended for spec routes
|
||||||
├─ Calls provider.Authenticate(request)
|
├─ Calls provider.Authenticate(request)
|
||||||
└─ Adds UserContext to context
|
├─ On success: adds authenticated UserContext to context
|
||||||
|
└─ On failure: adds guest UserContext (UserID=0) to context
|
||||||
↓
|
↓
|
||||||
SetSecurityMiddleware (security package)
|
SetSecurityMiddleware (security package)
|
||||||
└─ Adds SecurityList to context
|
└─ Adds SecurityList to context
|
||||||
↓
|
↓
|
||||||
Spec Handler (restheadspec/funcspec/resolvespec)
|
Spec Handler (restheadspec/funcspec/resolvespec/websocketspec/mqttspec)
|
||||||
|
└─ Resolves schema + entity + model from request
|
||||||
|
↓
|
||||||
|
BeforeHandle Hook (registered by spec via RegisterSecurityHooks)
|
||||||
|
├─ Adapts spec's HookContext → SecurityContext
|
||||||
|
├─ Calls security.CheckModelAuthAllowed(secCtx, operation)
|
||||||
|
│ ├─ Loads model rules from context or registry
|
||||||
|
│ ├─ SecurityDisabled → allow
|
||||||
|
│ ├─ CanPublicRead/Create/Update/Delete → allow unauthenticated
|
||||||
|
│ └─ UserID == 0 → 401 unauthorized
|
||||||
|
└─ On error: aborts with 401
|
||||||
↓
|
↓
|
||||||
BeforeRead Hook (registered by spec)
|
BeforeRead Hook (registered by spec)
|
||||||
├─ Adapts spec's HookContext → SecurityContext
|
├─ Adapts spec's HookContext → SecurityContext
|
||||||
@@ -784,7 +795,8 @@ HTTP Response (secured data)
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Key Points:**
|
**Key Points:**
|
||||||
- Security package is spec-agnostic and provides core logic
|
- `NewOptionalAuthMiddleware` never rejects — it sets guest context on auth failure; `BeforeHandle` enforces auth after model resolution
|
||||||
|
- `BeforeHandle` fires after model resolution, giving access to model rules and user context simultaneously
|
||||||
- Each spec registers its own hooks that adapt to SecurityContext
|
- Each spec registers its own hooks that adapt to SecurityContext
|
||||||
- Security rules are loaded once and cached for the request
|
- Security rules are loaded once and cached for the request
|
||||||
- Row security is applied to the query (database level)
|
- Row security is applied to the query (database level)
|
||||||
@@ -1002,15 +1014,49 @@ func (p *MyProvider) GetRowSecurity(ctx context.Context, userID int, schema, tab
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Model-Level Access Control
|
||||||
|
|
||||||
|
Use `ModelRules` (from `pkg/modelregistry`) to control per-entity auth behavior:
|
||||||
|
|
||||||
|
```go
|
||||||
|
modelregistry.RegisterModelWithRules("public.products", &Product{}, modelregistry.ModelRules{
|
||||||
|
SecurityDisabled: false, // true = skip all auth checks
|
||||||
|
CanPublicRead: true, // unauthenticated GET allowed
|
||||||
|
CanPublicCreate: false, // requires auth
|
||||||
|
CanPublicUpdate: false, // requires auth
|
||||||
|
CanPublicDelete: false, // requires auth
|
||||||
|
CanUpdate: true, // authenticated users can update
|
||||||
|
CanDelete: false, // authenticated users cannot delete
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
`CheckModelAuthAllowed(secCtx, operation)` applies these rules in `BeforeHandle`:
|
||||||
|
1. `SecurityDisabled` → allow all
|
||||||
|
2. `CanPublicRead/Create/Update/Delete` → allow unauthenticated for that operation
|
||||||
|
3. Guest (UserID == 0) → return 401
|
||||||
|
4. Authenticated → allow (operation-specific `CanUpdate`/`CanDelete` checked in `BeforeUpdate`/`BeforeDelete`)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Middleware and Handler API
|
## Middleware and Handler API
|
||||||
|
|
||||||
### NewAuthMiddleware
|
### NewAuthMiddleware
|
||||||
Standard middleware that authenticates all requests:
|
Standard middleware that authenticates all requests and returns 401 on failure:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
router.Use(security.NewAuthMiddleware(securityList))
|
router.Use(security.NewAuthMiddleware(securityList))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### NewOptionalAuthMiddleware
|
||||||
|
Middleware for spec routes — always continues; sets guest context on auth failure:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Use with RegisterSecurityHooks — auth enforcement is deferred to BeforeHandle
|
||||||
|
apiRouter.Use(security.NewOptionalAuthMiddleware(securityList))
|
||||||
|
apiRouter.Use(security.SetSecurityMiddleware(securityList))
|
||||||
|
restheadspec.RegisterSecurityHooks(handler, securityList) // registers BeforeHandle
|
||||||
|
```
|
||||||
|
|
||||||
Routes can skip authentication using the `SkipAuth` helper:
|
Routes can skip authentication using the `SkipAuth` helper:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"reflect"
|
"reflect"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
|
||||||
)
|
)
|
||||||
|
|
||||||
// SecurityContext is a generic interface that any spec can implement to integrate with security features
|
// SecurityContext is a generic interface that any spec can implement to integrate with security features
|
||||||
@@ -226,6 +227,122 @@ func ApplyColumnSecurity(secCtx SecurityContext, securityList *SecurityList) err
|
|||||||
return applyColumnSecurity(secCtx, securityList)
|
return applyColumnSecurity(secCtx, securityList)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// checkModelUpdateAllowed returns an error if CanUpdate is false for the model.
|
||||||
|
// Rules are read from context (set by NewModelAuthMiddleware) with a fallback to the model registry.
|
||||||
|
func checkModelUpdateAllowed(secCtx SecurityContext) error {
|
||||||
|
rules, ok := GetModelRulesFromContext(secCtx.GetContext())
|
||||||
|
if !ok {
|
||||||
|
schema := secCtx.GetSchema()
|
||||||
|
entity := secCtx.GetEntity()
|
||||||
|
var err error
|
||||||
|
if schema != "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(fmt.Sprintf("%s.%s", schema, entity))
|
||||||
|
}
|
||||||
|
if err != nil || schema == "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(entity)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil // model not registered, allow by default
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !rules.CanUpdate {
|
||||||
|
return fmt.Errorf("update not allowed for %s", secCtx.GetEntity())
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkModelDeleteAllowed returns an error if CanDelete is false for the model.
|
||||||
|
// Rules are read from context (set by NewModelAuthMiddleware) with a fallback to the model registry.
|
||||||
|
func checkModelDeleteAllowed(secCtx SecurityContext) error {
|
||||||
|
rules, ok := GetModelRulesFromContext(secCtx.GetContext())
|
||||||
|
if !ok {
|
||||||
|
schema := secCtx.GetSchema()
|
||||||
|
entity := secCtx.GetEntity()
|
||||||
|
var err error
|
||||||
|
if schema != "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(fmt.Sprintf("%s.%s", schema, entity))
|
||||||
|
}
|
||||||
|
if err != nil || schema == "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(entity)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil // model not registered, allow by default
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !rules.CanDelete {
|
||||||
|
return fmt.Errorf("delete not allowed for %s", secCtx.GetEntity())
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckModelAuthAllowed checks whether the requested operation is permitted based on
|
||||||
|
// model rules and the current user's authentication state. It is intended for use in
|
||||||
|
// a BeforeHandle hook, fired after model resolution.
|
||||||
|
//
|
||||||
|
// Logic:
|
||||||
|
// 1. Load model rules from context (set by NewModelAuthMiddleware) or fall back to registry.
|
||||||
|
// 2. SecurityDisabled → allow.
|
||||||
|
// 3. operation == "read" && CanPublicRead → allow.
|
||||||
|
// 4. operation == "create" && CanPublicCreate → allow.
|
||||||
|
// 5. operation == "update" && CanPublicUpdate → allow.
|
||||||
|
// 6. operation == "delete" && CanPublicDelete → allow.
|
||||||
|
// 7. Guest (UserID == 0) → return "authentication required".
|
||||||
|
// 8. Authenticated user → allow (operation-specific checks remain in BeforeUpdate/BeforeDelete).
|
||||||
|
func CheckModelAuthAllowed(secCtx SecurityContext, operation string) error {
|
||||||
|
rules, ok := GetModelRulesFromContext(secCtx.GetContext())
|
||||||
|
if !ok {
|
||||||
|
schema := secCtx.GetSchema()
|
||||||
|
entity := secCtx.GetEntity()
|
||||||
|
var err error
|
||||||
|
if schema != "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(fmt.Sprintf("%s.%s", schema, entity))
|
||||||
|
}
|
||||||
|
if err != nil || schema == "" {
|
||||||
|
rules, err = modelregistry.GetModelRulesByName(entity)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
// Model not registered - fall through to auth check
|
||||||
|
userID, _ := secCtx.GetUserID()
|
||||||
|
if userID == 0 {
|
||||||
|
return fmt.Errorf("authentication required")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if rules.SecurityDisabled {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if operation == "read" && rules.CanPublicRead {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if operation == "create" && rules.CanPublicCreate {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if operation == "update" && rules.CanPublicUpdate {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if operation == "delete" && rules.CanPublicDelete {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
userID, _ := secCtx.GetUserID()
|
||||||
|
if userID == 0 {
|
||||||
|
return fmt.Errorf("authentication required")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckModelUpdateAllowed is the public wrapper for checkModelUpdateAllowed.
|
||||||
|
func CheckModelUpdateAllowed(secCtx SecurityContext) error {
|
||||||
|
return checkModelUpdateAllowed(secCtx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckModelDeleteAllowed is the public wrapper for checkModelDeleteAllowed.
|
||||||
|
func CheckModelDeleteAllowed(secCtx SecurityContext) error {
|
||||||
|
return checkModelDeleteAllowed(secCtx)
|
||||||
|
}
|
||||||
|
|
||||||
// Helper functions
|
// Helper functions
|
||||||
|
|
||||||
func contains(s, substr string) bool {
|
func contains(s, substr string) bool {
|
||||||
|
|||||||
@@ -4,6 +4,8 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/modelregistry"
|
||||||
)
|
)
|
||||||
|
|
||||||
// contextKey is a custom type for context keys to avoid collisions
|
// contextKey is a custom type for context keys to avoid collisions
|
||||||
@@ -23,6 +25,7 @@ const (
|
|||||||
UserMetaKey contextKey = "user_meta"
|
UserMetaKey contextKey = "user_meta"
|
||||||
SkipAuthKey contextKey = "skip_auth"
|
SkipAuthKey contextKey = "skip_auth"
|
||||||
OptionalAuthKey contextKey = "optional_auth"
|
OptionalAuthKey contextKey = "optional_auth"
|
||||||
|
ModelRulesKey contextKey = "model_rules"
|
||||||
)
|
)
|
||||||
|
|
||||||
// SkipAuth returns a context with skip auth flag set to true
|
// SkipAuth returns a context with skip auth flag set to true
|
||||||
@@ -136,6 +139,31 @@ func NewOptionalAuthHandler(securityList *SecurityList, next http.Handler) http.
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NewOptionalAuthMiddleware creates authentication middleware that always continues.
|
||||||
|
// On auth failure, a guest user context is set instead of returning 401.
|
||||||
|
// Intended for spec routes where auth enforcement is deferred to a BeforeHandle hook
|
||||||
|
// after model resolution.
|
||||||
|
func NewOptionalAuthMiddleware(securityList *SecurityList) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
provider := securityList.Provider()
|
||||||
|
if provider == nil {
|
||||||
|
http.Error(w, "Security provider not configured", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userCtx, err := provider.Authenticate(r)
|
||||||
|
if err != nil {
|
||||||
|
guestCtx := createGuestContext(r)
|
||||||
|
next.ServeHTTP(w, setUserContext(r, guestCtx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
next.ServeHTTP(w, setUserContext(r, userCtx))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// NewAuthMiddleware creates an authentication middleware with the given security list
|
// NewAuthMiddleware creates an authentication middleware with the given security list
|
||||||
// This middleware extracts user authentication from the request and adds it to context
|
// This middleware extracts user authentication from the request and adds it to context
|
||||||
// Routes can skip authentication by setting SkipAuthKey context value (use SkipAuth helper)
|
// Routes can skip authentication by setting SkipAuthKey context value (use SkipAuth helper)
|
||||||
@@ -182,6 +210,68 @@ func NewAuthMiddleware(securityList *SecurityList) func(http.Handler) http.Handl
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NewModelAuthMiddleware creates authentication middleware that respects ModelRules for the given model name.
|
||||||
|
// It first checks if ModelRules are set for the model:
|
||||||
|
// - If SecurityDisabled is true, authentication is skipped and a guest context is set.
|
||||||
|
// - Otherwise, all checks from NewAuthMiddleware apply (SkipAuthKey, provider check, OptionalAuthKey, Authenticate).
|
||||||
|
//
|
||||||
|
// If the model is not found in any registry, the middleware falls back to standard NewAuthMiddleware behaviour.
|
||||||
|
func NewModelAuthMiddleware(securityList *SecurityList, modelName string) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Check ModelRules first
|
||||||
|
if rules, err := modelregistry.GetModelRulesByName(modelName); err == nil {
|
||||||
|
// Store rules in context for downstream use (e.g., security hooks)
|
||||||
|
r = r.WithContext(context.WithValue(r.Context(), ModelRulesKey, rules))
|
||||||
|
|
||||||
|
if rules.SecurityDisabled {
|
||||||
|
guestCtx := createGuestContext(r)
|
||||||
|
next.ServeHTTP(w, setUserContext(r, guestCtx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
isRead := r.Method == http.MethodGet || r.Method == http.MethodHead
|
||||||
|
isUpdate := r.Method == http.MethodPut || r.Method == http.MethodPatch
|
||||||
|
if (isRead && rules.CanPublicRead) || (isUpdate && rules.CanPublicUpdate) {
|
||||||
|
guestCtx := createGuestContext(r)
|
||||||
|
next.ServeHTTP(w, setUserContext(r, guestCtx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this route should skip authentication
|
||||||
|
if skip, ok := r.Context().Value(SkipAuthKey).(bool); ok && skip {
|
||||||
|
guestCtx := createGuestContext(r)
|
||||||
|
next.ServeHTTP(w, setUserContext(r, guestCtx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the security provider
|
||||||
|
provider := securityList.Provider()
|
||||||
|
if provider == nil {
|
||||||
|
http.Error(w, "Security provider not configured", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this route has optional authentication
|
||||||
|
optional, _ := r.Context().Value(OptionalAuthKey).(bool)
|
||||||
|
|
||||||
|
// Try to authenticate
|
||||||
|
userCtx, err := provider.Authenticate(r)
|
||||||
|
if err != nil {
|
||||||
|
if optional {
|
||||||
|
guestCtx := createGuestContext(r)
|
||||||
|
next.ServeHTTP(w, setUserContext(r, guestCtx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
http.Error(w, "Authentication failed: "+err.Error(), http.StatusUnauthorized)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
next.ServeHTTP(w, setUserContext(r, userCtx))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// SetSecurityMiddleware adds security context to requests
|
// SetSecurityMiddleware adds security context to requests
|
||||||
// This middleware should be applied after AuthMiddleware
|
// This middleware should be applied after AuthMiddleware
|
||||||
func SetSecurityMiddleware(securityList *SecurityList) func(http.Handler) http.Handler {
|
func SetSecurityMiddleware(securityList *SecurityList) func(http.Handler) http.Handler {
|
||||||
@@ -366,6 +456,131 @@ func GetUserMeta(ctx context.Context) (map[string]any, bool) {
|
|||||||
return meta, ok
|
return meta, ok
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SessionCookieOptions configures the session cookie set by SetSessionCookie.
|
||||||
|
// All fields are optional; sensible secure defaults are applied when omitted.
|
||||||
|
type SessionCookieOptions struct {
|
||||||
|
// Name is the cookie name. Defaults to "session_token".
|
||||||
|
Name string
|
||||||
|
// Path is the cookie path. Defaults to "/".
|
||||||
|
Path string
|
||||||
|
// Domain restricts the cookie to a specific domain. Empty means current host.
|
||||||
|
Domain string
|
||||||
|
// Secure sets the Secure flag. Defaults to true.
|
||||||
|
// Set to false only in local development over HTTP.
|
||||||
|
Secure *bool
|
||||||
|
// SameSite sets the SameSite policy. Defaults to http.SameSiteLaxMode.
|
||||||
|
SameSite http.SameSite
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o SessionCookieOptions) name() string {
|
||||||
|
if o.Name != "" {
|
||||||
|
return o.Name
|
||||||
|
}
|
||||||
|
return "session_token"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o SessionCookieOptions) path() string {
|
||||||
|
if o.Path != "" {
|
||||||
|
return o.Path
|
||||||
|
}
|
||||||
|
return "/"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o SessionCookieOptions) secure() bool {
|
||||||
|
if o.Secure != nil {
|
||||||
|
return *o.Secure
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o SessionCookieOptions) sameSite() http.SameSite {
|
||||||
|
if o.SameSite != 0 {
|
||||||
|
return o.SameSite
|
||||||
|
}
|
||||||
|
return http.SameSiteLaxMode
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetSessionCookie writes the session_token cookie to the response after a successful login.
|
||||||
|
// Call this immediately after a successful Authenticator.Login() call.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// resp, err := auth.Login(r.Context(), req)
|
||||||
|
// if err != nil { ... }
|
||||||
|
// security.SetSessionCookie(w, resp)
|
||||||
|
// json.NewEncoder(w).Encode(resp)
|
||||||
|
func SetSessionCookie(w http.ResponseWriter, loginResp *LoginResponse, opts ...SessionCookieOptions) {
|
||||||
|
var o SessionCookieOptions
|
||||||
|
if len(opts) > 0 {
|
||||||
|
o = opts[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
maxAge := 0
|
||||||
|
if loginResp.ExpiresIn > 0 {
|
||||||
|
maxAge = int(loginResp.ExpiresIn)
|
||||||
|
}
|
||||||
|
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: o.name(),
|
||||||
|
Value: loginResp.Token,
|
||||||
|
Path: o.path(),
|
||||||
|
Domain: o.Domain,
|
||||||
|
MaxAge: maxAge,
|
||||||
|
HttpOnly: true,
|
||||||
|
Secure: o.secure(),
|
||||||
|
SameSite: o.sameSite(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetSessionCookie returns the session token value from the request cookie, or empty string if not present.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// token := security.GetSessionCookie(r)
|
||||||
|
func GetSessionCookie(r *http.Request, opts ...SessionCookieOptions) string {
|
||||||
|
var o SessionCookieOptions
|
||||||
|
if len(opts) > 0 {
|
||||||
|
o = opts[0]
|
||||||
|
}
|
||||||
|
cookie, err := r.Cookie(o.name())
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return cookie.Value
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearSessionCookie expires the session_token cookie, effectively logging the user out on the browser side.
|
||||||
|
// Call this after a successful Authenticator.Logout() call.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// err := auth.Logout(r.Context(), req)
|
||||||
|
// if err != nil { ... }
|
||||||
|
// security.ClearSessionCookie(w)
|
||||||
|
func ClearSessionCookie(w http.ResponseWriter, opts ...SessionCookieOptions) {
|
||||||
|
var o SessionCookieOptions
|
||||||
|
if len(opts) > 0 {
|
||||||
|
o = opts[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: o.name(),
|
||||||
|
Value: "",
|
||||||
|
Path: o.path(),
|
||||||
|
Domain: o.Domain,
|
||||||
|
MaxAge: -1,
|
||||||
|
HttpOnly: true,
|
||||||
|
Secure: o.secure(),
|
||||||
|
SameSite: o.sameSite(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetModelRulesFromContext extracts ModelRules stored by NewModelAuthMiddleware
|
||||||
|
func GetModelRulesFromContext(ctx context.Context) (modelregistry.ModelRules, bool) {
|
||||||
|
rules, ok := ctx.Value(ModelRulesKey).(modelregistry.ModelRules)
|
||||||
|
return rules, ok
|
||||||
|
}
|
||||||
|
|
||||||
// // Handler adapters for resolvespec/restheadspec compatibility
|
// // Handler adapters for resolvespec/restheadspec compatibility
|
||||||
// // These functions allow using NewAuthHandler and NewOptionalAuthHandler with custom handler abstractions
|
// // These functions allow using NewAuthHandler and NewOptionalAuthHandler with custom handler abstractions
|
||||||
|
|
||||||
|
|||||||
@@ -222,9 +222,8 @@ func (a *DatabaseAuthenticator) Authenticate(r *http.Request) (*UserContext, err
|
|||||||
|
|
||||||
if sessionToken == "" {
|
if sessionToken == "" {
|
||||||
// Try cookie
|
// Try cookie
|
||||||
cookie, err := r.Cookie("session_token")
|
if token := GetSessionCookie(r); token != "" {
|
||||||
if err == nil {
|
tokens = []string{token}
|
||||||
tokens = []string{cookie.Value}
|
|
||||||
reference = "cookie"
|
reference = "cookie"
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -98,6 +98,7 @@ func (p *EmbedFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
// Apply prefix stripping by prepending the prefix to the requested path
|
// Apply prefix stripping by prepending the prefix to the requested path
|
||||||
actualPath := name
|
actualPath := name
|
||||||
|
alternatePath := ""
|
||||||
if p.stripPrefix != "" {
|
if p.stripPrefix != "" {
|
||||||
// Clean the paths to handle leading/trailing slashes
|
// Clean the paths to handle leading/trailing slashes
|
||||||
prefix := strings.Trim(p.stripPrefix, "/")
|
prefix := strings.Trim(p.stripPrefix, "/")
|
||||||
@@ -105,12 +106,25 @@ func (p *EmbedFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
if prefix != "" {
|
if prefix != "" {
|
||||||
actualPath = path.Join(prefix, cleanName)
|
actualPath = path.Join(prefix, cleanName)
|
||||||
|
alternatePath = cleanName
|
||||||
} else {
|
} else {
|
||||||
actualPath = cleanName
|
actualPath = cleanName
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// First try the actual path with prefix
|
||||||
|
if file, err := p.fs.Open(actualPath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
|
||||||
return p.fs.Open(actualPath)
|
// If alternate path is different, try it as well
|
||||||
|
if alternatePath != "" && alternatePath != actualPath {
|
||||||
|
if file, err := p.fs.Open(alternatePath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If both attempts fail, return the error from the first attempt
|
||||||
|
return nil, fmt.Errorf("file not found: %s", name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Close releases any resources held by the provider.
|
// Close releases any resources held by the provider.
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ func (p *LocalFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
// Apply prefix stripping by prepending the prefix to the requested path
|
// Apply prefix stripping by prepending the prefix to the requested path
|
||||||
actualPath := name
|
actualPath := name
|
||||||
|
alternatePath := ""
|
||||||
if p.stripPrefix != "" {
|
if p.stripPrefix != "" {
|
||||||
// Clean the paths to handle leading/trailing slashes
|
// Clean the paths to handle leading/trailing slashes
|
||||||
prefix := strings.Trim(p.stripPrefix, "/")
|
prefix := strings.Trim(p.stripPrefix, "/")
|
||||||
@@ -60,12 +61,26 @@ func (p *LocalFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
if prefix != "" {
|
if prefix != "" {
|
||||||
actualPath = path.Join(prefix, cleanName)
|
actualPath = path.Join(prefix, cleanName)
|
||||||
|
alternatePath = cleanName
|
||||||
} else {
|
} else {
|
||||||
actualPath = cleanName
|
actualPath = cleanName
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.fs.Open(actualPath)
|
// First try the actual path with prefix
|
||||||
|
if file, err := p.fs.Open(actualPath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// If alternate path is different, try it as well
|
||||||
|
if alternatePath != "" && alternatePath != actualPath {
|
||||||
|
if file, err := p.fs.Open(alternatePath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If both attempts fail, return the error from the first attempt
|
||||||
|
return nil, fmt.Errorf("file not found: %s", name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Close releases any resources held by the provider.
|
// Close releases any resources held by the provider.
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ func (p *ZipFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
// Apply prefix stripping by prepending the prefix to the requested path
|
// Apply prefix stripping by prepending the prefix to the requested path
|
||||||
actualPath := name
|
actualPath := name
|
||||||
|
alternatePath := ""
|
||||||
if p.stripPrefix != "" {
|
if p.stripPrefix != "" {
|
||||||
// Clean the paths to handle leading/trailing slashes
|
// Clean the paths to handle leading/trailing slashes
|
||||||
prefix := strings.Trim(p.stripPrefix, "/")
|
prefix := strings.Trim(p.stripPrefix, "/")
|
||||||
@@ -63,12 +64,26 @@ func (p *ZipFSProvider) Open(name string) (fs.File, error) {
|
|||||||
|
|
||||||
if prefix != "" {
|
if prefix != "" {
|
||||||
actualPath = path.Join(prefix, cleanName)
|
actualPath = path.Join(prefix, cleanName)
|
||||||
|
alternatePath = cleanName
|
||||||
} else {
|
} else {
|
||||||
actualPath = cleanName
|
actualPath = cleanName
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.zipFS.Open(actualPath)
|
// First try the actual path with prefix
|
||||||
|
if file, err := p.zipFS.Open(actualPath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// If alternate path is different, try it as well
|
||||||
|
if alternatePath != "" && alternatePath != actualPath {
|
||||||
|
if file, err := p.zipFS.Open(alternatePath); err == nil {
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If both attempts fail, return the error from the first attempt
|
||||||
|
return nil, fmt.Errorf("file not found: %s", name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Close releases resources held by the zip reader.
|
// Close releases resources held by the zip reader.
|
||||||
|
|||||||
@@ -330,6 +330,7 @@ Hooks allow you to intercept and modify operations at various points in the life
|
|||||||
|
|
||||||
### Available Hook Types
|
### Available Hook Types
|
||||||
|
|
||||||
|
- **BeforeHandle** — fires after model resolution, before operation dispatch (auth checks)
|
||||||
- **BeforeRead** / **AfterRead**
|
- **BeforeRead** / **AfterRead**
|
||||||
- **BeforeCreate** / **AfterCreate**
|
- **BeforeCreate** / **AfterCreate**
|
||||||
- **BeforeUpdate** / **AfterUpdate**
|
- **BeforeUpdate** / **AfterUpdate**
|
||||||
@@ -337,6 +338,8 @@ Hooks allow you to intercept and modify operations at various points in the life
|
|||||||
- **BeforeSubscribe** / **AfterSubscribe**
|
- **BeforeSubscribe** / **AfterSubscribe**
|
||||||
- **BeforeConnect** / **AfterConnect**
|
- **BeforeConnect** / **AfterConnect**
|
||||||
|
|
||||||
|
`HookContext` includes `Operation string` (`"read"`, `"create"`, `"update"`, `"delete"`) and `Abort bool`, `AbortMessage string`, `AbortCode int` for abort signaling.
|
||||||
|
|
||||||
### Hook Example
|
### Hook Example
|
||||||
|
|
||||||
```go
|
```go
|
||||||
@@ -599,7 +602,19 @@ asyncio.run(main())
|
|||||||
|
|
||||||
## Authentication
|
## Authentication
|
||||||
|
|
||||||
Implement authentication using hooks:
|
Use `RegisterSecurityHooks` for integrated auth with model-rule support:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import "github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
|
||||||
|
provider := security.NewCompositeSecurityProvider(auth, colSec, rowSec)
|
||||||
|
securityList := security.NewSecurityList(provider)
|
||||||
|
websocketspec.RegisterSecurityHooks(handler, securityList)
|
||||||
|
// Registers BeforeHandle (model auth), BeforeRead (load rules),
|
||||||
|
// AfterRead (column security + audit), BeforeUpdate, BeforeDelete
|
||||||
|
```
|
||||||
|
|
||||||
|
Or implement custom authentication using hooks directly:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
handler := websocketspec.NewHandlerWithGORM(db)
|
handler := websocketspec.NewHandlerWithGORM(db)
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
@@ -176,6 +177,16 @@ func (h *Handler) handleRequest(conn *Connection, msg *Message) {
|
|||||||
Metadata: make(map[string]interface{}),
|
Metadata: make(map[string]interface{}),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Execute BeforeHandle hook - auth check fires here, after model resolution
|
||||||
|
hookCtx.Operation = string(msg.Operation)
|
||||||
|
if err := h.hooks.Execute(BeforeHandle, hookCtx); err != nil {
|
||||||
|
if hookCtx.Abort {
|
||||||
|
errResp := NewErrorResponse(msg.ID, "unauthorized", hookCtx.AbortMessage)
|
||||||
|
_ = conn.SendJSON(errResp)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Route to operation handler
|
// Route to operation handler
|
||||||
switch msg.Operation {
|
switch msg.Operation {
|
||||||
case OperationRead:
|
case OperationRead:
|
||||||
@@ -209,10 +220,14 @@ func (h *Handler) handleRead(conn *Connection, msg *Message, hookCtx *HookContex
|
|||||||
var metadata map[string]interface{}
|
var metadata map[string]interface{}
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if hookCtx.ID != "" {
|
// Check if FetchRowNumber is specified (treat as single record read)
|
||||||
// Read single record by ID
|
isFetchRowNumber := hookCtx.Options != nil && hookCtx.Options.FetchRowNumber != nil && *hookCtx.Options.FetchRowNumber != ""
|
||||||
|
|
||||||
|
if hookCtx.ID != "" || isFetchRowNumber {
|
||||||
|
// Read single record by ID or FetchRowNumber
|
||||||
data, err = h.readByID(hookCtx)
|
data, err = h.readByID(hookCtx)
|
||||||
metadata = map[string]interface{}{"total": 1}
|
metadata = map[string]interface{}{"total": 1}
|
||||||
|
// The row number is already set on the record itself via setRowNumbersOnRecords
|
||||||
} else {
|
} else {
|
||||||
// Read multiple records
|
// Read multiple records
|
||||||
data, metadata, err = h.readMultiple(hookCtx)
|
data, metadata, err = h.readMultiple(hookCtx)
|
||||||
@@ -509,10 +524,29 @@ func (h *Handler) notifySubscribers(schema, entity string, operation OperationTy
|
|||||||
// CRUD operation implementations
|
// CRUD operation implementations
|
||||||
|
|
||||||
func (h *Handler) readByID(hookCtx *HookContext) (interface{}, error) {
|
func (h *Handler) readByID(hookCtx *HookContext) (interface{}, error) {
|
||||||
|
// Handle FetchRowNumber before building query
|
||||||
|
var fetchedRowNumber *int64
|
||||||
|
pkName := reflection.GetPrimaryKeyName(hookCtx.Model)
|
||||||
|
|
||||||
|
if hookCtx.Options != nil && hookCtx.Options.FetchRowNumber != nil && *hookCtx.Options.FetchRowNumber != "" {
|
||||||
|
fetchRowNumberPKValue := *hookCtx.Options.FetchRowNumber
|
||||||
|
logger.Debug("[WebSocketSpec] FetchRowNumber: Fetching row number for PK %s = %s", pkName, fetchRowNumberPKValue)
|
||||||
|
|
||||||
|
rowNum, err := h.FetchRowNumber(hookCtx.Context, hookCtx.TableName, pkName, fetchRowNumberPKValue, hookCtx.Options, hookCtx.Model)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to fetch row number: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchedRowNumber = &rowNum
|
||||||
|
logger.Debug("[WebSocketSpec] FetchRowNumber: Row number %d for PK %s = %s", rowNum, pkName, fetchRowNumberPKValue)
|
||||||
|
|
||||||
|
// Override ID with FetchRowNumber value
|
||||||
|
hookCtx.ID = fetchRowNumberPKValue
|
||||||
|
}
|
||||||
|
|
||||||
query := h.db.NewSelect().Model(hookCtx.ModelPtr).Table(hookCtx.TableName)
|
query := h.db.NewSelect().Model(hookCtx.ModelPtr).Table(hookCtx.TableName)
|
||||||
|
|
||||||
// Add ID filter
|
// Add ID filter
|
||||||
pkName := reflection.GetPrimaryKeyName(hookCtx.Model)
|
|
||||||
query = query.Where(fmt.Sprintf("%s = ?", pkName), hookCtx.ID)
|
query = query.Where(fmt.Sprintf("%s = ?", pkName), hookCtx.ID)
|
||||||
|
|
||||||
// Apply columns
|
// Apply columns
|
||||||
@@ -532,6 +566,12 @@ func (h *Handler) readByID(hookCtx *HookContext) (interface{}, error) {
|
|||||||
return nil, fmt.Errorf("failed to read record: %w", err)
|
return nil, fmt.Errorf("failed to read record: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set the fetched row number on the record if FetchRowNumber was used
|
||||||
|
if fetchedRowNumber != nil {
|
||||||
|
logger.Debug("[WebSocketSpec] FetchRowNumber: Setting row number %d on record", *fetchedRowNumber)
|
||||||
|
h.setRowNumbersOnRecords(hookCtx.ModelPtr, int(*fetchedRowNumber-1)) // -1 because setRowNumbersOnRecords adds 1
|
||||||
|
}
|
||||||
|
|
||||||
return hookCtx.ModelPtr, nil
|
return hookCtx.ModelPtr, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -540,10 +580,8 @@ func (h *Handler) readMultiple(hookCtx *HookContext) (data interface{}, metadata
|
|||||||
|
|
||||||
// Apply options (simplified implementation)
|
// Apply options (simplified implementation)
|
||||||
if hookCtx.Options != nil {
|
if hookCtx.Options != nil {
|
||||||
// Apply filters
|
// Apply filters with OR grouping support
|
||||||
for _, filter := range hookCtx.Options.Filters {
|
query = h.applyFilters(query, hookCtx.Options.Filters)
|
||||||
query = query.Where(fmt.Sprintf("%s %s ?", filter.Column, h.getOperatorSQL(filter.Operator)), filter.Value)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply sorting
|
// Apply sorting
|
||||||
for _, sort := range hookCtx.Options.Sort {
|
for _, sort := range hookCtx.Options.Sort {
|
||||||
@@ -578,12 +616,22 @@ func (h *Handler) readMultiple(hookCtx *HookContext) (data interface{}, metadata
|
|||||||
return nil, nil, fmt.Errorf("failed to read records: %w", err)
|
return nil, nil, fmt.Errorf("failed to read records: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set row numbers on records if RowNumber field exists
|
||||||
|
offset := 0
|
||||||
|
if hookCtx.Options != nil && hookCtx.Options.Offset != nil {
|
||||||
|
offset = *hookCtx.Options.Offset
|
||||||
|
}
|
||||||
|
h.setRowNumbersOnRecords(hookCtx.ModelPtr, offset)
|
||||||
|
|
||||||
// Get count
|
// Get count
|
||||||
metadata = make(map[string]interface{})
|
metadata = make(map[string]interface{})
|
||||||
countQuery := h.db.NewSelect().Model(hookCtx.ModelPtr).Table(hookCtx.TableName)
|
countQuery := h.db.NewSelect().Model(hookCtx.ModelPtr).Table(hookCtx.TableName)
|
||||||
if hookCtx.Options != nil {
|
if hookCtx.Options != nil {
|
||||||
for _, filter := range hookCtx.Options.Filters {
|
for _, filter := range hookCtx.Options.Filters {
|
||||||
countQuery = countQuery.Where(fmt.Sprintf("%s %s ?", filter.Column, h.getOperatorSQL(filter.Operator)), filter.Value)
|
cond, args := h.buildFilterCondition(filter)
|
||||||
|
if cond != "" {
|
||||||
|
countQuery = countQuery.Where(cond, args...)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
count, _ := countQuery.Count(hookCtx.Context)
|
count, _ := countQuery.Count(hookCtx.Context)
|
||||||
@@ -656,12 +704,15 @@ func (h *Handler) delete(hookCtx *HookContext) error {
|
|||||||
// Helper methods
|
// Helper methods
|
||||||
|
|
||||||
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
func (h *Handler) getTableName(schema, entity string, model interface{}) string {
|
||||||
// Use entity as table name
|
|
||||||
tableName := entity
|
tableName := entity
|
||||||
|
|
||||||
if schema != "" {
|
if schema != "" {
|
||||||
|
if h.db.DriverName() == "sqlite" {
|
||||||
|
tableName = schema + "_" + tableName
|
||||||
|
} else {
|
||||||
tableName = schema + "." + tableName
|
tableName = schema + "." + tableName
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return tableName
|
return tableName
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -680,6 +731,131 @@ func (h *Handler) getMetadata(schema, entity string, model interface{}) map[stri
|
|||||||
}
|
}
|
||||||
|
|
||||||
// getOperatorSQL converts filter operator to SQL operator
|
// getOperatorSQL converts filter operator to SQL operator
|
||||||
|
// applyFilters applies all filters with proper grouping for OR logic
|
||||||
|
// Groups consecutive OR filters together to ensure proper query precedence
|
||||||
|
func (h *Handler) applyFilters(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
|
||||||
|
if len(filters) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
for i < len(filters) {
|
||||||
|
// Check if this starts an OR group (next filter has OR logic)
|
||||||
|
startORGroup := i+1 < len(filters) && strings.EqualFold(filters[i+1].LogicOperator, "OR")
|
||||||
|
|
||||||
|
if startORGroup {
|
||||||
|
// Collect all consecutive filters that are OR'd together
|
||||||
|
orGroup := []common.FilterOption{filters[i]}
|
||||||
|
j := i + 1
|
||||||
|
for j < len(filters) && strings.EqualFold(filters[j].LogicOperator, "OR") {
|
||||||
|
orGroup = append(orGroup, filters[j])
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply the OR group as a single grouped WHERE clause
|
||||||
|
query = h.applyFilterGroup(query, orGroup)
|
||||||
|
i = j
|
||||||
|
} else {
|
||||||
|
// Single filter with AND logic (or first filter)
|
||||||
|
condition, args := h.buildFilterCondition(filters[i])
|
||||||
|
if condition != "" {
|
||||||
|
query = query.Where(condition, args...)
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// applyFilterGroup applies a group of filters that should be OR'd together
|
||||||
|
// Always wraps them in parentheses and applies as a single WHERE clause
|
||||||
|
func (h *Handler) applyFilterGroup(query common.SelectQuery, filters []common.FilterOption) common.SelectQuery {
|
||||||
|
if len(filters) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build all conditions and collect args
|
||||||
|
var conditions []string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
for _, filter := range filters {
|
||||||
|
condition, filterArgs := h.buildFilterCondition(filter)
|
||||||
|
if condition != "" {
|
||||||
|
conditions = append(conditions, condition)
|
||||||
|
args = append(args, filterArgs...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(conditions) == 0 {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
|
||||||
|
// Single filter - no need for grouping
|
||||||
|
if len(conditions) == 1 {
|
||||||
|
return query.Where(conditions[0], args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Multiple conditions - group with parentheses and OR
|
||||||
|
groupedCondition := "(" + strings.Join(conditions, " OR ") + ")"
|
||||||
|
return query.Where(groupedCondition, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildFilterCondition builds a filter condition and returns it with args
|
||||||
|
func (h *Handler) buildFilterCondition(filter common.FilterOption) (conditionString string, conditionArgs []interface{}) {
|
||||||
|
if strings.EqualFold(filter.Operator, "in") {
|
||||||
|
cond, args := common.BuildInCondition(filter.Column, filter.Value)
|
||||||
|
return cond, args
|
||||||
|
}
|
||||||
|
operatorSQL := h.getOperatorSQL(filter.Operator)
|
||||||
|
return fmt.Sprintf("%s %s ?", filter.Column, operatorSQL), []interface{}{filter.Value}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setRowNumbersOnRecords sets the RowNumber field on each record if it exists
|
||||||
|
// The row number is calculated as offset + index + 1 (1-based)
|
||||||
|
func (h *Handler) setRowNumbersOnRecords(records interface{}, offset int) {
|
||||||
|
// Get the reflect value of the records
|
||||||
|
recordsValue := reflect.ValueOf(records)
|
||||||
|
if recordsValue.Kind() == reflect.Ptr {
|
||||||
|
recordsValue = recordsValue.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure it's a slice
|
||||||
|
if recordsValue.Kind() != reflect.Slice {
|
||||||
|
logger.Debug("[WebSocketSpec] setRowNumbersOnRecords: records is not a slice, skipping")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Iterate through each record
|
||||||
|
for i := 0; i < recordsValue.Len(); i++ {
|
||||||
|
record := recordsValue.Index(i)
|
||||||
|
|
||||||
|
// Dereference if it's a pointer
|
||||||
|
if record.Kind() == reflect.Ptr {
|
||||||
|
if record.IsNil() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
record = record.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure it's a struct
|
||||||
|
if record.Kind() != reflect.Struct {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to find and set the RowNumber field
|
||||||
|
rowNumberField := record.FieldByName("RowNumber")
|
||||||
|
if rowNumberField.IsValid() && rowNumberField.CanSet() {
|
||||||
|
// Check if the field is of type int64
|
||||||
|
if rowNumberField.Kind() == reflect.Int64 {
|
||||||
|
rowNum := int64(offset + i + 1)
|
||||||
|
rowNumberField.SetInt(rowNum)
|
||||||
|
logger.Debug("[WebSocketSpec] Set RowNumber=%d for record index %d", rowNum, i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (h *Handler) getOperatorSQL(operator string) string {
|
func (h *Handler) getOperatorSQL(operator string) string {
|
||||||
switch operator {
|
switch operator {
|
||||||
case "eq":
|
case "eq":
|
||||||
@@ -705,6 +881,92 @@ func (h *Handler) getOperatorSQL(operator string) string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FetchRowNumber calculates the row number of a specific record based on sorting and filtering
|
||||||
|
// Returns the 1-based row number of the record with the given primary key value
|
||||||
|
func (h *Handler) FetchRowNumber(ctx context.Context, tableName string, pkName string, pkValue string, options *common.RequestOptions, model interface{}) (int64, error) {
|
||||||
|
defer func() {
|
||||||
|
if r := recover(); r != nil {
|
||||||
|
logger.Error("[WebSocketSpec] Panic during FetchRowNumber: %v", r)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Build the sort order SQL
|
||||||
|
sortSQL := ""
|
||||||
|
if options != nil && len(options.Sort) > 0 {
|
||||||
|
sortParts := make([]string, 0, len(options.Sort))
|
||||||
|
for _, sort := range options.Sort {
|
||||||
|
if sort.Column == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
direction := "ASC"
|
||||||
|
if strings.EqualFold(sort.Direction, "desc") {
|
||||||
|
direction = "DESC"
|
||||||
|
}
|
||||||
|
sortParts = append(sortParts, fmt.Sprintf("%s %s", sort.Column, direction))
|
||||||
|
}
|
||||||
|
sortSQL = strings.Join(sortParts, ", ")
|
||||||
|
} else {
|
||||||
|
// Default sort by primary key
|
||||||
|
sortSQL = fmt.Sprintf("%s ASC", pkName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build WHERE clause from filters
|
||||||
|
whereSQL := ""
|
||||||
|
var whereArgs []interface{}
|
||||||
|
if options != nil && len(options.Filters) > 0 {
|
||||||
|
var conditions []string
|
||||||
|
for _, filter := range options.Filters {
|
||||||
|
operatorSQL := h.getOperatorSQL(filter.Operator)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("%s.%s %s ?", tableName, filter.Column, operatorSQL))
|
||||||
|
whereArgs = append(whereArgs, filter.Value)
|
||||||
|
}
|
||||||
|
if len(conditions) > 0 {
|
||||||
|
whereSQL = "WHERE " + strings.Join(conditions, " AND ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the final query with parameterized PK value
|
||||||
|
queryStr := fmt.Sprintf(`
|
||||||
|
SELECT search.rn
|
||||||
|
FROM (
|
||||||
|
SELECT %[1]s.%[2]s,
|
||||||
|
ROW_NUMBER() OVER(ORDER BY %[3]s) AS rn
|
||||||
|
FROM %[1]s
|
||||||
|
%[4]s
|
||||||
|
) search
|
||||||
|
WHERE search.%[2]s = ?
|
||||||
|
`,
|
||||||
|
tableName, // [1] - table name
|
||||||
|
pkName, // [2] - primary key column name
|
||||||
|
sortSQL, // [3] - sort order SQL
|
||||||
|
whereSQL, // [4] - WHERE clause
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.Debug("[WebSocketSpec] FetchRowNumber query: %s, pkValue: %s", queryStr, pkValue)
|
||||||
|
|
||||||
|
// Append PK value to whereArgs
|
||||||
|
whereArgs = append(whereArgs, pkValue)
|
||||||
|
|
||||||
|
// Execute the raw query with parameterized PK value
|
||||||
|
var result []struct {
|
||||||
|
RN int64 `bun:"rn"`
|
||||||
|
}
|
||||||
|
err := h.db.Query(ctx, &result, queryStr, whereArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("failed to fetch row number: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(result) == 0 {
|
||||||
|
whereInfo := "none"
|
||||||
|
if whereSQL != "" {
|
||||||
|
whereInfo = whereSQL
|
||||||
|
}
|
||||||
|
return 0, fmt.Errorf("no row found for primary key %s=%s with active filters: %s", pkName, pkValue, whereInfo)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result[0].RN, nil
|
||||||
|
}
|
||||||
|
|
||||||
// Shutdown gracefully shuts down the handler
|
// Shutdown gracefully shuts down the handler
|
||||||
func (h *Handler) Shutdown() {
|
func (h *Handler) Shutdown() {
|
||||||
h.connManager.Shutdown()
|
h.connManager.Shutdown()
|
||||||
|
|||||||
@@ -82,6 +82,10 @@ func (m *MockDatabase) GetUnderlyingDB() interface{} {
|
|||||||
return args.Get(0)
|
return args.Get(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *MockDatabase) DriverName() string {
|
||||||
|
return "postgres"
|
||||||
|
}
|
||||||
|
|
||||||
// MockSelectQuery is a mock implementation of common.SelectQuery
|
// MockSelectQuery is a mock implementation of common.SelectQuery
|
||||||
type MockSelectQuery struct {
|
type MockSelectQuery struct {
|
||||||
mock.Mock
|
mock.Mock
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package websocketspec
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
"github.com/bitechdev/ResolveSpec/pkg/common"
|
"github.com/bitechdev/ResolveSpec/pkg/common"
|
||||||
)
|
)
|
||||||
@@ -10,6 +11,10 @@ import (
|
|||||||
type HookType string
|
type HookType string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
// BeforeHandle fires after model resolution, before operation dispatch.
|
||||||
|
// Use this for auth checks that need model rules and user context simultaneously.
|
||||||
|
BeforeHandle HookType = "before_handle"
|
||||||
|
|
||||||
// BeforeRead is called before a read operation
|
// BeforeRead is called before a read operation
|
||||||
BeforeRead HookType = "before_read"
|
BeforeRead HookType = "before_read"
|
||||||
// AfterRead is called after a read operation
|
// AfterRead is called after a read operation
|
||||||
@@ -83,6 +88,9 @@ type HookContext struct {
|
|||||||
// Options contains the parsed request options
|
// Options contains the parsed request options
|
||||||
Options *common.RequestOptions
|
Options *common.RequestOptions
|
||||||
|
|
||||||
|
// Operation being dispatched (e.g. "read", "create", "update", "delete")
|
||||||
|
Operation string
|
||||||
|
|
||||||
// ID is the record ID for single-record operations
|
// ID is the record ID for single-record operations
|
||||||
ID string
|
ID string
|
||||||
|
|
||||||
@@ -98,6 +106,11 @@ type HookContext struct {
|
|||||||
// Error is any error that occurred (for after hooks)
|
// Error is any error that occurred (for after hooks)
|
||||||
Error error
|
Error error
|
||||||
|
|
||||||
|
// Allow hooks to abort the operation
|
||||||
|
Abort bool // If set to true, the operation will be aborted
|
||||||
|
AbortMessage string // Message to return if aborted
|
||||||
|
AbortCode int // HTTP status code if aborted
|
||||||
|
|
||||||
// Metadata is additional context data
|
// Metadata is additional context data
|
||||||
Metadata map[string]interface{}
|
Metadata map[string]interface{}
|
||||||
}
|
}
|
||||||
@@ -171,6 +184,11 @@ func (hr *HookRegistry) Execute(hookType HookType, ctx *HookContext) error {
|
|||||||
if err := hook(ctx); err != nil {
|
if err := hook(ctx); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if hook requested abort
|
||||||
|
if ctx.Abort {
|
||||||
|
return fmt.Errorf("operation aborted by hook: %s", ctx.AbortMessage)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
108
pkg/websocketspec/security_hooks.go
Normal file
108
pkg/websocketspec/security_hooks.go
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
package websocketspec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/logger"
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/security"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RegisterSecurityHooks registers all security-related hooks with the handler
|
||||||
|
func RegisterSecurityHooks(handler *Handler, securityList *security.SecurityList) {
|
||||||
|
// Hook 0: BeforeHandle - enforce auth after model resolution
|
||||||
|
handler.Hooks().Register(BeforeHandle, func(hookCtx *HookContext) error {
|
||||||
|
if err := security.CheckModelAuthAllowed(newSecurityContext(hookCtx), hookCtx.Operation); err != nil {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortMessage = err.Error()
|
||||||
|
hookCtx.AbortCode = http.StatusUnauthorized
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 1: BeforeRead - Load security rules
|
||||||
|
handler.Hooks().Register(BeforeRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.LoadSecurityRules(secCtx, securityList)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 2: AfterRead - Apply column-level security (masking)
|
||||||
|
handler.Hooks().Register(AfterRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.ApplyColumnSecurity(secCtx, securityList)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 3 (Optional): Audit logging
|
||||||
|
handler.Hooks().Register(AfterRead, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.LogDataAccess(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 4: BeforeUpdate - enforce CanUpdate rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeUpdate, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelUpdateAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Hook 5: BeforeDelete - enforce CanDelete rule from context/registry
|
||||||
|
handler.Hooks().Register(BeforeDelete, func(hookCtx *HookContext) error {
|
||||||
|
secCtx := newSecurityContext(hookCtx)
|
||||||
|
return security.CheckModelDeleteAllowed(secCtx)
|
||||||
|
})
|
||||||
|
|
||||||
|
logger.Info("Security hooks registered for websocketspec handler")
|
||||||
|
}
|
||||||
|
|
||||||
|
// securityContext adapts websocketspec.HookContext to security.SecurityContext interface
|
||||||
|
type securityContext struct {
|
||||||
|
ctx *HookContext
|
||||||
|
}
|
||||||
|
|
||||||
|
func newSecurityContext(ctx *HookContext) security.SecurityContext {
|
||||||
|
return &securityContext{ctx: ctx}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetContext() context.Context {
|
||||||
|
return s.ctx.Context
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetUserID() (int, bool) {
|
||||||
|
return security.GetUserID(s.ctx.Context)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetSchema() string {
|
||||||
|
return s.ctx.Schema
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetEntity() string {
|
||||||
|
return s.ctx.Entity
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetModel() interface{} {
|
||||||
|
return s.ctx.Model
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetQuery retrieves a stored query from hook metadata (websocketspec has no Query field)
|
||||||
|
func (s *securityContext) GetQuery() interface{} {
|
||||||
|
if s.ctx.Metadata == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return s.ctx.Metadata["query"]
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetQuery stores the query in hook metadata
|
||||||
|
func (s *securityContext) SetQuery(query interface{}) {
|
||||||
|
if s.ctx.Metadata == nil {
|
||||||
|
s.ctx.Metadata = make(map[string]interface{})
|
||||||
|
}
|
||||||
|
s.ctx.Metadata["query"] = query
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) GetResult() interface{} {
|
||||||
|
return s.ctx.Result
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *securityContext) SetResult(result interface{}) {
|
||||||
|
s.ctx.Result = result
|
||||||
|
}
|
||||||
8
resolvespec-js/.changeset/README.md
Normal file
8
resolvespec-js/.changeset/README.md
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Changesets
|
||||||
|
|
||||||
|
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
|
||||||
|
with multi-package repos, or single-package repos to help you version and publish your code. You can
|
||||||
|
find the full documentation for it [in our repository](https://github.com/changesets/changesets)
|
||||||
|
|
||||||
|
We have a quick list of common questions to get you started engaging with this project in
|
||||||
|
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
|
||||||
11
resolvespec-js/.changeset/config.json
Normal file
11
resolvespec-js/.changeset/config.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://unpkg.com/@changesets/config@3.1.2/schema.json",
|
||||||
|
"changelog": "@changesets/cli/changelog",
|
||||||
|
"commit": false,
|
||||||
|
"fixed": [],
|
||||||
|
"linked": [],
|
||||||
|
"access": "restricted",
|
||||||
|
"baseBranch": "main",
|
||||||
|
"updateInternalDependencies": "patch",
|
||||||
|
"ignore": []
|
||||||
|
}
|
||||||
7
resolvespec-js/CHANGELOG.md
Normal file
7
resolvespec-js/CHANGELOG.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# @warkypublic/resolvespec-js
|
||||||
|
|
||||||
|
## 1.0.1
|
||||||
|
|
||||||
|
### Patch Changes
|
||||||
|
|
||||||
|
- Fixed headerpsec
|
||||||
132
resolvespec-js/PLAN.md
Normal file
132
resolvespec-js/PLAN.md
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# ResolveSpec JS - Implementation Plan
|
||||||
|
|
||||||
|
TypeScript client library for ResolveSpec, RestHeaderSpec, WebSocket and MQTT APIs.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Status
|
||||||
|
|
||||||
|
| Phase | Description | Status |
|
||||||
|
|-------|-------------|--------|
|
||||||
|
| 0 | Restructure into folders | Done |
|
||||||
|
| 1 | Fix types (align with Go) | Done |
|
||||||
|
| 2 | Fix REST client | Done |
|
||||||
|
| 3 | Build config | Done |
|
||||||
|
| 4 | Tests | Done |
|
||||||
|
| 5 | HeaderSpec client | Done |
|
||||||
|
| 6 | MQTT client | Planned |
|
||||||
|
| 6.5 | Unified class pattern + singleton factories | Done |
|
||||||
|
| 7 | Response cache (TTL) | Planned |
|
||||||
|
| 8 | TanStack Query integration | Planned |
|
||||||
|
| 9 | React Hooks | Planned |
|
||||||
|
|
||||||
|
**Build:** `dist/index.js` (ES) + `dist/index.cjs` (CJS) + `.d.ts` declarations
|
||||||
|
**Tests:** 65 passing (common: 10, resolvespec: 13, websocketspec: 15, headerspec: 27)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folder Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
├── common/
|
||||||
|
│ ├── types.ts # Core types aligned with Go pkg/common/types.go
|
||||||
|
│ └── index.ts
|
||||||
|
├── resolvespec/
|
||||||
|
│ ├── client.ts # ResolveSpecClient class + createResolveSpecClient singleton
|
||||||
|
│ └── index.ts
|
||||||
|
├── headerspec/
|
||||||
|
│ ├── client.ts # HeaderSpecClient class + createHeaderSpecClient singleton + buildHeaders utility
|
||||||
|
│ └── index.ts
|
||||||
|
├── websocketspec/
|
||||||
|
│ ├── types.ts # WS-specific types (WSMessage, WSOptions, etc.)
|
||||||
|
│ ├── client.ts # WebSocketClient class + createWebSocketClient singleton
|
||||||
|
│ └── index.ts
|
||||||
|
├── mqttspec/ # Future
|
||||||
|
│ ├── types.ts
|
||||||
|
│ ├── client.ts
|
||||||
|
│ └── index.ts
|
||||||
|
├── __tests__/
|
||||||
|
│ ├── common.test.ts
|
||||||
|
│ ├── resolvespec.test.ts
|
||||||
|
│ ├── headerspec.test.ts
|
||||||
|
│ └── websocketspec.test.ts
|
||||||
|
└── index.ts # Root barrel export
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Type Alignment with Go
|
||||||
|
|
||||||
|
Types in `src/common/types.ts` match `pkg/common/types.go`:
|
||||||
|
|
||||||
|
- **Operator**: `eq`, `neq`, `gt`, `gte`, `lt`, `lte`, `like`, `ilike`, `in`, `contains`, `startswith`, `endswith`, `between`, `between_inclusive`, `is_null`, `is_not_null`
|
||||||
|
- **FilterOption**: `column`, `operator`, `value`, `logic_operator` (AND/OR)
|
||||||
|
- **Options**: `columns`, `omit_columns`, `filters`, `sort`, `limit`, `offset`, `preload`, `customOperators`, `computedColumns`, `parameters`, `cursor_forward`, `cursor_backward`, `fetch_row_number`
|
||||||
|
- **PreloadOption**: `relation`, `table_name`, `columns`, `omit_columns`, `sort`, `filters`, `where`, `limit`, `offset`, `updatable`, `recursive`, `computed_ql`, `primary_key`, `related_key`, `foreign_key`, `recursive_child_key`, `sql_joins`, `join_aliases`
|
||||||
|
- **Parameter**: `name`, `value`, `sequence?`
|
||||||
|
- **Metadata**: `total`, `count`, `filtered`, `limit`, `offset`, `row_number?`
|
||||||
|
- **APIError**: `code`, `message`, `details?`, `detail?`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## HeaderSpec Header Mapping
|
||||||
|
|
||||||
|
Maps Options to HTTP headers per Go `restheadspec/headers.go`:
|
||||||
|
|
||||||
|
| Header | Options field | Format |
|
||||||
|
|--------|--------------|--------|
|
||||||
|
| `X-Select-Fields` | `columns` | comma-separated |
|
||||||
|
| `X-Not-Select-Fields` | `omit_columns` | comma-separated |
|
||||||
|
| `X-FieldFilter-{col}` | `filters` (eq, AND) | value |
|
||||||
|
| `X-SearchOp-{op}-{col}` | `filters` (AND) | value |
|
||||||
|
| `X-SearchOr-{op}-{col}` | `filters` (OR) | value |
|
||||||
|
| `X-Sort` | `sort` | `+col` (asc), `-col` (desc) |
|
||||||
|
| `X-Limit` | `limit` | number |
|
||||||
|
| `X-Offset` | `offset` | number |
|
||||||
|
| `X-Cursor-Forward` | `cursor_forward` | string |
|
||||||
|
| `X-Cursor-Backward` | `cursor_backward` | string |
|
||||||
|
| `X-Preload` | `preload` | `Rel:col1,col2` pipe-separated |
|
||||||
|
| `X-Fetch-RowNumber` | `fetch_row_number` | string |
|
||||||
|
| `X-CQL-SEL-{col}` | `computedColumns` | expression |
|
||||||
|
| `X-Custom-SQL-W` | `customOperators` | SQL AND-joined |
|
||||||
|
|
||||||
|
Complex values use `ZIP_` + base64 encoding.
|
||||||
|
HTTP methods: GET=read, POST=create, PUT=update, DELETE=delete.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Build & Test
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm install
|
||||||
|
pnpm run build # vite library mode → dist/
|
||||||
|
pnpm run test # vitest
|
||||||
|
pnpm run lint # eslint
|
||||||
|
```
|
||||||
|
|
||||||
|
**Config files:** `tsconfig.json` (ES2020, strict, bundler), `vite.config.ts` (lib mode, dts via vite-plugin-dts)
|
||||||
|
**Externals:** `uuid`, `semver`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Remaining Work
|
||||||
|
|
||||||
|
- **Phase 6 — MQTT Client**: Topic-based CRUD over MQTT (optional/future)
|
||||||
|
- **Phase 7 — Cache**: In-memory response cache with TTL, key = URL + options hash, auto-invalidation on CUD, `skipCache` flag
|
||||||
|
- **Phase 8 — TanStack Query Integration**: Query/mutation hooks wrapping each client, query key factories, automatic cache invalidation
|
||||||
|
- **Phase 9 — React Hooks**: `useResolveSpec`, `useHeaderSpec`, `useWebSocket` hooks with provider context, loading/error states
|
||||||
|
- ESLint config may need updating for new folder structure
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference Files
|
||||||
|
|
||||||
|
| Purpose | Path |
|
||||||
|
|---------|------|
|
||||||
|
| Go types (source of truth) | `pkg/common/types.go` |
|
||||||
|
| Go REST handler | `pkg/resolvespec/handler.go` |
|
||||||
|
| Go HeaderSpec handler | `pkg/restheadspec/handler.go` |
|
||||||
|
| Go HeaderSpec header parsing | `pkg/restheadspec/headers.go` |
|
||||||
|
| Go test models | `pkg/testmodels/business.go` |
|
||||||
|
| Go tests | `tests/crud_test.go` |
|
||||||
213
resolvespec-js/README.md
Normal file
213
resolvespec-js/README.md
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
# ResolveSpec JS
|
||||||
|
|
||||||
|
TypeScript client library for ResolveSpec APIs. Supports body-based REST, header-based REST, and WebSocket protocols.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm add @warkypublic/resolvespec-js
|
||||||
|
```
|
||||||
|
|
||||||
|
## Clients
|
||||||
|
|
||||||
|
| Client | Protocol | Singleton Factory |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `ResolveSpecClient` | REST (body-based) | `getResolveSpecClient(config)` |
|
||||||
|
| `HeaderSpecClient` | REST (header-based) | `getHeaderSpecClient(config)` |
|
||||||
|
| `WebSocketClient` | WebSocket | `getWebSocketClient(config)` |
|
||||||
|
|
||||||
|
All clients use the class pattern. Singleton factories return cached instances keyed by URL.
|
||||||
|
|
||||||
|
## REST Client (Body-Based)
|
||||||
|
|
||||||
|
Options sent in JSON request body. Maps to Go `pkg/resolvespec`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { ResolveSpecClient, getResolveSpecClient } from '@warkypublic/resolvespec-js';
|
||||||
|
|
||||||
|
// Class instantiation
|
||||||
|
const client = new ResolveSpecClient({ baseUrl: 'http://localhost:3000', token: 'your-token' });
|
||||||
|
|
||||||
|
// Or singleton factory (returns cached instance per baseUrl)
|
||||||
|
const client = getResolveSpecClient({ baseUrl: 'http://localhost:3000', token: 'your-token' });
|
||||||
|
|
||||||
|
// Read with filters, sort, pagination
|
||||||
|
const result = await client.read('public', 'users', undefined, {
|
||||||
|
columns: ['id', 'name', 'email'],
|
||||||
|
filters: [{ column: 'status', operator: 'eq', value: 'active' }],
|
||||||
|
sort: [{ column: 'name', direction: 'asc' }],
|
||||||
|
limit: 10,
|
||||||
|
offset: 0,
|
||||||
|
preload: [{ relation: 'Posts', columns: ['id', 'title'] }],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Read by ID
|
||||||
|
const user = await client.read('public', 'users', 42);
|
||||||
|
|
||||||
|
// Create
|
||||||
|
const created = await client.create('public', 'users', { name: 'New User' });
|
||||||
|
|
||||||
|
// Update
|
||||||
|
await client.update('public', 'users', { name: 'Updated' }, 42);
|
||||||
|
|
||||||
|
// Delete
|
||||||
|
await client.delete('public', 'users', 42);
|
||||||
|
|
||||||
|
// Metadata
|
||||||
|
const meta = await client.getMetadata('public', 'users');
|
||||||
|
```
|
||||||
|
|
||||||
|
## HeaderSpec Client (Header-Based)
|
||||||
|
|
||||||
|
Options sent via HTTP headers. Maps to Go `pkg/restheadspec`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { HeaderSpecClient, getHeaderSpecClient } from '@warkypublic/resolvespec-js';
|
||||||
|
|
||||||
|
const client = new HeaderSpecClient({ baseUrl: 'http://localhost:3000', token: 'your-token' });
|
||||||
|
// Or: const client = getHeaderSpecClient({ baseUrl: 'http://localhost:3000', token: 'your-token' });
|
||||||
|
|
||||||
|
// GET with options as headers
|
||||||
|
const result = await client.read('public', 'users', undefined, {
|
||||||
|
columns: ['id', 'name'],
|
||||||
|
filters: [
|
||||||
|
{ column: 'status', operator: 'eq', value: 'active' },
|
||||||
|
{ column: 'age', operator: 'gte', value: 18, logic_operator: 'AND' },
|
||||||
|
],
|
||||||
|
sort: [{ column: 'name', direction: 'asc' }],
|
||||||
|
limit: 50,
|
||||||
|
preload: [{ relation: 'Department', columns: ['id', 'name'] }],
|
||||||
|
});
|
||||||
|
|
||||||
|
// POST create
|
||||||
|
await client.create('public', 'users', { name: 'New User' });
|
||||||
|
|
||||||
|
// PUT update
|
||||||
|
await client.update('public', 'users', '42', { name: 'Updated' });
|
||||||
|
|
||||||
|
// DELETE
|
||||||
|
await client.delete('public', 'users', '42');
|
||||||
|
```
|
||||||
|
|
||||||
|
### Header Mapping
|
||||||
|
|
||||||
|
| Header | Options Field | Format |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `X-Select-Fields` | `columns` | comma-separated |
|
||||||
|
| `X-Not-Select-Fields` | `omit_columns` | comma-separated |
|
||||||
|
| `X-FieldFilter-{col}` | `filters` (eq, AND) | value |
|
||||||
|
| `X-SearchOp-{op}-{col}` | `filters` (AND) | value |
|
||||||
|
| `X-SearchOr-{op}-{col}` | `filters` (OR) | value |
|
||||||
|
| `X-Sort` | `sort` | `+col` asc, `-col` desc |
|
||||||
|
| `X-Limit` / `X-Offset` | `limit` / `offset` | number |
|
||||||
|
| `X-Cursor-Forward` | `cursor_forward` | string |
|
||||||
|
| `X-Cursor-Backward` | `cursor_backward` | string |
|
||||||
|
| `X-Preload` | `preload` | `Rel:col1,col2` pipe-separated |
|
||||||
|
| `X-Fetch-RowNumber` | `fetch_row_number` | string |
|
||||||
|
| `X-CQL-SEL-{col}` | `computedColumns` | expression |
|
||||||
|
| `X-Custom-SQL-W` | `customOperators` | SQL AND-joined |
|
||||||
|
|
||||||
|
### Utility Functions
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { buildHeaders, encodeHeaderValue, decodeHeaderValue } from '@warkypublic/resolvespec-js';
|
||||||
|
|
||||||
|
const headers = buildHeaders({ columns: ['id', 'name'], limit: 10 });
|
||||||
|
// => { 'X-Select-Fields': 'id,name', 'X-Limit': '10' }
|
||||||
|
|
||||||
|
const encoded = encodeHeaderValue('complex value'); // 'ZIP_...'
|
||||||
|
const decoded = decodeHeaderValue(encoded); // 'complex value'
|
||||||
|
```
|
||||||
|
|
||||||
|
## WebSocket Client
|
||||||
|
|
||||||
|
Real-time CRUD with subscriptions. Maps to Go `pkg/websocketspec`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { WebSocketClient, getWebSocketClient } from '@warkypublic/resolvespec-js';
|
||||||
|
|
||||||
|
const ws = new WebSocketClient({
|
||||||
|
url: 'ws://localhost:8080/ws',
|
||||||
|
reconnect: true,
|
||||||
|
heartbeatInterval: 30000,
|
||||||
|
});
|
||||||
|
// Or: const ws = getWebSocketClient({ url: 'ws://localhost:8080/ws' });
|
||||||
|
|
||||||
|
await ws.connect();
|
||||||
|
|
||||||
|
// CRUD
|
||||||
|
const users = await ws.read('users', { schema: 'public', limit: 10 });
|
||||||
|
const created = await ws.create('users', { name: 'New' }, { schema: 'public' });
|
||||||
|
await ws.update('users', '1', { name: 'Updated' });
|
||||||
|
await ws.delete('users', '1');
|
||||||
|
|
||||||
|
// Subscribe to changes
|
||||||
|
const subId = await ws.subscribe('users', (notification) => {
|
||||||
|
console.log(notification.operation, notification.data);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Unsubscribe
|
||||||
|
await ws.unsubscribe(subId);
|
||||||
|
|
||||||
|
// Events
|
||||||
|
ws.on('connect', () => console.log('connected'));
|
||||||
|
ws.on('disconnect', () => console.log('disconnected'));
|
||||||
|
ws.on('error', (err) => console.error(err));
|
||||||
|
|
||||||
|
ws.disconnect();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Types
|
||||||
|
|
||||||
|
All types align with Go `pkg/common/types.go`.
|
||||||
|
|
||||||
|
### Key Types
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface Options {
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
filters?: FilterOption[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
preload?: PreloadOption[];
|
||||||
|
customOperators?: CustomOperator[];
|
||||||
|
computedColumns?: ComputedColumn[];
|
||||||
|
parameters?: Parameter[];
|
||||||
|
cursor_forward?: string;
|
||||||
|
cursor_backward?: string;
|
||||||
|
fetch_row_number?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FilterOption {
|
||||||
|
column: string;
|
||||||
|
operator: Operator | string;
|
||||||
|
value: any;
|
||||||
|
logic_operator?: 'AND' | 'OR';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Operators: eq, neq, gt, gte, lt, lte, like, ilike, in,
|
||||||
|
// contains, startswith, endswith, between,
|
||||||
|
// between_inclusive, is_null, is_not_null
|
||||||
|
|
||||||
|
interface APIResponse<T> {
|
||||||
|
success: boolean;
|
||||||
|
data: T;
|
||||||
|
metadata?: Metadata;
|
||||||
|
error?: APIError;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Build
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm install
|
||||||
|
pnpm run build # dist/index.js (ES) + dist/index.cjs (CJS) + .d.ts
|
||||||
|
pnpm run test # vitest
|
||||||
|
pnpm run lint # eslint
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
@@ -1,530 +0,0 @@
|
|||||||
# WebSocketSpec JavaScript Client
|
|
||||||
|
|
||||||
A TypeScript/JavaScript client for connecting to WebSocketSpec servers with full support for real-time subscriptions, CRUD operations, and automatic reconnection.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm install @warkypublic/resolvespec-js
|
|
||||||
# or
|
|
||||||
yarn add @warkypublic/resolvespec-js
|
|
||||||
# or
|
|
||||||
pnpm add @warkypublic/resolvespec-js
|
|
||||||
```
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { WebSocketClient } from '@warkypublic/resolvespec-js';
|
|
||||||
|
|
||||||
// Create client
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
reconnect: true,
|
|
||||||
debug: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Connect
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Read records
|
|
||||||
const users = await client.read('users', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'active' }
|
|
||||||
],
|
|
||||||
limit: 10
|
|
||||||
});
|
|
||||||
|
|
||||||
// Subscribe to changes
|
|
||||||
const subscriptionId = await client.subscribe('users', (notification) => {
|
|
||||||
console.log('User changed:', notification.operation, notification.data);
|
|
||||||
}, { schema: 'public' });
|
|
||||||
|
|
||||||
// Clean up
|
|
||||||
await client.unsubscribe(subscriptionId);
|
|
||||||
client.disconnect();
|
|
||||||
```
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- **Real-Time Updates**: Subscribe to entity changes and receive instant notifications
|
|
||||||
- **Full CRUD Support**: Create, read, update, and delete operations
|
|
||||||
- **TypeScript Support**: Full type definitions included
|
|
||||||
- **Auto Reconnection**: Automatic reconnection with configurable retry logic
|
|
||||||
- **Heartbeat**: Built-in keepalive mechanism
|
|
||||||
- **Event System**: Listen to connection, error, and message events
|
|
||||||
- **Promise-based API**: All async operations return promises
|
|
||||||
- **Filter & Sort**: Advanced querying with filters, sorting, and pagination
|
|
||||||
- **Preloading**: Load related entities in a single query
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws', // WebSocket server URL
|
|
||||||
reconnect: true, // Enable auto-reconnection
|
|
||||||
reconnectInterval: 3000, // Reconnection delay (ms)
|
|
||||||
maxReconnectAttempts: 10, // Max reconnection attempts
|
|
||||||
heartbeatInterval: 30000, // Heartbeat interval (ms)
|
|
||||||
debug: false // Enable debug logging
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
## API Reference
|
|
||||||
|
|
||||||
### Connection Management
|
|
||||||
|
|
||||||
#### `connect(): Promise<void>`
|
|
||||||
Connect to the WebSocket server.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await client.connect();
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `disconnect(): void`
|
|
||||||
Disconnect from the server.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
client.disconnect();
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `isConnected(): boolean`
|
|
||||||
Check if currently connected.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
if (client.isConnected()) {
|
|
||||||
console.log('Connected!');
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `getState(): ConnectionState`
|
|
||||||
Get current connection state: `'connecting'`, `'connected'`, `'disconnecting'`, `'disconnected'`, or `'reconnecting'`.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const state = client.getState();
|
|
||||||
console.log('State:', state);
|
|
||||||
```
|
|
||||||
|
|
||||||
### CRUD Operations
|
|
||||||
|
|
||||||
#### `read<T>(entity: string, options?): Promise<T>`
|
|
||||||
Read records from an entity.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Read all active users
|
|
||||||
const users = await client.read('users', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'active' }
|
|
||||||
],
|
|
||||||
columns: ['id', 'name', 'email'],
|
|
||||||
sort: [
|
|
||||||
{ column: 'name', direction: 'asc' }
|
|
||||||
],
|
|
||||||
limit: 10,
|
|
||||||
offset: 0
|
|
||||||
});
|
|
||||||
|
|
||||||
// Read single record by ID
|
|
||||||
const user = await client.read('users', {
|
|
||||||
schema: 'public',
|
|
||||||
record_id: '123'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Read with preloading
|
|
||||||
const posts = await client.read('posts', {
|
|
||||||
schema: 'public',
|
|
||||||
preload: [
|
|
||||||
{
|
|
||||||
relation: 'user',
|
|
||||||
columns: ['id', 'name', 'email']
|
|
||||||
},
|
|
||||||
{
|
|
||||||
relation: 'comments',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'approved' }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `create<T>(entity: string, data: any, options?): Promise<T>`
|
|
||||||
Create a new record.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const newUser = await client.create('users', {
|
|
||||||
name: 'John Doe',
|
|
||||||
email: 'john@example.com',
|
|
||||||
status: 'active'
|
|
||||||
}, {
|
|
||||||
schema: 'public'
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `update<T>(entity: string, id: string, data: any, options?): Promise<T>`
|
|
||||||
Update an existing record.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const updatedUser = await client.update('users', '123', {
|
|
||||||
name: 'John Updated',
|
|
||||||
email: 'john.new@example.com'
|
|
||||||
}, {
|
|
||||||
schema: 'public'
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `delete(entity: string, id: string, options?): Promise<void>`
|
|
||||||
Delete a record.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await client.delete('users', '123', {
|
|
||||||
schema: 'public'
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `meta<T>(entity: string, options?): Promise<T>`
|
|
||||||
Get metadata for an entity.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const metadata = await client.meta('users', {
|
|
||||||
schema: 'public'
|
|
||||||
});
|
|
||||||
console.log('Columns:', metadata.columns);
|
|
||||||
console.log('Primary key:', metadata.primary_key);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Subscriptions
|
|
||||||
|
|
||||||
#### `subscribe(entity: string, callback: Function, options?): Promise<string>`
|
|
||||||
Subscribe to entity changes.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const subscriptionId = await client.subscribe(
|
|
||||||
'users',
|
|
||||||
(notification) => {
|
|
||||||
console.log('Operation:', notification.operation); // 'create', 'update', or 'delete'
|
|
||||||
console.log('Data:', notification.data);
|
|
||||||
console.log('Timestamp:', notification.timestamp);
|
|
||||||
},
|
|
||||||
{
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'active' }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `unsubscribe(subscriptionId: string): Promise<void>`
|
|
||||||
Unsubscribe from entity changes.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await client.unsubscribe(subscriptionId);
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `getSubscriptions(): Subscription[]`
|
|
||||||
Get list of active subscriptions.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const subscriptions = client.getSubscriptions();
|
|
||||||
console.log('Active subscriptions:', subscriptions.length);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Event Handling
|
|
||||||
|
|
||||||
#### `on(event: string, callback: Function): void`
|
|
||||||
Add event listener.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Connection events
|
|
||||||
client.on('connect', () => {
|
|
||||||
console.log('Connected!');
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('disconnect', (event) => {
|
|
||||||
console.log('Disconnected:', event.code, event.reason);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('error', (error) => {
|
|
||||||
console.error('Error:', error);
|
|
||||||
});
|
|
||||||
|
|
||||||
// State changes
|
|
||||||
client.on('stateChange', (state) => {
|
|
||||||
console.log('State:', state);
|
|
||||||
});
|
|
||||||
|
|
||||||
// All messages
|
|
||||||
client.on('message', (message) => {
|
|
||||||
console.log('Message:', message);
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `off(event: string): void`
|
|
||||||
Remove event listener.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
client.off('connect');
|
|
||||||
```
|
|
||||||
|
|
||||||
## Filter Operators
|
|
||||||
|
|
||||||
- `eq` - Equal (=)
|
|
||||||
- `neq` - Not Equal (!=)
|
|
||||||
- `gt` - Greater Than (>)
|
|
||||||
- `gte` - Greater Than or Equal (>=)
|
|
||||||
- `lt` - Less Than (<)
|
|
||||||
- `lte` - Less Than or Equal (<=)
|
|
||||||
- `like` - LIKE (case-sensitive)
|
|
||||||
- `ilike` - ILIKE (case-insensitive)
|
|
||||||
- `in` - IN (array of values)
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
### Basic CRUD
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const client = new WebSocketClient({ url: 'ws://localhost:8080/ws' });
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Create
|
|
||||||
const user = await client.create('users', {
|
|
||||||
name: 'Alice',
|
|
||||||
email: 'alice@example.com'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Read
|
|
||||||
const users = await client.read('users', {
|
|
||||||
filters: [{ column: 'status', operator: 'eq', value: 'active' }]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Update
|
|
||||||
await client.update('users', user.id, { name: 'Alice Updated' });
|
|
||||||
|
|
||||||
// Delete
|
|
||||||
await client.delete('users', user.id);
|
|
||||||
|
|
||||||
client.disconnect();
|
|
||||||
```
|
|
||||||
|
|
||||||
### Real-Time Subscriptions
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const client = new WebSocketClient({ url: 'ws://localhost:8080/ws' });
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Subscribe to all user changes
|
|
||||||
const subId = await client.subscribe('users', (notification) => {
|
|
||||||
switch (notification.operation) {
|
|
||||||
case 'create':
|
|
||||||
console.log('New user:', notification.data);
|
|
||||||
break;
|
|
||||||
case 'update':
|
|
||||||
console.log('User updated:', notification.data);
|
|
||||||
break;
|
|
||||||
case 'delete':
|
|
||||||
console.log('User deleted:', notification.data);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Later: unsubscribe
|
|
||||||
await client.unsubscribe(subId);
|
|
||||||
```
|
|
||||||
|
|
||||||
### React Integration
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { useEffect, useState } from 'react';
|
|
||||||
import { WebSocketClient } from '@warkypublic/resolvespec-js';
|
|
||||||
|
|
||||||
function useWebSocket(url: string) {
|
|
||||||
const [client] = useState(() => new WebSocketClient({ url }));
|
|
||||||
const [isConnected, setIsConnected] = useState(false);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
client.on('connect', () => setIsConnected(true));
|
|
||||||
client.on('disconnect', () => setIsConnected(false));
|
|
||||||
client.connect();
|
|
||||||
|
|
||||||
return () => client.disconnect();
|
|
||||||
}, [client]);
|
|
||||||
|
|
||||||
return { client, isConnected };
|
|
||||||
}
|
|
||||||
|
|
||||||
function UsersComponent() {
|
|
||||||
const { client, isConnected } = useWebSocket('ws://localhost:8080/ws');
|
|
||||||
const [users, setUsers] = useState([]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (!isConnected) return;
|
|
||||||
|
|
||||||
const loadUsers = async () => {
|
|
||||||
// Subscribe to changes
|
|
||||||
await client.subscribe('users', (notification) => {
|
|
||||||
if (notification.operation === 'create') {
|
|
||||||
setUsers(prev => [...prev, notification.data]);
|
|
||||||
} else if (notification.operation === 'update') {
|
|
||||||
setUsers(prev => prev.map(u =>
|
|
||||||
u.id === notification.data.id ? notification.data : u
|
|
||||||
));
|
|
||||||
} else if (notification.operation === 'delete') {
|
|
||||||
setUsers(prev => prev.filter(u => u.id !== notification.data.id));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Load initial data
|
|
||||||
const data = await client.read('users');
|
|
||||||
setUsers(data);
|
|
||||||
};
|
|
||||||
|
|
||||||
loadUsers();
|
|
||||||
}, [client, isConnected]);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div>
|
|
||||||
<h2>Users {isConnected ? '🟢' : '🔴'}</h2>
|
|
||||||
{users.map(user => (
|
|
||||||
<div key={user.id}>{user.name}</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### TypeScript with Typed Models
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface User {
|
|
||||||
id: number;
|
|
||||||
name: string;
|
|
||||||
email: string;
|
|
||||||
status: 'active' | 'inactive';
|
|
||||||
}
|
|
||||||
|
|
||||||
interface Post {
|
|
||||||
id: number;
|
|
||||||
title: string;
|
|
||||||
content: string;
|
|
||||||
user_id: number;
|
|
||||||
user?: User;
|
|
||||||
}
|
|
||||||
|
|
||||||
const client = new WebSocketClient({ url: 'ws://localhost:8080/ws' });
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Type-safe operations
|
|
||||||
const users = await client.read<User[]>('users', {
|
|
||||||
filters: [{ column: 'status', operator: 'eq', value: 'active' }]
|
|
||||||
});
|
|
||||||
|
|
||||||
const newUser = await client.create<User>('users', {
|
|
||||||
name: 'Bob',
|
|
||||||
email: 'bob@example.com',
|
|
||||||
status: 'active'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Type-safe subscriptions
|
|
||||||
await client.subscribe(
|
|
||||||
'posts',
|
|
||||||
(notification) => {
|
|
||||||
const post = notification.data as Post;
|
|
||||||
console.log('Post:', post.title);
|
|
||||||
}
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Error Handling
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
reconnect: true,
|
|
||||||
maxReconnectAttempts: 5
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('error', (error) => {
|
|
||||||
console.error('Connection error:', error);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('stateChange', (state) => {
|
|
||||||
console.log('State:', state);
|
|
||||||
if (state === 'reconnecting') {
|
|
||||||
console.log('Attempting to reconnect...');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const user = await client.read('users', { record_id: '999' });
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Record not found:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await client.create('users', { /* invalid data */ });
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Validation failed:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Connection failed:', error);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Multiple Subscriptions
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const client = new WebSocketClient({ url: 'ws://localhost:8080/ws' });
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Subscribe to multiple entities
|
|
||||||
const userSub = await client.subscribe('users', (n) => {
|
|
||||||
console.log('[Users]', n.operation, n.data);
|
|
||||||
});
|
|
||||||
|
|
||||||
const postSub = await client.subscribe('posts', (n) => {
|
|
||||||
console.log('[Posts]', n.operation, n.data);
|
|
||||||
}, {
|
|
||||||
filters: [{ column: 'status', operator: 'eq', value: 'published' }]
|
|
||||||
});
|
|
||||||
|
|
||||||
const commentSub = await client.subscribe('comments', (n) => {
|
|
||||||
console.log('[Comments]', n.operation, n.data);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Check active subscriptions
|
|
||||||
console.log('Active:', client.getSubscriptions().length);
|
|
||||||
|
|
||||||
// Clean up
|
|
||||||
await client.unsubscribe(userSub);
|
|
||||||
await client.unsubscribe(postSub);
|
|
||||||
await client.unsubscribe(commentSub);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Best Practices
|
|
||||||
|
|
||||||
1. **Always Clean Up**: Call `disconnect()` when done to close the connection properly
|
|
||||||
2. **Use TypeScript**: Leverage type definitions for better type safety
|
|
||||||
3. **Handle Errors**: Always wrap operations in try-catch blocks
|
|
||||||
4. **Limit Subscriptions**: Don't create too many subscriptions per connection
|
|
||||||
5. **Use Filters**: Apply filters to subscriptions to reduce unnecessary notifications
|
|
||||||
6. **Connection State**: Check `isConnected()` before operations
|
|
||||||
7. **Event Listeners**: Remove event listeners when no longer needed with `off()`
|
|
||||||
8. **Reconnection**: Enable auto-reconnection for production apps
|
|
||||||
|
|
||||||
## Browser Support
|
|
||||||
|
|
||||||
- Chrome/Edge 88+
|
|
||||||
- Firefox 85+
|
|
||||||
- Safari 14+
|
|
||||||
- Node.js 14.16+
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
MIT
|
|
||||||
1
resolvespec-js/dist/index.cjs
vendored
Normal file
1
resolvespec-js/dist/index.cjs
vendored
Normal file
File diff suppressed because one or more lines are too long
366
resolvespec-js/dist/index.d.ts
vendored
Normal file
366
resolvespec-js/dist/index.d.ts
vendored
Normal file
@@ -0,0 +1,366 @@
|
|||||||
|
export declare interface APIError {
|
||||||
|
code: string;
|
||||||
|
message: string;
|
||||||
|
details?: any;
|
||||||
|
detail?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface APIResponse<T = any> {
|
||||||
|
success: boolean;
|
||||||
|
data: T;
|
||||||
|
metadata?: Metadata;
|
||||||
|
error?: APIError;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build HTTP headers from Options, matching Go's restheadspec handler conventions.
|
||||||
|
*
|
||||||
|
* Header mapping:
|
||||||
|
* - X-Select-Fields: comma-separated columns
|
||||||
|
* - X-Not-Select-Fields: comma-separated omit_columns
|
||||||
|
* - X-FieldFilter-{col}: exact match (eq)
|
||||||
|
* - X-SearchOp-{operator}-{col}: AND filter
|
||||||
|
* - X-SearchOr-{operator}-{col}: OR filter
|
||||||
|
* - X-Sort: +col (asc), -col (desc)
|
||||||
|
* - X-Limit, X-Offset: pagination
|
||||||
|
* - X-Cursor-Forward, X-Cursor-Backward: cursor pagination
|
||||||
|
* - X-Preload: RelationName:field1,field2 pipe-separated
|
||||||
|
* - X-Fetch-RowNumber: row number fetch
|
||||||
|
* - X-CQL-SEL-{col}: computed columns
|
||||||
|
* - X-Custom-SQL-W: custom operators (AND)
|
||||||
|
*/
|
||||||
|
export declare function buildHeaders(options: Options): Record<string, string>;
|
||||||
|
|
||||||
|
export declare interface ClientConfig {
|
||||||
|
baseUrl: string;
|
||||||
|
token?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface Column {
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
is_nullable: boolean;
|
||||||
|
is_primary: boolean;
|
||||||
|
is_unique: boolean;
|
||||||
|
has_index: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface ComputedColumn {
|
||||||
|
name: string;
|
||||||
|
expression: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare type ConnectionState = 'connecting' | 'connected' | 'disconnecting' | 'disconnected' | 'reconnecting';
|
||||||
|
|
||||||
|
export declare interface CustomOperator {
|
||||||
|
name: string;
|
||||||
|
sql: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decode a header value that may be base64 encoded with ZIP_ or __ prefix.
|
||||||
|
*/
|
||||||
|
export declare function decodeHeaderValue(value: string): string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode a value with base64 and ZIP_ prefix for complex header values.
|
||||||
|
*/
|
||||||
|
export declare function encodeHeaderValue(value: string): string;
|
||||||
|
|
||||||
|
export declare interface FilterOption {
|
||||||
|
column: string;
|
||||||
|
operator: Operator | string;
|
||||||
|
value: any;
|
||||||
|
logic_operator?: 'AND' | 'OR';
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare function getHeaderSpecClient(config: ClientConfig): HeaderSpecClient;
|
||||||
|
|
||||||
|
export declare function getResolveSpecClient(config: ClientConfig): ResolveSpecClient;
|
||||||
|
|
||||||
|
export declare function getWebSocketClient(config: WebSocketClientConfig): WebSocketClient;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* HeaderSpec REST client.
|
||||||
|
* Sends query options via HTTP headers instead of request body, matching the Go restheadspec handler.
|
||||||
|
*
|
||||||
|
* HTTP methods: GET=read, POST=create, PUT=update, DELETE=delete
|
||||||
|
*/
|
||||||
|
export declare class HeaderSpecClient {
|
||||||
|
private config;
|
||||||
|
constructor(config: ClientConfig);
|
||||||
|
private buildUrl;
|
||||||
|
private baseHeaders;
|
||||||
|
private fetchWithError;
|
||||||
|
read<T = any>(schema: string, entity: string, id?: string, options?: Options): Promise<APIResponse<T>>;
|
||||||
|
create<T = any>(schema: string, entity: string, data: any, options?: Options): Promise<APIResponse<T>>;
|
||||||
|
update<T = any>(schema: string, entity: string, id: string, data: any, options?: Options): Promise<APIResponse<T>>;
|
||||||
|
delete(schema: string, entity: string, id: string): Promise<APIResponse<void>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare type MessageType = 'request' | 'response' | 'notification' | 'subscription' | 'error' | 'ping' | 'pong';
|
||||||
|
|
||||||
|
export declare interface Metadata {
|
||||||
|
total: number;
|
||||||
|
count: number;
|
||||||
|
filtered: number;
|
||||||
|
limit: number;
|
||||||
|
offset: number;
|
||||||
|
row_number?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare type Operation = 'read' | 'create' | 'update' | 'delete';
|
||||||
|
|
||||||
|
export declare type Operator = 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte' | 'like' | 'ilike' | 'in' | 'contains' | 'startswith' | 'endswith' | 'between' | 'between_inclusive' | 'is_null' | 'is_not_null';
|
||||||
|
|
||||||
|
export declare interface Options {
|
||||||
|
preload?: PreloadOption[];
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
filters?: FilterOption[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
customOperators?: CustomOperator[];
|
||||||
|
computedColumns?: ComputedColumn[];
|
||||||
|
parameters?: Parameter[];
|
||||||
|
cursor_forward?: string;
|
||||||
|
cursor_backward?: string;
|
||||||
|
fetch_row_number?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface Parameter {
|
||||||
|
name: string;
|
||||||
|
value: string;
|
||||||
|
sequence?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface PreloadOption {
|
||||||
|
relation: string;
|
||||||
|
table_name?: string;
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
filters?: FilterOption[];
|
||||||
|
where?: string;
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
updatable?: boolean;
|
||||||
|
computed_ql?: Record<string, string>;
|
||||||
|
recursive?: boolean;
|
||||||
|
primary_key?: string;
|
||||||
|
related_key?: string;
|
||||||
|
foreign_key?: string;
|
||||||
|
recursive_child_key?: string;
|
||||||
|
sql_joins?: string[];
|
||||||
|
join_aliases?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface RequestBody {
|
||||||
|
operation: Operation;
|
||||||
|
id?: number | string | string[];
|
||||||
|
data?: any | any[];
|
||||||
|
options?: Options;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare class ResolveSpecClient {
|
||||||
|
private config;
|
||||||
|
constructor(config: ClientConfig);
|
||||||
|
private buildUrl;
|
||||||
|
private baseHeaders;
|
||||||
|
private fetchWithError;
|
||||||
|
getMetadata(schema: string, entity: string): Promise<APIResponse<TableMetadata>>;
|
||||||
|
read<T = any>(schema: string, entity: string, id?: number | string | string[], options?: Options): Promise<APIResponse<T>>;
|
||||||
|
create<T = any>(schema: string, entity: string, data: any | any[], options?: Options): Promise<APIResponse<T>>;
|
||||||
|
update<T = any>(schema: string, entity: string, data: any | any[], id?: number | string | string[], options?: Options): Promise<APIResponse<T>>;
|
||||||
|
delete(schema: string, entity: string, id: number | string): Promise<APIResponse<void>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare type SortDirection = 'asc' | 'desc' | 'ASC' | 'DESC';
|
||||||
|
|
||||||
|
export declare interface SortOption {
|
||||||
|
column: string;
|
||||||
|
direction: SortDirection;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface Subscription {
|
||||||
|
id: string;
|
||||||
|
entity: string;
|
||||||
|
schema?: string;
|
||||||
|
options?: WSOptions;
|
||||||
|
callback?: (notification: WSNotificationMessage) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface SubscriptionOptions {
|
||||||
|
filters?: FilterOption[];
|
||||||
|
onNotification?: (notification: WSNotificationMessage) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface TableMetadata {
|
||||||
|
schema: string;
|
||||||
|
table: string;
|
||||||
|
columns: Column[];
|
||||||
|
relations: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare class WebSocketClient {
|
||||||
|
private ws;
|
||||||
|
private config;
|
||||||
|
private messageHandlers;
|
||||||
|
private subscriptions;
|
||||||
|
private eventListeners;
|
||||||
|
private state;
|
||||||
|
private reconnectAttempts;
|
||||||
|
private reconnectTimer;
|
||||||
|
private heartbeatTimer;
|
||||||
|
private isManualClose;
|
||||||
|
constructor(config: WebSocketClientConfig);
|
||||||
|
connect(): Promise<void>;
|
||||||
|
disconnect(): void;
|
||||||
|
request<T = any>(operation: WSOperation, entity: string, options?: {
|
||||||
|
schema?: string;
|
||||||
|
record_id?: string;
|
||||||
|
data?: any;
|
||||||
|
options?: WSOptions;
|
||||||
|
}): Promise<T>;
|
||||||
|
read<T = any>(entity: string, options?: {
|
||||||
|
schema?: string;
|
||||||
|
record_id?: string;
|
||||||
|
filters?: FilterOption[];
|
||||||
|
columns?: string[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
preload?: PreloadOption[];
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
}): Promise<T>;
|
||||||
|
create<T = any>(entity: string, data: any, options?: {
|
||||||
|
schema?: string;
|
||||||
|
}): Promise<T>;
|
||||||
|
update<T = any>(entity: string, id: string, data: any, options?: {
|
||||||
|
schema?: string;
|
||||||
|
}): Promise<T>;
|
||||||
|
delete(entity: string, id: string, options?: {
|
||||||
|
schema?: string;
|
||||||
|
}): Promise<void>;
|
||||||
|
meta<T = any>(entity: string, options?: {
|
||||||
|
schema?: string;
|
||||||
|
}): Promise<T>;
|
||||||
|
subscribe(entity: string, callback: (notification: WSNotificationMessage) => void, options?: {
|
||||||
|
schema?: string;
|
||||||
|
filters?: FilterOption[];
|
||||||
|
}): Promise<string>;
|
||||||
|
unsubscribe(subscriptionId: string): Promise<void>;
|
||||||
|
getSubscriptions(): Subscription[];
|
||||||
|
getState(): ConnectionState;
|
||||||
|
isConnected(): boolean;
|
||||||
|
on<K extends keyof WebSocketClientEvents>(event: K, callback: WebSocketClientEvents[K]): void;
|
||||||
|
off<K extends keyof WebSocketClientEvents>(event: K): void;
|
||||||
|
private handleMessage;
|
||||||
|
private handleResponse;
|
||||||
|
private handleNotification;
|
||||||
|
private send;
|
||||||
|
private startHeartbeat;
|
||||||
|
private stopHeartbeat;
|
||||||
|
private setState;
|
||||||
|
private ensureConnected;
|
||||||
|
private emit;
|
||||||
|
private log;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WebSocketClientConfig {
|
||||||
|
url: string;
|
||||||
|
reconnect?: boolean;
|
||||||
|
reconnectInterval?: number;
|
||||||
|
maxReconnectAttempts?: number;
|
||||||
|
heartbeatInterval?: number;
|
||||||
|
debug?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WebSocketClientEvents {
|
||||||
|
connect: () => void;
|
||||||
|
disconnect: (event: CloseEvent) => void;
|
||||||
|
error: (error: Error) => void;
|
||||||
|
message: (message: WSMessage) => void;
|
||||||
|
stateChange: (state: ConnectionState) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSErrorInfo {
|
||||||
|
code: string;
|
||||||
|
message: string;
|
||||||
|
details?: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSMessage {
|
||||||
|
id?: string;
|
||||||
|
type: MessageType;
|
||||||
|
operation?: WSOperation;
|
||||||
|
schema?: string;
|
||||||
|
entity?: string;
|
||||||
|
record_id?: string;
|
||||||
|
data?: any;
|
||||||
|
options?: WSOptions;
|
||||||
|
subscription_id?: string;
|
||||||
|
success?: boolean;
|
||||||
|
error?: WSErrorInfo;
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
timestamp?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSNotificationMessage {
|
||||||
|
type: 'notification';
|
||||||
|
operation: WSOperation;
|
||||||
|
subscription_id: string;
|
||||||
|
schema?: string;
|
||||||
|
entity: string;
|
||||||
|
data: any;
|
||||||
|
timestamp: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare type WSOperation = 'read' | 'create' | 'update' | 'delete' | 'subscribe' | 'unsubscribe' | 'meta';
|
||||||
|
|
||||||
|
export declare interface WSOptions {
|
||||||
|
filters?: FilterOption[];
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
preload?: PreloadOption[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
parameters?: Parameter[];
|
||||||
|
cursor_forward?: string;
|
||||||
|
cursor_backward?: string;
|
||||||
|
fetch_row_number?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSRequestMessage {
|
||||||
|
id: string;
|
||||||
|
type: 'request';
|
||||||
|
operation: WSOperation;
|
||||||
|
schema?: string;
|
||||||
|
entity: string;
|
||||||
|
record_id?: string;
|
||||||
|
data?: any;
|
||||||
|
options?: WSOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSResponseMessage {
|
||||||
|
id: string;
|
||||||
|
type: 'response';
|
||||||
|
success: boolean;
|
||||||
|
data?: any;
|
||||||
|
error?: WSErrorInfo;
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
timestamp: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export declare interface WSSubscriptionMessage {
|
||||||
|
id: string;
|
||||||
|
type: 'subscription';
|
||||||
|
operation: 'subscribe' | 'unsubscribe';
|
||||||
|
schema?: string;
|
||||||
|
entity: string;
|
||||||
|
options?: WSOptions;
|
||||||
|
subscription_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export { }
|
||||||
469
resolvespec-js/dist/index.js
vendored
Normal file
469
resolvespec-js/dist/index.js
vendored
Normal file
@@ -0,0 +1,469 @@
|
|||||||
|
import { v4 as l } from "uuid";
|
||||||
|
const d = /* @__PURE__ */ new Map();
|
||||||
|
function E(n) {
|
||||||
|
const e = n.baseUrl;
|
||||||
|
let t = d.get(e);
|
||||||
|
return t || (t = new g(n), d.set(e, t)), t;
|
||||||
|
}
|
||||||
|
class g {
|
||||||
|
constructor(e) {
|
||||||
|
this.config = e;
|
||||||
|
}
|
||||||
|
buildUrl(e, t, s) {
|
||||||
|
let r = `${this.config.baseUrl}/${e}/${t}`;
|
||||||
|
return s && (r += `/${s}`), r;
|
||||||
|
}
|
||||||
|
baseHeaders() {
|
||||||
|
const e = {
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
};
|
||||||
|
return this.config.token && (e.Authorization = `Bearer ${this.config.token}`), e;
|
||||||
|
}
|
||||||
|
async fetchWithError(e, t) {
|
||||||
|
const s = await fetch(e, t), r = await s.json();
|
||||||
|
if (!s.ok)
|
||||||
|
throw new Error(r.error?.message || "An error occurred");
|
||||||
|
return r;
|
||||||
|
}
|
||||||
|
async getMetadata(e, t) {
|
||||||
|
const s = this.buildUrl(e, t);
|
||||||
|
return this.fetchWithError(s, {
|
||||||
|
method: "GET",
|
||||||
|
headers: this.baseHeaders()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async read(e, t, s, r) {
|
||||||
|
const i = typeof s == "number" || typeof s == "string" ? String(s) : void 0, a = this.buildUrl(e, t, i), c = {
|
||||||
|
operation: "read",
|
||||||
|
id: Array.isArray(s) ? s : void 0,
|
||||||
|
options: r
|
||||||
|
};
|
||||||
|
return this.fetchWithError(a, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(c)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async create(e, t, s, r) {
|
||||||
|
const i = this.buildUrl(e, t), a = {
|
||||||
|
operation: "create",
|
||||||
|
data: s,
|
||||||
|
options: r
|
||||||
|
};
|
||||||
|
return this.fetchWithError(i, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(a)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async update(e, t, s, r, i) {
|
||||||
|
const a = typeof r == "number" || typeof r == "string" ? String(r) : void 0, c = this.buildUrl(e, t, a), o = {
|
||||||
|
operation: "update",
|
||||||
|
id: Array.isArray(r) ? r : void 0,
|
||||||
|
data: s,
|
||||||
|
options: i
|
||||||
|
};
|
||||||
|
return this.fetchWithError(c, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(o)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async delete(e, t, s) {
|
||||||
|
const r = this.buildUrl(e, t, String(s)), i = {
|
||||||
|
operation: "delete"
|
||||||
|
};
|
||||||
|
return this.fetchWithError(r, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(i)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const f = /* @__PURE__ */ new Map();
|
||||||
|
function _(n) {
|
||||||
|
const e = n.url;
|
||||||
|
let t = f.get(e);
|
||||||
|
return t || (t = new p(n), f.set(e, t)), t;
|
||||||
|
}
|
||||||
|
class p {
|
||||||
|
constructor(e) {
|
||||||
|
this.ws = null, this.messageHandlers = /* @__PURE__ */ new Map(), this.subscriptions = /* @__PURE__ */ new Map(), this.eventListeners = {}, this.state = "disconnected", this.reconnectAttempts = 0, this.reconnectTimer = null, this.heartbeatTimer = null, this.isManualClose = !1, this.config = {
|
||||||
|
url: e.url,
|
||||||
|
reconnect: e.reconnect ?? !0,
|
||||||
|
reconnectInterval: e.reconnectInterval ?? 3e3,
|
||||||
|
maxReconnectAttempts: e.maxReconnectAttempts ?? 10,
|
||||||
|
heartbeatInterval: e.heartbeatInterval ?? 3e4,
|
||||||
|
debug: e.debug ?? !1
|
||||||
|
};
|
||||||
|
}
|
||||||
|
async connect() {
|
||||||
|
if (this.ws?.readyState === WebSocket.OPEN) {
|
||||||
|
this.log("Already connected");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return this.isManualClose = !1, this.setState("connecting"), new Promise((e, t) => {
|
||||||
|
try {
|
||||||
|
this.ws = new WebSocket(this.config.url), this.ws.onopen = () => {
|
||||||
|
this.log("Connected to WebSocket server"), this.setState("connected"), this.reconnectAttempts = 0, this.startHeartbeat(), this.emit("connect"), e();
|
||||||
|
}, this.ws.onmessage = (s) => {
|
||||||
|
this.handleMessage(s.data);
|
||||||
|
}, this.ws.onerror = (s) => {
|
||||||
|
this.log("WebSocket error:", s);
|
||||||
|
const r = new Error("WebSocket connection error");
|
||||||
|
this.emit("error", r), t(r);
|
||||||
|
}, this.ws.onclose = (s) => {
|
||||||
|
this.log("WebSocket closed:", s.code, s.reason), this.stopHeartbeat(), this.setState("disconnected"), this.emit("disconnect", s), this.config.reconnect && !this.isManualClose && this.reconnectAttempts < this.config.maxReconnectAttempts && (this.reconnectAttempts++, this.log(`Reconnection attempt ${this.reconnectAttempts}/${this.config.maxReconnectAttempts}`), this.setState("reconnecting"), this.reconnectTimer = setTimeout(() => {
|
||||||
|
this.connect().catch((r) => {
|
||||||
|
this.log("Reconnection failed:", r);
|
||||||
|
});
|
||||||
|
}, this.config.reconnectInterval));
|
||||||
|
};
|
||||||
|
} catch (s) {
|
||||||
|
t(s);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
disconnect() {
|
||||||
|
this.isManualClose = !0, this.reconnectTimer && (clearTimeout(this.reconnectTimer), this.reconnectTimer = null), this.stopHeartbeat(), this.ws && (this.setState("disconnecting"), this.ws.close(), this.ws = null), this.setState("disconnected"), this.messageHandlers.clear();
|
||||||
|
}
|
||||||
|
async request(e, t, s) {
|
||||||
|
this.ensureConnected();
|
||||||
|
const r = l(), i = {
|
||||||
|
id: r,
|
||||||
|
type: "request",
|
||||||
|
operation: e,
|
||||||
|
entity: t,
|
||||||
|
schema: s?.schema,
|
||||||
|
record_id: s?.record_id,
|
||||||
|
data: s?.data,
|
||||||
|
options: s?.options
|
||||||
|
};
|
||||||
|
return new Promise((a, c) => {
|
||||||
|
this.messageHandlers.set(r, (o) => {
|
||||||
|
o.success ? a(o.data) : c(new Error(o.error?.message || "Request failed"));
|
||||||
|
}), this.send(i), setTimeout(() => {
|
||||||
|
this.messageHandlers.has(r) && (this.messageHandlers.delete(r), c(new Error("Request timeout")));
|
||||||
|
}, 3e4);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async read(e, t) {
|
||||||
|
return this.request("read", e, {
|
||||||
|
schema: t?.schema,
|
||||||
|
record_id: t?.record_id,
|
||||||
|
options: {
|
||||||
|
filters: t?.filters,
|
||||||
|
columns: t?.columns,
|
||||||
|
sort: t?.sort,
|
||||||
|
preload: t?.preload,
|
||||||
|
limit: t?.limit,
|
||||||
|
offset: t?.offset
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async create(e, t, s) {
|
||||||
|
return this.request("create", e, {
|
||||||
|
schema: s?.schema,
|
||||||
|
data: t
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async update(e, t, s, r) {
|
||||||
|
return this.request("update", e, {
|
||||||
|
schema: r?.schema,
|
||||||
|
record_id: t,
|
||||||
|
data: s
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async delete(e, t, s) {
|
||||||
|
await this.request("delete", e, {
|
||||||
|
schema: s?.schema,
|
||||||
|
record_id: t
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async meta(e, t) {
|
||||||
|
return this.request("meta", e, {
|
||||||
|
schema: t?.schema
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async subscribe(e, t, s) {
|
||||||
|
this.ensureConnected();
|
||||||
|
const r = l(), i = {
|
||||||
|
id: r,
|
||||||
|
type: "subscription",
|
||||||
|
operation: "subscribe",
|
||||||
|
entity: e,
|
||||||
|
schema: s?.schema,
|
||||||
|
options: {
|
||||||
|
filters: s?.filters
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return new Promise((a, c) => {
|
||||||
|
this.messageHandlers.set(r, (o) => {
|
||||||
|
if (o.success && o.data?.subscription_id) {
|
||||||
|
const h = o.data.subscription_id;
|
||||||
|
this.subscriptions.set(h, {
|
||||||
|
id: h,
|
||||||
|
entity: e,
|
||||||
|
schema: s?.schema,
|
||||||
|
options: { filters: s?.filters },
|
||||||
|
callback: t
|
||||||
|
}), this.log(`Subscribed to ${e} with ID: ${h}`), a(h);
|
||||||
|
} else
|
||||||
|
c(new Error(o.error?.message || "Subscription failed"));
|
||||||
|
}), this.send(i), setTimeout(() => {
|
||||||
|
this.messageHandlers.has(r) && (this.messageHandlers.delete(r), c(new Error("Subscription timeout")));
|
||||||
|
}, 1e4);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async unsubscribe(e) {
|
||||||
|
this.ensureConnected();
|
||||||
|
const t = l(), s = {
|
||||||
|
id: t,
|
||||||
|
type: "subscription",
|
||||||
|
operation: "unsubscribe",
|
||||||
|
subscription_id: e
|
||||||
|
};
|
||||||
|
return new Promise((r, i) => {
|
||||||
|
this.messageHandlers.set(t, (a) => {
|
||||||
|
a.success ? (this.subscriptions.delete(e), this.log(`Unsubscribed from ${e}`), r()) : i(new Error(a.error?.message || "Unsubscribe failed"));
|
||||||
|
}), this.send(s), setTimeout(() => {
|
||||||
|
this.messageHandlers.has(t) && (this.messageHandlers.delete(t), i(new Error("Unsubscribe timeout")));
|
||||||
|
}, 1e4);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
getSubscriptions() {
|
||||||
|
return Array.from(this.subscriptions.values());
|
||||||
|
}
|
||||||
|
getState() {
|
||||||
|
return this.state;
|
||||||
|
}
|
||||||
|
isConnected() {
|
||||||
|
return this.ws?.readyState === WebSocket.OPEN;
|
||||||
|
}
|
||||||
|
on(e, t) {
|
||||||
|
this.eventListeners[e] = t;
|
||||||
|
}
|
||||||
|
off(e) {
|
||||||
|
delete this.eventListeners[e];
|
||||||
|
}
|
||||||
|
// Private methods
|
||||||
|
handleMessage(e) {
|
||||||
|
try {
|
||||||
|
const t = JSON.parse(e);
|
||||||
|
switch (this.log("Received message:", t), this.emit("message", t), t.type) {
|
||||||
|
case "response":
|
||||||
|
this.handleResponse(t);
|
||||||
|
break;
|
||||||
|
case "notification":
|
||||||
|
this.handleNotification(t);
|
||||||
|
break;
|
||||||
|
case "pong":
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
this.log("Unknown message type:", t.type);
|
||||||
|
}
|
||||||
|
} catch (t) {
|
||||||
|
this.log("Error parsing message:", t);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
handleResponse(e) {
|
||||||
|
const t = this.messageHandlers.get(e.id);
|
||||||
|
t && (t(e), this.messageHandlers.delete(e.id));
|
||||||
|
}
|
||||||
|
handleNotification(e) {
|
||||||
|
const t = this.subscriptions.get(e.subscription_id);
|
||||||
|
t?.callback && t.callback(e);
|
||||||
|
}
|
||||||
|
send(e) {
|
||||||
|
if (!this.ws || this.ws.readyState !== WebSocket.OPEN)
|
||||||
|
throw new Error("WebSocket is not connected");
|
||||||
|
const t = JSON.stringify(e);
|
||||||
|
this.log("Sending message:", e), this.ws.send(t);
|
||||||
|
}
|
||||||
|
startHeartbeat() {
|
||||||
|
this.heartbeatTimer || (this.heartbeatTimer = setInterval(() => {
|
||||||
|
if (this.isConnected()) {
|
||||||
|
const e = {
|
||||||
|
id: l(),
|
||||||
|
type: "ping"
|
||||||
|
};
|
||||||
|
this.send(e);
|
||||||
|
}
|
||||||
|
}, this.config.heartbeatInterval));
|
||||||
|
}
|
||||||
|
stopHeartbeat() {
|
||||||
|
this.heartbeatTimer && (clearInterval(this.heartbeatTimer), this.heartbeatTimer = null);
|
||||||
|
}
|
||||||
|
setState(e) {
|
||||||
|
this.state !== e && (this.state = e, this.emit("stateChange", e));
|
||||||
|
}
|
||||||
|
ensureConnected() {
|
||||||
|
if (!this.isConnected())
|
||||||
|
throw new Error("WebSocket is not connected. Call connect() first.");
|
||||||
|
}
|
||||||
|
emit(e, ...t) {
|
||||||
|
const s = this.eventListeners[e];
|
||||||
|
s && s(...t);
|
||||||
|
}
|
||||||
|
log(...e) {
|
||||||
|
this.config.debug && console.log("[WebSocketClient]", ...e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function v(n) {
|
||||||
|
return typeof btoa == "function" ? "ZIP_" + btoa(n) : "ZIP_" + Buffer.from(n, "utf-8").toString("base64");
|
||||||
|
}
|
||||||
|
function w(n) {
|
||||||
|
let e = n;
|
||||||
|
return e.startsWith("ZIP_") ? (e = e.slice(4).replace(/[\n\r ]/g, ""), e = m(e)) : e.startsWith("__") && (e = e.slice(2).replace(/[\n\r ]/g, ""), e = m(e)), (e.startsWith("ZIP_") || e.startsWith("__")) && (e = w(e)), e;
|
||||||
|
}
|
||||||
|
function m(n) {
|
||||||
|
return typeof atob == "function" ? atob(n) : Buffer.from(n, "base64").toString("utf-8");
|
||||||
|
}
|
||||||
|
function u(n) {
|
||||||
|
const e = {};
|
||||||
|
if (n.columns?.length && (e["X-Select-Fields"] = n.columns.join(",")), n.omit_columns?.length && (e["X-Not-Select-Fields"] = n.omit_columns.join(",")), n.filters?.length)
|
||||||
|
for (const t of n.filters) {
|
||||||
|
const s = t.logic_operator ?? "AND", r = y(t.operator), i = S(t);
|
||||||
|
t.operator === "eq" && s === "AND" ? e[`X-FieldFilter-${t.column}`] = i : s === "OR" ? e[`X-SearchOr-${r}-${t.column}`] = i : e[`X-SearchOp-${r}-${t.column}`] = i;
|
||||||
|
}
|
||||||
|
if (n.sort?.length) {
|
||||||
|
const t = n.sort.map((s) => s.direction.toUpperCase() === "DESC" ? `-${s.column}` : `+${s.column}`);
|
||||||
|
e["X-Sort"] = t.join(",");
|
||||||
|
}
|
||||||
|
if (n.limit !== void 0 && (e["X-Limit"] = String(n.limit)), n.offset !== void 0 && (e["X-Offset"] = String(n.offset)), n.cursor_forward && (e["X-Cursor-Forward"] = n.cursor_forward), n.cursor_backward && (e["X-Cursor-Backward"] = n.cursor_backward), n.preload?.length) {
|
||||||
|
const t = n.preload.map((s) => s.columns?.length ? `${s.relation}:${s.columns.join(",")}` : s.relation);
|
||||||
|
e["X-Preload"] = t.join("|");
|
||||||
|
}
|
||||||
|
if (n.fetch_row_number && (e["X-Fetch-RowNumber"] = n.fetch_row_number), n.computedColumns?.length)
|
||||||
|
for (const t of n.computedColumns)
|
||||||
|
e[`X-CQL-SEL-${t.name}`] = t.expression;
|
||||||
|
if (n.customOperators?.length) {
|
||||||
|
const t = n.customOperators.map(
|
||||||
|
(s) => s.sql
|
||||||
|
);
|
||||||
|
e["X-Custom-SQL-W"] = t.join(" AND ");
|
||||||
|
}
|
||||||
|
return e;
|
||||||
|
}
|
||||||
|
function y(n) {
|
||||||
|
switch (n) {
|
||||||
|
case "eq":
|
||||||
|
return "equals";
|
||||||
|
case "neq":
|
||||||
|
return "notequals";
|
||||||
|
case "gt":
|
||||||
|
return "greaterthan";
|
||||||
|
case "gte":
|
||||||
|
return "greaterthanorequal";
|
||||||
|
case "lt":
|
||||||
|
return "lessthan";
|
||||||
|
case "lte":
|
||||||
|
return "lessthanorequal";
|
||||||
|
case "like":
|
||||||
|
case "ilike":
|
||||||
|
case "contains":
|
||||||
|
return "contains";
|
||||||
|
case "startswith":
|
||||||
|
return "beginswith";
|
||||||
|
case "endswith":
|
||||||
|
return "endswith";
|
||||||
|
case "in":
|
||||||
|
return "in";
|
||||||
|
case "between":
|
||||||
|
return "between";
|
||||||
|
case "between_inclusive":
|
||||||
|
return "betweeninclusive";
|
||||||
|
case "is_null":
|
||||||
|
return "empty";
|
||||||
|
case "is_not_null":
|
||||||
|
return "notempty";
|
||||||
|
default:
|
||||||
|
return n;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function S(n) {
|
||||||
|
return n.value === null || n.value === void 0 ? "" : Array.isArray(n.value) ? n.value.join(",") : String(n.value);
|
||||||
|
}
|
||||||
|
const b = /* @__PURE__ */ new Map();
|
||||||
|
function C(n) {
|
||||||
|
const e = n.baseUrl;
|
||||||
|
let t = b.get(e);
|
||||||
|
return t || (t = new H(n), b.set(e, t)), t;
|
||||||
|
}
|
||||||
|
class H {
|
||||||
|
constructor(e) {
|
||||||
|
this.config = e;
|
||||||
|
}
|
||||||
|
buildUrl(e, t, s) {
|
||||||
|
let r = `${this.config.baseUrl}/${e}/${t}`;
|
||||||
|
return s && (r += `/${s}`), r;
|
||||||
|
}
|
||||||
|
baseHeaders() {
|
||||||
|
const e = {
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
};
|
||||||
|
return this.config.token && (e.Authorization = `Bearer ${this.config.token}`), e;
|
||||||
|
}
|
||||||
|
async fetchWithError(e, t) {
|
||||||
|
const s = await fetch(e, t), r = await s.json();
|
||||||
|
if (!s.ok)
|
||||||
|
throw new Error(
|
||||||
|
r.error?.message || `${s.statusText} (${s.status})`
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
data: r,
|
||||||
|
success: !0,
|
||||||
|
error: r.error ? r.error : void 0,
|
||||||
|
metadata: {
|
||||||
|
count: s.headers.get("content-range") ? Number(s.headers.get("content-range")?.split("/")[1]) : 0,
|
||||||
|
total: s.headers.get("content-range") ? Number(s.headers.get("content-range")?.split("/")[1]) : 0,
|
||||||
|
filtered: s.headers.get("content-range") ? Number(s.headers.get("content-range")?.split("/")[1]) : 0,
|
||||||
|
offset: s.headers.get("content-range") ? Number(
|
||||||
|
s.headers.get("content-range")?.split("/")[0].split("-")[0]
|
||||||
|
) : 0,
|
||||||
|
limit: s.headers.get("x-limit") ? Number(s.headers.get("x-limit")) : 0
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
async read(e, t, s, r) {
|
||||||
|
const i = this.buildUrl(e, t, s), a = r ? u(r) : {};
|
||||||
|
return this.fetchWithError(i, {
|
||||||
|
method: "GET",
|
||||||
|
headers: { ...this.baseHeaders(), ...a }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async create(e, t, s, r) {
|
||||||
|
const i = this.buildUrl(e, t), a = r ? u(r) : {};
|
||||||
|
return this.fetchWithError(i, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { ...this.baseHeaders(), ...a },
|
||||||
|
body: JSON.stringify(s)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async update(e, t, s, r, i) {
|
||||||
|
const a = this.buildUrl(e, t, s), c = i ? u(i) : {};
|
||||||
|
return this.fetchWithError(a, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: { ...this.baseHeaders(), ...c },
|
||||||
|
body: JSON.stringify(r)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async delete(e, t, s) {
|
||||||
|
const r = this.buildUrl(e, t, s);
|
||||||
|
return this.fetchWithError(r, {
|
||||||
|
method: "DELETE",
|
||||||
|
headers: this.baseHeaders()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export {
|
||||||
|
H as HeaderSpecClient,
|
||||||
|
g as ResolveSpecClient,
|
||||||
|
p as WebSocketClient,
|
||||||
|
u as buildHeaders,
|
||||||
|
w as decodeHeaderValue,
|
||||||
|
v as encodeHeaderValue,
|
||||||
|
C as getHeaderSpecClient,
|
||||||
|
E as getResolveSpecClient,
|
||||||
|
_ as getWebSocketClient
|
||||||
|
};
|
||||||
@@ -1,20 +1,23 @@
|
|||||||
{
|
{
|
||||||
"name": "@warkypublic/resolvespec-js",
|
"name": "@warkypublic/resolvespec-js",
|
||||||
"version": "1.0.0",
|
"version": "1.0.1",
|
||||||
"description": "Client side library for the ResolveSpec API",
|
"description": "TypeScript client library for ResolveSpec REST, HeaderSpec, and WebSocket APIs",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "./src/index.ts",
|
"main": "./dist/index.cjs",
|
||||||
"module": "./src/index.ts",
|
|
||||||
"types": "./src/index.ts",
|
|
||||||
"publishConfig": {
|
|
||||||
"access": "public",
|
|
||||||
"main": "./dist/index.js",
|
|
||||||
"module": "./dist/index.js",
|
"module": "./dist/index.js",
|
||||||
"types": "./dist/index.d.ts"
|
"types": "./dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"import": "./dist/index.js",
|
||||||
|
"require": "./dist/index.cjs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"access": "public"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"dist",
|
"dist",
|
||||||
"bin",
|
|
||||||
"README.md"
|
"README.md"
|
||||||
],
|
],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
@@ -25,38 +28,33 @@
|
|||||||
"lint": "eslint src"
|
"lint": "eslint src"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"string",
|
"resolvespec",
|
||||||
"blob",
|
"headerspec",
|
||||||
"dependencies",
|
"websocket",
|
||||||
"workspace",
|
"rest-client",
|
||||||
"package",
|
"typescript",
|
||||||
"cli",
|
"api-client"
|
||||||
"tools",
|
|
||||||
"npm",
|
|
||||||
"yarn",
|
|
||||||
"pnpm"
|
|
||||||
],
|
],
|
||||||
"author": "Hein (Warkanum) Puth",
|
"author": "Hein (Warkanum) Puth",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"semver": "^7.6.3",
|
"uuid": "^13.0.0"
|
||||||
"uuid": "^11.0.3"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@changesets/cli": "^2.27.10",
|
"@changesets/cli": "^2.29.8",
|
||||||
"@eslint/js": "^9.16.0",
|
"@eslint/js": "^10.0.1",
|
||||||
"@types/jsdom": "^21.1.7",
|
"@types/jsdom": "^27.0.0",
|
||||||
"eslint": "^9.16.0",
|
"eslint": "^10.0.0",
|
||||||
"globals": "^15.13.0",
|
"globals": "^17.3.0",
|
||||||
"jsdom": "^25.0.1",
|
"jsdom": "^28.1.0",
|
||||||
"typescript": "^5.7.2",
|
"typescript": "^5.9.3",
|
||||||
"typescript-eslint": "^8.17.0",
|
"typescript-eslint": "^8.55.0",
|
||||||
"vite": "^6.0.2",
|
"vite": "^7.3.1",
|
||||||
"vite-plugin-dts": "^4.3.0",
|
"vite-plugin-dts": "^4.5.4",
|
||||||
"vitest": "^2.1.8"
|
"vitest": "^4.0.18"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=14.16"
|
"node": ">=18"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
3376
resolvespec-js/pnpm-lock.yaml
generated
Normal file
3376
resolvespec-js/pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
143
resolvespec-js/src/__tests__/common.test.ts
Normal file
143
resolvespec-js/src/__tests__/common.test.ts
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
import { describe, it, expect } from 'vitest';
|
||||||
|
import type {
|
||||||
|
Options,
|
||||||
|
FilterOption,
|
||||||
|
SortOption,
|
||||||
|
PreloadOption,
|
||||||
|
RequestBody,
|
||||||
|
APIResponse,
|
||||||
|
Metadata,
|
||||||
|
APIError,
|
||||||
|
Parameter,
|
||||||
|
ComputedColumn,
|
||||||
|
CustomOperator,
|
||||||
|
} from '../common/types';
|
||||||
|
|
||||||
|
describe('Common Types', () => {
|
||||||
|
it('should construct a valid FilterOption with logic_operator', () => {
|
||||||
|
const filter: FilterOption = {
|
||||||
|
column: 'name',
|
||||||
|
operator: 'eq',
|
||||||
|
value: 'test',
|
||||||
|
logic_operator: 'OR',
|
||||||
|
};
|
||||||
|
expect(filter.logic_operator).toBe('OR');
|
||||||
|
expect(filter.operator).toBe('eq');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct Options with all new fields', () => {
|
||||||
|
const opts: Options = {
|
||||||
|
columns: ['id', 'name'],
|
||||||
|
omit_columns: ['secret'],
|
||||||
|
filters: [{ column: 'age', operator: 'gte', value: 18 }],
|
||||||
|
sort: [{ column: 'name', direction: 'asc' }],
|
||||||
|
limit: 10,
|
||||||
|
offset: 0,
|
||||||
|
cursor_forward: 'abc123',
|
||||||
|
cursor_backward: 'xyz789',
|
||||||
|
fetch_row_number: '42',
|
||||||
|
parameters: [{ name: 'param1', value: 'val1', sequence: 1 }],
|
||||||
|
computedColumns: [{ name: 'full_name', expression: "first || ' ' || last" }],
|
||||||
|
customOperators: [{ name: 'custom', sql: "status = 'active'" }],
|
||||||
|
preload: [{
|
||||||
|
relation: 'Items',
|
||||||
|
columns: ['id', 'title'],
|
||||||
|
omit_columns: ['internal'],
|
||||||
|
sort: [{ column: 'id', direction: 'ASC' }],
|
||||||
|
recursive: true,
|
||||||
|
primary_key: 'id',
|
||||||
|
related_key: 'parent_id',
|
||||||
|
sql_joins: ['LEFT JOIN other ON other.id = items.other_id'],
|
||||||
|
join_aliases: ['other'],
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
expect(opts.omit_columns).toEqual(['secret']);
|
||||||
|
expect(opts.cursor_forward).toBe('abc123');
|
||||||
|
expect(opts.fetch_row_number).toBe('42');
|
||||||
|
expect(opts.parameters![0].sequence).toBe(1);
|
||||||
|
expect(opts.preload![0].recursive).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct a RequestBody with numeric id', () => {
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'read',
|
||||||
|
id: 42,
|
||||||
|
options: { limit: 10 },
|
||||||
|
};
|
||||||
|
expect(body.id).toBe(42);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct a RequestBody with string array id', () => {
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'delete',
|
||||||
|
id: ['1', '2', '3'],
|
||||||
|
};
|
||||||
|
expect(Array.isArray(body.id)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct Metadata with count and row_number', () => {
|
||||||
|
const meta: Metadata = {
|
||||||
|
total: 100,
|
||||||
|
count: 10,
|
||||||
|
filtered: 50,
|
||||||
|
limit: 10,
|
||||||
|
offset: 0,
|
||||||
|
row_number: 5,
|
||||||
|
};
|
||||||
|
expect(meta.count).toBe(10);
|
||||||
|
expect(meta.row_number).toBe(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct APIError with detail field', () => {
|
||||||
|
const err: APIError = {
|
||||||
|
code: 'not_found',
|
||||||
|
message: 'Record not found',
|
||||||
|
detail: 'The record with id 42 does not exist',
|
||||||
|
};
|
||||||
|
expect(err.detail).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should construct APIResponse with metadata', () => {
|
||||||
|
const resp: APIResponse<string[]> = {
|
||||||
|
success: true,
|
||||||
|
data: ['a', 'b'],
|
||||||
|
metadata: { total: 2, count: 2, filtered: 2, limit: 10, offset: 0 },
|
||||||
|
};
|
||||||
|
expect(resp.metadata?.count).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support all operator types', () => {
|
||||||
|
const operators: FilterOption['operator'][] = [
|
||||||
|
'eq', 'neq', 'gt', 'gte', 'lt', 'lte',
|
||||||
|
'like', 'ilike', 'in',
|
||||||
|
'contains', 'startswith', 'endswith',
|
||||||
|
'between', 'between_inclusive',
|
||||||
|
'is_null', 'is_not_null',
|
||||||
|
];
|
||||||
|
for (const op of operators) {
|
||||||
|
const f: FilterOption = { column: 'x', operator: op, value: 'v' };
|
||||||
|
expect(f.operator).toBe(op);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support PreloadOption with computed_ql and where', () => {
|
||||||
|
const preload: PreloadOption = {
|
||||||
|
relation: 'Details',
|
||||||
|
where: "status = 'active'",
|
||||||
|
computed_ql: { cql1: 'SUM(amount)' },
|
||||||
|
table_name: 'detail_table',
|
||||||
|
updatable: true,
|
||||||
|
foreign_key: 'detail_id',
|
||||||
|
recursive_child_key: 'parent_detail_id',
|
||||||
|
};
|
||||||
|
expect(preload.computed_ql?.cql1).toBe('SUM(amount)');
|
||||||
|
expect(preload.updatable).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support Parameter interface', () => {
|
||||||
|
const p: Parameter = { name: 'key', value: 'val' };
|
||||||
|
expect(p.name).toBe('key');
|
||||||
|
const p2: Parameter = { name: 'key2', value: 'val2', sequence: 5 };
|
||||||
|
expect(p2.sequence).toBe(5);
|
||||||
|
});
|
||||||
|
});
|
||||||
239
resolvespec-js/src/__tests__/headerspec.test.ts
Normal file
239
resolvespec-js/src/__tests__/headerspec.test.ts
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { buildHeaders, encodeHeaderValue, decodeHeaderValue, HeaderSpecClient, getHeaderSpecClient } from '../headerspec/client';
|
||||||
|
import type { Options, ClientConfig, APIResponse } from '../common/types';
|
||||||
|
|
||||||
|
describe('buildHeaders', () => {
|
||||||
|
it('should set X-Select-Fields for columns', () => {
|
||||||
|
const h = buildHeaders({ columns: ['id', 'name', 'email'] });
|
||||||
|
expect(h['X-Select-Fields']).toBe('id,name,email');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Not-Select-Fields for omit_columns', () => {
|
||||||
|
const h = buildHeaders({ omit_columns: ['secret', 'internal'] });
|
||||||
|
expect(h['X-Not-Select-Fields']).toBe('secret,internal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-FieldFilter for eq AND filters', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'status', operator: 'eq', value: 'active' }],
|
||||||
|
});
|
||||||
|
expect(h['X-FieldFilter-status']).toBe('active');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-SearchOp for non-eq AND filters', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'age', operator: 'gte', value: 18 }],
|
||||||
|
});
|
||||||
|
expect(h['X-SearchOp-greaterthanorequal-age']).toBe('18');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-SearchOr for OR filters', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'name', operator: 'contains', value: 'test', logic_operator: 'OR' }],
|
||||||
|
});
|
||||||
|
expect(h['X-SearchOr-contains-name']).toBe('test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Sort with direction prefixes', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
sort: [
|
||||||
|
{ column: 'name', direction: 'asc' },
|
||||||
|
{ column: 'created_at', direction: 'DESC' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(h['X-Sort']).toBe('+name,-created_at');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Limit and X-Offset', () => {
|
||||||
|
const h = buildHeaders({ limit: 25, offset: 50 });
|
||||||
|
expect(h['X-Limit']).toBe('25');
|
||||||
|
expect(h['X-Offset']).toBe('50');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set cursor pagination headers', () => {
|
||||||
|
const h = buildHeaders({ cursor_forward: 'abc', cursor_backward: 'xyz' });
|
||||||
|
expect(h['X-Cursor-Forward']).toBe('abc');
|
||||||
|
expect(h['X-Cursor-Backward']).toBe('xyz');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Preload with pipe-separated relations', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
preload: [
|
||||||
|
{ relation: 'Items', columns: ['id', 'name'] },
|
||||||
|
{ relation: 'Category' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(h['X-Preload']).toBe('Items:id,name|Category');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Fetch-RowNumber', () => {
|
||||||
|
const h = buildHeaders({ fetch_row_number: '42' });
|
||||||
|
expect(h['X-Fetch-RowNumber']).toBe('42');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-CQL-SEL for computed columns', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
computedColumns: [
|
||||||
|
{ name: 'total', expression: 'price * qty' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(h['X-CQL-SEL-total']).toBe('price * qty');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set X-Custom-SQL-W for custom operators', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
customOperators: [
|
||||||
|
{ name: 'active', sql: "status = 'active'" },
|
||||||
|
{ name: 'verified', sql: "verified = true" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(h['X-Custom-SQL-W']).toBe("status = 'active' AND verified = true");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty object for empty options', () => {
|
||||||
|
const h = buildHeaders({});
|
||||||
|
expect(Object.keys(h)).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle between filter with array value', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'price', operator: 'between', value: [10, 100] }],
|
||||||
|
});
|
||||||
|
expect(h['X-SearchOp-between-price']).toBe('10,100');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle is_null filter with null value', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'deleted_at', operator: 'is_null', value: null }],
|
||||||
|
});
|
||||||
|
expect(h['X-SearchOp-empty-deleted_at']).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle in filter with array value', () => {
|
||||||
|
const h = buildHeaders({
|
||||||
|
filters: [{ column: 'id', operator: 'in', value: [1, 2, 3] }],
|
||||||
|
});
|
||||||
|
expect(h['X-SearchOp-in-id']).toBe('1,2,3');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('encodeHeaderValue / decodeHeaderValue', () => {
|
||||||
|
it('should round-trip encode/decode', () => {
|
||||||
|
const original = 'some complex value with spaces & symbols!';
|
||||||
|
const encoded = encodeHeaderValue(original);
|
||||||
|
expect(encoded.startsWith('ZIP_')).toBe(true);
|
||||||
|
const decoded = decodeHeaderValue(encoded);
|
||||||
|
expect(decoded).toBe(original);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should decode __ prefixed values', () => {
|
||||||
|
const encoded = '__' + btoa('hello');
|
||||||
|
expect(decodeHeaderValue(encoded)).toBe('hello');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return plain values as-is', () => {
|
||||||
|
expect(decodeHeaderValue('plain')).toBe('plain');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('HeaderSpecClient', () => {
|
||||||
|
const config: ClientConfig = { baseUrl: 'http://localhost:3000', token: 'tok' };
|
||||||
|
|
||||||
|
function mockFetch<T>(data: APIResponse<T>, ok = true) {
|
||||||
|
return vi.fn().mockResolvedValue({
|
||||||
|
ok,
|
||||||
|
json: () => Promise.resolve(data),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('read() sends GET with headers from options', async () => {
|
||||||
|
globalThis.fetch = mockFetch({ success: true, data: [{ id: 1 }] });
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await client.read('public', 'users', undefined, {
|
||||||
|
columns: ['id', 'name'],
|
||||||
|
limit: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users');
|
||||||
|
expect(opts.method).toBe('GET');
|
||||||
|
expect(opts.headers['X-Select-Fields']).toBe('id,name');
|
||||||
|
expect(opts.headers['X-Limit']).toBe('10');
|
||||||
|
expect(opts.headers['Authorization']).toBe('Bearer tok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('read() with id appends to URL', async () => {
|
||||||
|
globalThis.fetch = mockFetch({ success: true, data: {} });
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await client.read('public', 'users', '42');
|
||||||
|
|
||||||
|
const [url] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/42');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('create() sends POST with body and headers', async () => {
|
||||||
|
globalThis.fetch = mockFetch({ success: true, data: { id: 1 } });
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await client.create('public', 'users', { name: 'Test' });
|
||||||
|
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(opts.method).toBe('POST');
|
||||||
|
expect(JSON.parse(opts.body)).toEqual({ name: 'Test' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('update() sends PUT with id in URL', async () => {
|
||||||
|
globalThis.fetch = mockFetch({ success: true, data: {} });
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await client.update('public', 'users', '1', { name: 'Updated' }, {
|
||||||
|
filters: [{ column: 'active', operator: 'eq', value: true }],
|
||||||
|
});
|
||||||
|
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/1');
|
||||||
|
expect(opts.method).toBe('PUT');
|
||||||
|
expect(opts.headers['X-FieldFilter-active']).toBe('true');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('delete() sends DELETE', async () => {
|
||||||
|
globalThis.fetch = mockFetch({ success: true, data: undefined as any });
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await client.delete('public', 'users', '1');
|
||||||
|
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/1');
|
||||||
|
expect(opts.method).toBe('DELETE');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws on non-ok response', async () => {
|
||||||
|
globalThis.fetch = mockFetch(
|
||||||
|
{ success: false, data: null as any, error: { code: 'err', message: 'fail' } },
|
||||||
|
false
|
||||||
|
);
|
||||||
|
const client = new HeaderSpecClient(config);
|
||||||
|
|
||||||
|
await expect(client.read('public', 'users')).rejects.toThrow('fail');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getHeaderSpecClient singleton', () => {
|
||||||
|
it('returns same instance for same baseUrl', () => {
|
||||||
|
const a = getHeaderSpecClient({ baseUrl: 'http://hs-singleton:3000' });
|
||||||
|
const b = getHeaderSpecClient({ baseUrl: 'http://hs-singleton:3000' });
|
||||||
|
expect(a).toBe(b);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns different instances for different baseUrls', () => {
|
||||||
|
const a = getHeaderSpecClient({ baseUrl: 'http://hs-singleton-a:3000' });
|
||||||
|
const b = getHeaderSpecClient({ baseUrl: 'http://hs-singleton-b:3000' });
|
||||||
|
expect(a).not.toBe(b);
|
||||||
|
});
|
||||||
|
});
|
||||||
178
resolvespec-js/src/__tests__/resolvespec.test.ts
Normal file
178
resolvespec-js/src/__tests__/resolvespec.test.ts
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { ResolveSpecClient, getResolveSpecClient } from '../resolvespec/client';
|
||||||
|
import type { ClientConfig, APIResponse } from '../common/types';
|
||||||
|
|
||||||
|
const config: ClientConfig = { baseUrl: 'http://localhost:3000', token: 'test-token' };
|
||||||
|
|
||||||
|
function mockFetchResponse<T>(data: APIResponse<T>, ok = true, status = 200) {
|
||||||
|
return vi.fn().mockResolvedValue({
|
||||||
|
ok,
|
||||||
|
status,
|
||||||
|
json: () => Promise.resolve(data),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('ResolveSpecClient', () => {
|
||||||
|
it('read() sends POST with operation read', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: [{ id: 1 }] };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
const result = await client.read('public', 'users', 1);
|
||||||
|
expect(result.success).toBe(true);
|
||||||
|
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/1');
|
||||||
|
expect(opts.method).toBe('POST');
|
||||||
|
expect(opts.headers['Authorization']).toBe('Bearer test-token');
|
||||||
|
|
||||||
|
const body = JSON.parse(opts.body);
|
||||||
|
expect(body.operation).toBe('read');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('read() with string array id puts id in body', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: [] };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await client.read('public', 'users', ['1', '2']);
|
||||||
|
const body = JSON.parse((globalThis.fetch as any).mock.calls[0][1].body);
|
||||||
|
expect(body.id).toEqual(['1', '2']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('read() passes options through', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: [] };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await client.read('public', 'users', undefined, {
|
||||||
|
columns: ['id', 'name'],
|
||||||
|
omit_columns: ['secret'],
|
||||||
|
filters: [{ column: 'active', operator: 'eq', value: true }],
|
||||||
|
sort: [{ column: 'name', direction: 'asc' }],
|
||||||
|
limit: 10,
|
||||||
|
offset: 0,
|
||||||
|
cursor_forward: 'cursor1',
|
||||||
|
fetch_row_number: '5',
|
||||||
|
});
|
||||||
|
|
||||||
|
const body = JSON.parse((globalThis.fetch as any).mock.calls[0][1].body);
|
||||||
|
expect(body.options.columns).toEqual(['id', 'name']);
|
||||||
|
expect(body.options.omit_columns).toEqual(['secret']);
|
||||||
|
expect(body.options.cursor_forward).toBe('cursor1');
|
||||||
|
expect(body.options.fetch_row_number).toBe('5');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('create() sends POST with operation create and data', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: { id: 1, name: 'Test' } };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
const result = await client.create('public', 'users', { name: 'Test' });
|
||||||
|
expect(result.data.name).toBe('Test');
|
||||||
|
|
||||||
|
const body = JSON.parse((globalThis.fetch as any).mock.calls[0][1].body);
|
||||||
|
expect(body.operation).toBe('create');
|
||||||
|
expect(body.data.name).toBe('Test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('update() with single id puts id in URL', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: { id: 1 } };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await client.update('public', 'users', { name: 'Updated' }, 1);
|
||||||
|
const [url] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('update() with string array id puts id in body', async () => {
|
||||||
|
const response: APIResponse = { success: true, data: {} };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await client.update('public', 'users', { active: false }, ['1', '2']);
|
||||||
|
const body = JSON.parse((globalThis.fetch as any).mock.calls[0][1].body);
|
||||||
|
expect(body.id).toEqual(['1', '2']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('delete() sends POST with operation delete', async () => {
|
||||||
|
const response: APIResponse<void> = { success: true, data: undefined as any };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await client.delete('public', 'users', 1);
|
||||||
|
const [url, opts] = (globalThis.fetch as any).mock.calls[0];
|
||||||
|
expect(url).toBe('http://localhost:3000/public/users/1');
|
||||||
|
|
||||||
|
const body = JSON.parse(opts.body);
|
||||||
|
expect(body.operation).toBe('delete');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getMetadata() sends GET request', async () => {
|
||||||
|
const response: APIResponse = {
|
||||||
|
success: true,
|
||||||
|
data: { schema: 'public', table: 'users', columns: [], relations: [] },
|
||||||
|
};
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
const result = await client.getMetadata('public', 'users');
|
||||||
|
expect(result.data.table).toBe('users');
|
||||||
|
|
||||||
|
const opts = (globalThis.fetch as any).mock.calls[0][1];
|
||||||
|
expect(opts.method).toBe('GET');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws on non-ok response', async () => {
|
||||||
|
const errorResp = {
|
||||||
|
success: false,
|
||||||
|
data: null,
|
||||||
|
error: { code: 'not_found', message: 'Not found' },
|
||||||
|
};
|
||||||
|
globalThis.fetch = mockFetchResponse(errorResp as any, false, 404);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await expect(client.read('public', 'users', 999)).rejects.toThrow('Not found');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws generic error when no error message', async () => {
|
||||||
|
globalThis.fetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: false,
|
||||||
|
status: 500,
|
||||||
|
json: () => Promise.resolve({ success: false, data: null }),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(config);
|
||||||
|
await expect(client.read('public', 'users')).rejects.toThrow('An error occurred');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('config without token omits Authorization header', async () => {
|
||||||
|
const noAuthConfig: ClientConfig = { baseUrl: 'http://localhost:3000' };
|
||||||
|
const response: APIResponse = { success: true, data: [] };
|
||||||
|
globalThis.fetch = mockFetchResponse(response);
|
||||||
|
|
||||||
|
const client = new ResolveSpecClient(noAuthConfig);
|
||||||
|
await client.read('public', 'users');
|
||||||
|
const opts = (globalThis.fetch as any).mock.calls[0][1];
|
||||||
|
expect(opts.headers['Authorization']).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getResolveSpecClient singleton', () => {
|
||||||
|
it('returns same instance for same baseUrl', () => {
|
||||||
|
const a = getResolveSpecClient({ baseUrl: 'http://singleton-test:3000' });
|
||||||
|
const b = getResolveSpecClient({ baseUrl: 'http://singleton-test:3000' });
|
||||||
|
expect(a).toBe(b);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns different instances for different baseUrls', () => {
|
||||||
|
const a = getResolveSpecClient({ baseUrl: 'http://singleton-a:3000' });
|
||||||
|
const b = getResolveSpecClient({ baseUrl: 'http://singleton-b:3000' });
|
||||||
|
expect(a).not.toBe(b);
|
||||||
|
});
|
||||||
|
});
|
||||||
336
resolvespec-js/src/__tests__/websocketspec.test.ts
Normal file
336
resolvespec-js/src/__tests__/websocketspec.test.ts
Normal file
@@ -0,0 +1,336 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { WebSocketClient, getWebSocketClient } from '../websocketspec/client';
|
||||||
|
import type { WebSocketClientConfig } from '../websocketspec/types';
|
||||||
|
|
||||||
|
// Mock uuid
|
||||||
|
vi.mock('uuid', () => ({
|
||||||
|
v4: vi.fn(() => 'mock-uuid-1234'),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock WebSocket
|
||||||
|
class MockWebSocket {
|
||||||
|
static OPEN = 1;
|
||||||
|
static CLOSED = 3;
|
||||||
|
|
||||||
|
url: string;
|
||||||
|
readyState = MockWebSocket.OPEN;
|
||||||
|
onopen: ((ev: any) => void) | null = null;
|
||||||
|
onclose: ((ev: any) => void) | null = null;
|
||||||
|
onmessage: ((ev: any) => void) | null = null;
|
||||||
|
onerror: ((ev: any) => void) | null = null;
|
||||||
|
|
||||||
|
private sentMessages: string[] = [];
|
||||||
|
|
||||||
|
constructor(url: string) {
|
||||||
|
this.url = url;
|
||||||
|
// Simulate async open
|
||||||
|
setTimeout(() => {
|
||||||
|
this.onopen?.({});
|
||||||
|
}, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
send(data: string) {
|
||||||
|
this.sentMessages.push(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
close() {
|
||||||
|
this.readyState = MockWebSocket.CLOSED;
|
||||||
|
this.onclose?.({ code: 1000, reason: 'Normal closure' } as any);
|
||||||
|
}
|
||||||
|
|
||||||
|
getSentMessages(): any[] {
|
||||||
|
return this.sentMessages.map((m) => JSON.parse(m));
|
||||||
|
}
|
||||||
|
|
||||||
|
simulateMessage(data: any) {
|
||||||
|
this.onmessage?.({ data: JSON.stringify(data) });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mockWsInstance: MockWebSocket | null = null;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockWsInstance = null;
|
||||||
|
(globalThis as any).WebSocket = class extends MockWebSocket {
|
||||||
|
constructor(url: string) {
|
||||||
|
super(url);
|
||||||
|
mockWsInstance = this;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
(globalThis as any).WebSocket.OPEN = MockWebSocket.OPEN;
|
||||||
|
(globalThis as any).WebSocket.CLOSED = MockWebSocket.CLOSED;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('WebSocketClient', () => {
|
||||||
|
const wsConfig: WebSocketClientConfig = {
|
||||||
|
url: 'ws://localhost:8080',
|
||||||
|
reconnect: false,
|
||||||
|
heartbeatInterval: 60000,
|
||||||
|
};
|
||||||
|
|
||||||
|
it('should connect and set state to connected', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
expect(client.getState()).toBe('connected');
|
||||||
|
expect(client.isConnected()).toBe(true);
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should disconnect and set state to disconnected', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
client.disconnect();
|
||||||
|
expect(client.getState()).toBe('disconnected');
|
||||||
|
expect(client.isConnected()).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should send read request', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const readPromise = client.read('users', {
|
||||||
|
schema: 'public',
|
||||||
|
filters: [{ column: 'active', operator: 'eq', value: true }],
|
||||||
|
limit: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Simulate server response
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
expect(sent.length).toBe(1);
|
||||||
|
expect(sent[0].operation).toBe('read');
|
||||||
|
expect(sent[0].entity).toBe('users');
|
||||||
|
expect(sent[0].options.filters[0].column).toBe('active');
|
||||||
|
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
data: [{ id: 1 }],
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await readPromise;
|
||||||
|
expect(result).toEqual([{ id: 1 }]);
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should send create request', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const createPromise = client.create('users', { name: 'Test' }, { schema: 'public' });
|
||||||
|
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
expect(sent[0].operation).toBe('create');
|
||||||
|
expect(sent[0].data.name).toBe('Test');
|
||||||
|
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
data: { id: 1, name: 'Test' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await createPromise;
|
||||||
|
expect(result.name).toBe('Test');
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should send update request with record_id', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const updatePromise = client.update('users', '1', { name: 'Updated' });
|
||||||
|
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
expect(sent[0].operation).toBe('update');
|
||||||
|
expect(sent[0].record_id).toBe('1');
|
||||||
|
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
data: { id: 1, name: 'Updated' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await updatePromise;
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should send delete request', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const deletePromise = client.delete('users', '1');
|
||||||
|
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
expect(sent[0].operation).toBe('delete');
|
||||||
|
expect(sent[0].record_id).toBe('1');
|
||||||
|
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await deletePromise;
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject on failed request', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const readPromise = client.read('users');
|
||||||
|
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: false,
|
||||||
|
error: { code: 'not_found', message: 'Not found' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(readPromise).rejects.toThrow('Not found');
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle subscriptions', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
const callback = vi.fn();
|
||||||
|
const subPromise = client.subscribe('users', callback, {
|
||||||
|
schema: 'public',
|
||||||
|
});
|
||||||
|
|
||||||
|
const sent = mockWsInstance!.getSentMessages();
|
||||||
|
expect(sent[0].type).toBe('subscription');
|
||||||
|
expect(sent[0].operation).toBe('subscribe');
|
||||||
|
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
data: { subscription_id: 'sub-1' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const subId = await subPromise;
|
||||||
|
expect(subId).toBe('sub-1');
|
||||||
|
expect(client.getSubscriptions()).toHaveLength(1);
|
||||||
|
|
||||||
|
// Simulate notification
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
type: 'notification',
|
||||||
|
operation: 'create',
|
||||||
|
subscription_id: 'sub-1',
|
||||||
|
entity: 'users',
|
||||||
|
data: { id: 2, name: 'New' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(callback).toHaveBeenCalledTimes(1);
|
||||||
|
expect(callback.mock.calls[0][0].data.id).toBe(2);
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle unsubscribe', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Subscribe first
|
||||||
|
const subPromise = client.subscribe('users', vi.fn());
|
||||||
|
let sent = mockWsInstance!.getSentMessages();
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[0].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
data: { subscription_id: 'sub-1' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
await subPromise;
|
||||||
|
|
||||||
|
// Unsubscribe
|
||||||
|
const unsubPromise = client.unsubscribe('sub-1');
|
||||||
|
sent = mockWsInstance!.getSentMessages();
|
||||||
|
mockWsInstance!.simulateMessage({
|
||||||
|
id: sent[sent.length - 1].id,
|
||||||
|
type: 'response',
|
||||||
|
success: true,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await unsubPromise;
|
||||||
|
expect(client.getSubscriptions()).toHaveLength(0);
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should emit events', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
const connectCb = vi.fn();
|
||||||
|
const stateChangeCb = vi.fn();
|
||||||
|
|
||||||
|
client.on('connect', connectCb);
|
||||||
|
client.on('stateChange', stateChangeCb);
|
||||||
|
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
expect(connectCb).toHaveBeenCalledTimes(1);
|
||||||
|
expect(stateChangeCb).toHaveBeenCalled();
|
||||||
|
|
||||||
|
client.off('connect');
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject when sending without connection', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await expect(client.read('users')).rejects.toThrow('WebSocket is not connected');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle pong messages without error', async () => {
|
||||||
|
const client = new WebSocketClient(wsConfig);
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Should not throw
|
||||||
|
mockWsInstance!.simulateMessage({ type: 'pong' });
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle malformed messages gracefully', async () => {
|
||||||
|
const client = new WebSocketClient({ ...wsConfig, debug: false });
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Simulate non-JSON message
|
||||||
|
mockWsInstance!.onmessage?.({ data: 'not-json' } as any);
|
||||||
|
|
||||||
|
client.disconnect();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getWebSocketClient singleton', () => {
|
||||||
|
it('returns same instance for same url', () => {
|
||||||
|
const a = getWebSocketClient({ url: 'ws://ws-singleton:8080' });
|
||||||
|
const b = getWebSocketClient({ url: 'ws://ws-singleton:8080' });
|
||||||
|
expect(a).toBe(b);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns different instances for different urls', () => {
|
||||||
|
const a = getWebSocketClient({ url: 'ws://ws-singleton-a:8080' });
|
||||||
|
const b = getWebSocketClient({ url: 'ws://ws-singleton-b:8080' });
|
||||||
|
expect(a).not.toBe(b);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
import { ClientConfig, APIResponse, TableMetadata, Options, RequestBody } from "./types";
|
|
||||||
|
|
||||||
// Helper functions
|
|
||||||
const getHeaders = (options?: Record<string,any>): HeadersInit => {
|
|
||||||
const headers: HeadersInit = {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
};
|
|
||||||
|
|
||||||
if (options?.token) {
|
|
||||||
headers['Authorization'] = `Bearer ${options.token}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
return headers;
|
|
||||||
};
|
|
||||||
|
|
||||||
const buildUrl = (config: ClientConfig, schema: string, entity: string, id?: string): string => {
|
|
||||||
let url = `${config.baseUrl}/${schema}/${entity}`;
|
|
||||||
if (id) {
|
|
||||||
url += `/${id}`;
|
|
||||||
}
|
|
||||||
return url;
|
|
||||||
};
|
|
||||||
|
|
||||||
const fetchWithError = async <T>(url: string, options: RequestInit): Promise<APIResponse<T>> => {
|
|
||||||
try {
|
|
||||||
const response = await fetch(url, options);
|
|
||||||
const data = await response.json();
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(data.error?.message || 'An error occurred');
|
|
||||||
}
|
|
||||||
|
|
||||||
return data;
|
|
||||||
} catch (error) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// API Functions
|
|
||||||
export const getMetadata = async (
|
|
||||||
config: ClientConfig,
|
|
||||||
schema: string,
|
|
||||||
entity: string
|
|
||||||
): Promise<APIResponse<TableMetadata>> => {
|
|
||||||
const url = buildUrl(config, schema, entity);
|
|
||||||
return fetchWithError<TableMetadata>(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getHeaders(config),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const read = async <T = any>(
|
|
||||||
config: ClientConfig,
|
|
||||||
schema: string,
|
|
||||||
entity: string,
|
|
||||||
id?: string,
|
|
||||||
options?: Options
|
|
||||||
): Promise<APIResponse<T>> => {
|
|
||||||
const url = buildUrl(config, schema, entity, id);
|
|
||||||
const body: RequestBody = {
|
|
||||||
operation: 'read',
|
|
||||||
options,
|
|
||||||
};
|
|
||||||
|
|
||||||
return fetchWithError<T>(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(config),
|
|
||||||
body: JSON.stringify(body),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const create = async <T = any>(
|
|
||||||
config: ClientConfig,
|
|
||||||
schema: string,
|
|
||||||
entity: string,
|
|
||||||
data: any | any[],
|
|
||||||
options?: Options
|
|
||||||
): Promise<APIResponse<T>> => {
|
|
||||||
const url = buildUrl(config, schema, entity);
|
|
||||||
const body: RequestBody = {
|
|
||||||
operation: 'create',
|
|
||||||
data,
|
|
||||||
options,
|
|
||||||
};
|
|
||||||
|
|
||||||
return fetchWithError<T>(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(config),
|
|
||||||
body: JSON.stringify(body),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const update = async <T = any>(
|
|
||||||
config: ClientConfig,
|
|
||||||
schema: string,
|
|
||||||
entity: string,
|
|
||||||
data: any | any[],
|
|
||||||
id?: string | string[],
|
|
||||||
options?: Options
|
|
||||||
): Promise<APIResponse<T>> => {
|
|
||||||
const url = buildUrl(config, schema, entity, typeof id === 'string' ? id : undefined);
|
|
||||||
const body: RequestBody = {
|
|
||||||
operation: 'update',
|
|
||||||
id: typeof id === 'string' ? undefined : id,
|
|
||||||
data,
|
|
||||||
options,
|
|
||||||
};
|
|
||||||
|
|
||||||
return fetchWithError<T>(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(config),
|
|
||||||
body: JSON.stringify(body),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const deleteEntity = async (
|
|
||||||
config: ClientConfig,
|
|
||||||
schema: string,
|
|
||||||
entity: string,
|
|
||||||
id: string
|
|
||||||
): Promise<APIResponse<void>> => {
|
|
||||||
const url = buildUrl(config, schema, entity, id);
|
|
||||||
const body: RequestBody = {
|
|
||||||
operation: 'delete',
|
|
||||||
};
|
|
||||||
|
|
||||||
return fetchWithError<void>(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(config),
|
|
||||||
body: JSON.stringify(body),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
1
resolvespec-js/src/common/index.ts
Normal file
1
resolvespec-js/src/common/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from './types';
|
||||||
129
resolvespec-js/src/common/types.ts
Normal file
129
resolvespec-js/src/common/types.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
// Types aligned with Go pkg/common/types.go
|
||||||
|
|
||||||
|
export type Operator =
|
||||||
|
| 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte'
|
||||||
|
| 'like' | 'ilike' | 'in'
|
||||||
|
| 'contains' | 'startswith' | 'endswith'
|
||||||
|
| 'between' | 'between_inclusive'
|
||||||
|
| 'is_null' | 'is_not_null';
|
||||||
|
|
||||||
|
export type Operation = 'read' | 'create' | 'update' | 'delete';
|
||||||
|
export type SortDirection = 'asc' | 'desc' | 'ASC' | 'DESC';
|
||||||
|
|
||||||
|
export interface Parameter {
|
||||||
|
name: string;
|
||||||
|
value: string;
|
||||||
|
sequence?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PreloadOption {
|
||||||
|
relation: string;
|
||||||
|
table_name?: string;
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
filters?: FilterOption[];
|
||||||
|
where?: string;
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
updatable?: boolean;
|
||||||
|
computed_ql?: Record<string, string>;
|
||||||
|
recursive?: boolean;
|
||||||
|
// Relationship keys
|
||||||
|
primary_key?: string;
|
||||||
|
related_key?: string;
|
||||||
|
foreign_key?: string;
|
||||||
|
recursive_child_key?: string;
|
||||||
|
// Custom SQL JOINs
|
||||||
|
sql_joins?: string[];
|
||||||
|
join_aliases?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FilterOption {
|
||||||
|
column: string;
|
||||||
|
operator: Operator | string;
|
||||||
|
value: any;
|
||||||
|
logic_operator?: 'AND' | 'OR';
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SortOption {
|
||||||
|
column: string;
|
||||||
|
direction: SortDirection;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CustomOperator {
|
||||||
|
name: string;
|
||||||
|
sql: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ComputedColumn {
|
||||||
|
name: string;
|
||||||
|
expression: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Options {
|
||||||
|
preload?: PreloadOption[];
|
||||||
|
columns?: string[];
|
||||||
|
omit_columns?: string[];
|
||||||
|
filters?: FilterOption[];
|
||||||
|
sort?: SortOption[];
|
||||||
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
|
customOperators?: CustomOperator[];
|
||||||
|
computedColumns?: ComputedColumn[];
|
||||||
|
parameters?: Parameter[];
|
||||||
|
cursor_forward?: string;
|
||||||
|
cursor_backward?: string;
|
||||||
|
fetch_row_number?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RequestBody {
|
||||||
|
operation: Operation;
|
||||||
|
id?: number | string | string[];
|
||||||
|
data?: any | any[];
|
||||||
|
options?: Options;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Metadata {
|
||||||
|
total: number;
|
||||||
|
count: number;
|
||||||
|
filtered: number;
|
||||||
|
limit: number;
|
||||||
|
offset: number;
|
||||||
|
row_number?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface APIError {
|
||||||
|
code: string;
|
||||||
|
message: string;
|
||||||
|
details?: any;
|
||||||
|
detail?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface APIResponse<T = any> {
|
||||||
|
success: boolean;
|
||||||
|
data: T;
|
||||||
|
metadata?: Metadata;
|
||||||
|
error?: APIError;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Column {
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
is_nullable: boolean;
|
||||||
|
is_primary: boolean;
|
||||||
|
is_unique: boolean;
|
||||||
|
has_index: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TableMetadata {
|
||||||
|
schema: string;
|
||||||
|
table: string;
|
||||||
|
columns: Column[];
|
||||||
|
relations: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ClientConfig {
|
||||||
|
baseUrl: string;
|
||||||
|
token?: string;
|
||||||
|
}
|
||||||
@@ -1,68 +0,0 @@
|
|||||||
import { getMetadata, read, create, update, deleteEntity } from "./api";
|
|
||||||
import { ClientConfig } from "./types";
|
|
||||||
|
|
||||||
// Usage Examples
|
|
||||||
const config: ClientConfig = {
|
|
||||||
baseUrl: 'http://api.example.com/v1',
|
|
||||||
token: 'your-token-here'
|
|
||||||
};
|
|
||||||
|
|
||||||
// Example usage
|
|
||||||
const examples = async () => {
|
|
||||||
// Get metadata
|
|
||||||
const metadata = await getMetadata(config, 'test', 'employees');
|
|
||||||
|
|
||||||
|
|
||||||
// Read with relations
|
|
||||||
const employees = await read(config, 'test', 'employees', undefined, {
|
|
||||||
preload: [
|
|
||||||
{
|
|
||||||
relation: 'department',
|
|
||||||
columns: ['id', 'name']
|
|
||||||
}
|
|
||||||
],
|
|
||||||
filters: [
|
|
||||||
{
|
|
||||||
column: 'status',
|
|
||||||
operator: 'eq',
|
|
||||||
value: 'active'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create single record
|
|
||||||
const newEmployee = await create(config, 'test', 'employees', {
|
|
||||||
first_name: 'John',
|
|
||||||
last_name: 'Doe',
|
|
||||||
email: 'john@example.com'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Bulk create
|
|
||||||
const newEmployees = await create(config, 'test', 'employees', [
|
|
||||||
{
|
|
||||||
first_name: 'Jane',
|
|
||||||
last_name: 'Smith',
|
|
||||||
email: 'jane@example.com'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
first_name: 'Bob',
|
|
||||||
last_name: 'Johnson',
|
|
||||||
email: 'bob@example.com'
|
|
||||||
}
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Update single record
|
|
||||||
const updatedEmployee = await update(config, 'test', 'employees',
|
|
||||||
{ status: 'inactive' },
|
|
||||||
'emp123'
|
|
||||||
);
|
|
||||||
|
|
||||||
// Bulk update
|
|
||||||
const updatedEmployees = await update(config, 'test', 'employees',
|
|
||||||
{ department_id: 'dept2' },
|
|
||||||
['emp1', 'emp2', 'emp3']
|
|
||||||
);
|
|
||||||
|
|
||||||
// Delete
|
|
||||||
await deleteEntity(config, 'test', 'employees', 'emp123');
|
|
||||||
};
|
|
||||||
345
resolvespec-js/src/headerspec/client.ts
Normal file
345
resolvespec-js/src/headerspec/client.ts
Normal file
@@ -0,0 +1,345 @@
|
|||||||
|
import type {
|
||||||
|
APIResponse,
|
||||||
|
ClientConfig,
|
||||||
|
CustomOperator,
|
||||||
|
FilterOption,
|
||||||
|
Options,
|
||||||
|
PreloadOption,
|
||||||
|
SortOption,
|
||||||
|
} from "../common/types";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode a value with base64 and ZIP_ prefix for complex header values.
|
||||||
|
*/
|
||||||
|
export function encodeHeaderValue(value: string): string {
|
||||||
|
if (typeof btoa === "function") {
|
||||||
|
return "ZIP_" + btoa(value);
|
||||||
|
}
|
||||||
|
return "ZIP_" + Buffer.from(value, "utf-8").toString("base64");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decode a header value that may be base64 encoded with ZIP_ or __ prefix.
|
||||||
|
*/
|
||||||
|
export function decodeHeaderValue(value: string): string {
|
||||||
|
let code = value;
|
||||||
|
|
||||||
|
if (code.startsWith("ZIP_")) {
|
||||||
|
code = code.slice(4).replace(/[\n\r ]/g, "");
|
||||||
|
code = decodeBase64(code);
|
||||||
|
} else if (code.startsWith("__")) {
|
||||||
|
code = code.slice(2).replace(/[\n\r ]/g, "");
|
||||||
|
code = decodeBase64(code);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle nested encoding
|
||||||
|
if (code.startsWith("ZIP_") || code.startsWith("__")) {
|
||||||
|
code = decodeHeaderValue(code);
|
||||||
|
}
|
||||||
|
|
||||||
|
return code;
|
||||||
|
}
|
||||||
|
|
||||||
|
function decodeBase64(str: string): string {
|
||||||
|
if (typeof atob === "function") {
|
||||||
|
return atob(str);
|
||||||
|
}
|
||||||
|
return Buffer.from(str, "base64").toString("utf-8");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build HTTP headers from Options, matching Go's restheadspec handler conventions.
|
||||||
|
*
|
||||||
|
* Header mapping:
|
||||||
|
* - X-Select-Fields: comma-separated columns
|
||||||
|
* - X-Not-Select-Fields: comma-separated omit_columns
|
||||||
|
* - X-FieldFilter-{col}: exact match (eq)
|
||||||
|
* - X-SearchOp-{operator}-{col}: AND filter
|
||||||
|
* - X-SearchOr-{operator}-{col}: OR filter
|
||||||
|
* - X-Sort: +col (asc), -col (desc)
|
||||||
|
* - X-Limit, X-Offset: pagination
|
||||||
|
* - X-Cursor-Forward, X-Cursor-Backward: cursor pagination
|
||||||
|
* - X-Preload: RelationName:field1,field2 pipe-separated
|
||||||
|
* - X-Fetch-RowNumber: row number fetch
|
||||||
|
* - X-CQL-SEL-{col}: computed columns
|
||||||
|
* - X-Custom-SQL-W: custom operators (AND)
|
||||||
|
*/
|
||||||
|
export function buildHeaders(options: Options): Record<string, string> {
|
||||||
|
const headers: Record<string, string> = {};
|
||||||
|
|
||||||
|
// Column selection
|
||||||
|
if (options.columns?.length) {
|
||||||
|
headers["X-Select-Fields"] = options.columns.join(",");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.omit_columns?.length) {
|
||||||
|
headers["X-Not-Select-Fields"] = options.omit_columns.join(",");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filters
|
||||||
|
if (options.filters?.length) {
|
||||||
|
for (const filter of options.filters) {
|
||||||
|
const logicOp = filter.logic_operator ?? "AND";
|
||||||
|
const op = mapOperatorToHeaderOp(filter.operator);
|
||||||
|
const valueStr = formatFilterValue(filter);
|
||||||
|
|
||||||
|
if (filter.operator === "eq" && logicOp === "AND") {
|
||||||
|
// Simple field filter shorthand
|
||||||
|
headers[`X-FieldFilter-${filter.column}`] = valueStr;
|
||||||
|
} else if (logicOp === "OR") {
|
||||||
|
headers[`X-SearchOr-${op}-${filter.column}`] = valueStr;
|
||||||
|
} else {
|
||||||
|
headers[`X-SearchOp-${op}-${filter.column}`] = valueStr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort
|
||||||
|
if (options.sort?.length) {
|
||||||
|
const sortParts = options.sort.map((s: SortOption) => {
|
||||||
|
const dir = s.direction.toUpperCase();
|
||||||
|
return dir === "DESC" ? `-${s.column}` : `+${s.column}`;
|
||||||
|
});
|
||||||
|
headers["X-Sort"] = sortParts.join(",");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pagination
|
||||||
|
if (options.limit !== undefined) {
|
||||||
|
headers["X-Limit"] = String(options.limit);
|
||||||
|
}
|
||||||
|
if (options.offset !== undefined) {
|
||||||
|
headers["X-Offset"] = String(options.offset);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cursor pagination
|
||||||
|
if (options.cursor_forward) {
|
||||||
|
headers["X-Cursor-Forward"] = options.cursor_forward;
|
||||||
|
}
|
||||||
|
if (options.cursor_backward) {
|
||||||
|
headers["X-Cursor-Backward"] = options.cursor_backward;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Preload
|
||||||
|
if (options.preload?.length) {
|
||||||
|
const parts = options.preload.map((p: PreloadOption) => {
|
||||||
|
if (p.columns?.length) {
|
||||||
|
return `${p.relation}:${p.columns.join(",")}`;
|
||||||
|
}
|
||||||
|
return p.relation;
|
||||||
|
});
|
||||||
|
headers["X-Preload"] = parts.join("|");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch row number
|
||||||
|
if (options.fetch_row_number) {
|
||||||
|
headers["X-Fetch-RowNumber"] = options.fetch_row_number;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Computed columns
|
||||||
|
if (options.computedColumns?.length) {
|
||||||
|
for (const cc of options.computedColumns) {
|
||||||
|
headers[`X-CQL-SEL-${cc.name}`] = cc.expression;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Custom operators -> X-Custom-SQL-W
|
||||||
|
if (options.customOperators?.length) {
|
||||||
|
const sqlParts = options.customOperators.map(
|
||||||
|
(co: CustomOperator) => co.sql,
|
||||||
|
);
|
||||||
|
headers["X-Custom-SQL-W"] = sqlParts.join(" AND ");
|
||||||
|
}
|
||||||
|
|
||||||
|
return headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapOperatorToHeaderOp(operator: string): string {
|
||||||
|
switch (operator) {
|
||||||
|
case "eq":
|
||||||
|
return "equals";
|
||||||
|
case "neq":
|
||||||
|
return "notequals";
|
||||||
|
case "gt":
|
||||||
|
return "greaterthan";
|
||||||
|
case "gte":
|
||||||
|
return "greaterthanorequal";
|
||||||
|
case "lt":
|
||||||
|
return "lessthan";
|
||||||
|
case "lte":
|
||||||
|
return "lessthanorequal";
|
||||||
|
case "like":
|
||||||
|
case "ilike":
|
||||||
|
case "contains":
|
||||||
|
return "contains";
|
||||||
|
case "startswith":
|
||||||
|
return "beginswith";
|
||||||
|
case "endswith":
|
||||||
|
return "endswith";
|
||||||
|
case "in":
|
||||||
|
return "in";
|
||||||
|
case "between":
|
||||||
|
return "between";
|
||||||
|
case "between_inclusive":
|
||||||
|
return "betweeninclusive";
|
||||||
|
case "is_null":
|
||||||
|
return "empty";
|
||||||
|
case "is_not_null":
|
||||||
|
return "notempty";
|
||||||
|
default:
|
||||||
|
return operator;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatFilterValue(filter: FilterOption): string {
|
||||||
|
if (filter.value === null || filter.value === undefined) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
if (Array.isArray(filter.value)) {
|
||||||
|
return filter.value.join(",");
|
||||||
|
}
|
||||||
|
return String(filter.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
const instances = new Map<string, HeaderSpecClient>();
|
||||||
|
|
||||||
|
export function getHeaderSpecClient(config: ClientConfig): HeaderSpecClient {
|
||||||
|
const key = config.baseUrl;
|
||||||
|
let instance = instances.get(key);
|
||||||
|
if (!instance) {
|
||||||
|
instance = new HeaderSpecClient(config);
|
||||||
|
instances.set(key, instance);
|
||||||
|
}
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* HeaderSpec REST client.
|
||||||
|
* Sends query options via HTTP headers instead of request body, matching the Go restheadspec handler.
|
||||||
|
*
|
||||||
|
* HTTP methods: GET=read, POST=create, PUT=update, DELETE=delete
|
||||||
|
*/
|
||||||
|
export class HeaderSpecClient {
|
||||||
|
private config: ClientConfig;
|
||||||
|
|
||||||
|
constructor(config: ClientConfig) {
|
||||||
|
this.config = config;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildUrl(schema: string, entity: string, id?: string): string {
|
||||||
|
let url = `${this.config.baseUrl}/${schema}/${entity}`;
|
||||||
|
if (id) {
|
||||||
|
url += `/${id}`;
|
||||||
|
}
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
|
private baseHeaders(): Record<string, string> {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
};
|
||||||
|
if (this.config.token) {
|
||||||
|
headers["Authorization"] = `Bearer ${this.config.token}`;
|
||||||
|
}
|
||||||
|
return headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async fetchWithError<T>(
|
||||||
|
url: string,
|
||||||
|
init: RequestInit,
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const response = await fetch(url, init);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(
|
||||||
|
data.error?.message ||
|
||||||
|
`${response.statusText} ` + `(${response.status})`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
data: data,
|
||||||
|
success: true,
|
||||||
|
error: data.error ? data.error : undefined,
|
||||||
|
metadata: {
|
||||||
|
count: response.headers.get("content-range")
|
||||||
|
? Number(response.headers.get("content-range")?.split("/")[1])
|
||||||
|
: 0,
|
||||||
|
total: response.headers.get("content-range")
|
||||||
|
? Number(response.headers.get("content-range")?.split("/")[1])
|
||||||
|
: 0,
|
||||||
|
filtered: response.headers.get("content-range")
|
||||||
|
? Number(response.headers.get("content-range")?.split("/")[1])
|
||||||
|
: 0,
|
||||||
|
offset: response.headers.get("content-range")
|
||||||
|
? Number(
|
||||||
|
response.headers
|
||||||
|
.get("content-range")
|
||||||
|
?.split("/")[0]
|
||||||
|
.split("-")[0],
|
||||||
|
)
|
||||||
|
: 0,
|
||||||
|
limit: response.headers.get("x-limit")
|
||||||
|
? Number(response.headers.get("x-limit"))
|
||||||
|
: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async read<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
id?: string,
|
||||||
|
options?: Options,
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const url = this.buildUrl(schema, entity, id);
|
||||||
|
const optHeaders = options ? buildHeaders(options) : {};
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: "GET",
|
||||||
|
headers: { ...this.baseHeaders(), ...optHeaders },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async create<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
data: any,
|
||||||
|
options?: Options,
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const url = this.buildUrl(schema, entity);
|
||||||
|
const optHeaders = options ? buildHeaders(options) : {};
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { ...this.baseHeaders(), ...optHeaders },
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async update<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
id: string,
|
||||||
|
data: any,
|
||||||
|
options?: Options,
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const url = this.buildUrl(schema, entity, id);
|
||||||
|
const optHeaders = options ? buildHeaders(options) : {};
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: { ...this.baseHeaders(), ...optHeaders },
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
id: string,
|
||||||
|
): Promise<APIResponse<void>> {
|
||||||
|
const url = this.buildUrl(schema, entity, id);
|
||||||
|
return this.fetchWithError<void>(url, {
|
||||||
|
method: "DELETE",
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
7
resolvespec-js/src/headerspec/index.ts
Normal file
7
resolvespec-js/src/headerspec/index.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
export {
|
||||||
|
HeaderSpecClient,
|
||||||
|
getHeaderSpecClient,
|
||||||
|
buildHeaders,
|
||||||
|
encodeHeaderValue,
|
||||||
|
decodeHeaderValue,
|
||||||
|
} from './client';
|
||||||
@@ -1,7 +1,11 @@
|
|||||||
// Types
|
// Common types
|
||||||
export * from './types';
|
export * from './common';
|
||||||
export * from './websocket-types';
|
|
||||||
|
|
||||||
// WebSocket Client
|
// REST client (ResolveSpec)
|
||||||
export { WebSocketClient } from './websocket-client';
|
export * from './resolvespec';
|
||||||
export type { WebSocketClient as default } from './websocket-client';
|
|
||||||
|
// WebSocket client
|
||||||
|
export * from './websocketspec';
|
||||||
|
|
||||||
|
// HeaderSpec client
|
||||||
|
export * from './headerspec';
|
||||||
|
|||||||
141
resolvespec-js/src/resolvespec/client.ts
Normal file
141
resolvespec-js/src/resolvespec/client.ts
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
import type { ClientConfig, APIResponse, TableMetadata, Options, RequestBody } from '../common/types';
|
||||||
|
|
||||||
|
const instances = new Map<string, ResolveSpecClient>();
|
||||||
|
|
||||||
|
export function getResolveSpecClient(config: ClientConfig): ResolveSpecClient {
|
||||||
|
const key = config.baseUrl;
|
||||||
|
let instance = instances.get(key);
|
||||||
|
if (!instance) {
|
||||||
|
instance = new ResolveSpecClient(config);
|
||||||
|
instances.set(key, instance);
|
||||||
|
}
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ResolveSpecClient {
|
||||||
|
private config: ClientConfig;
|
||||||
|
|
||||||
|
constructor(config: ClientConfig) {
|
||||||
|
this.config = config;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildUrl(schema: string, entity: string, id?: string): string {
|
||||||
|
let url = `${this.config.baseUrl}/${schema}/${entity}`;
|
||||||
|
if (id) {
|
||||||
|
url += `/${id}`;
|
||||||
|
}
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
|
private baseHeaders(): HeadersInit {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
};
|
||||||
|
|
||||||
|
if (this.config.token) {
|
||||||
|
headers['Authorization'] = `Bearer ${this.config.token}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async fetchWithError<T>(url: string, options: RequestInit): Promise<APIResponse<T>> {
|
||||||
|
const response = await fetch(url, options);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(data.error?.message || 'An error occurred');
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getMetadata(schema: string, entity: string): Promise<APIResponse<TableMetadata>> {
|
||||||
|
const url = this.buildUrl(schema, entity);
|
||||||
|
return this.fetchWithError<TableMetadata>(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async read<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
id?: number | string | string[],
|
||||||
|
options?: Options
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const urlId = typeof id === 'number' || typeof id === 'string' ? String(id) : undefined;
|
||||||
|
const url = this.buildUrl(schema, entity, urlId);
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'read',
|
||||||
|
id: Array.isArray(id) ? id : undefined,
|
||||||
|
options,
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async create<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
data: any | any[],
|
||||||
|
options?: Options
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const url = this.buildUrl(schema, entity);
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'create',
|
||||||
|
data,
|
||||||
|
options,
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async update<T = any>(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
data: any | any[],
|
||||||
|
id?: number | string | string[],
|
||||||
|
options?: Options
|
||||||
|
): Promise<APIResponse<T>> {
|
||||||
|
const urlId = typeof id === 'number' || typeof id === 'string' ? String(id) : undefined;
|
||||||
|
const url = this.buildUrl(schema, entity, urlId);
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'update',
|
||||||
|
id: Array.isArray(id) ? id : undefined,
|
||||||
|
data,
|
||||||
|
options,
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.fetchWithError<T>(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(
|
||||||
|
schema: string,
|
||||||
|
entity: string,
|
||||||
|
id: number | string
|
||||||
|
): Promise<APIResponse<void>> {
|
||||||
|
const url = this.buildUrl(schema, entity, String(id));
|
||||||
|
const body: RequestBody = {
|
||||||
|
operation: 'delete',
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.fetchWithError<void>(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.baseHeaders(),
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
1
resolvespec-js/src/resolvespec/index.ts
Normal file
1
resolvespec-js/src/resolvespec/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export { ResolveSpecClient, getResolveSpecClient } from './client';
|
||||||
@@ -1,86 +0,0 @@
|
|||||||
// Types
|
|
||||||
export type Operator = 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte' | 'like' | 'ilike' | 'in';
|
|
||||||
export type Operation = 'read' | 'create' | 'update' | 'delete';
|
|
||||||
export type SortDirection = 'asc' | 'desc';
|
|
||||||
|
|
||||||
export interface PreloadOption {
|
|
||||||
relation: string;
|
|
||||||
columns?: string[];
|
|
||||||
filters?: FilterOption[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface FilterOption {
|
|
||||||
column: string;
|
|
||||||
operator: Operator;
|
|
||||||
value: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SortOption {
|
|
||||||
column: string;
|
|
||||||
direction: SortDirection;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CustomOperator {
|
|
||||||
name: string;
|
|
||||||
sql: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ComputedColumn {
|
|
||||||
name: string;
|
|
||||||
expression: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Options {
|
|
||||||
preload?: PreloadOption[];
|
|
||||||
columns?: string[];
|
|
||||||
filters?: FilterOption[];
|
|
||||||
sort?: SortOption[];
|
|
||||||
limit?: number;
|
|
||||||
offset?: number;
|
|
||||||
customOperators?: CustomOperator[];
|
|
||||||
computedColumns?: ComputedColumn[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RequestBody {
|
|
||||||
operation: Operation;
|
|
||||||
id?: string | string[];
|
|
||||||
data?: any | any[];
|
|
||||||
options?: Options;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface APIResponse<T = any> {
|
|
||||||
success: boolean;
|
|
||||||
data: T;
|
|
||||||
metadata?: {
|
|
||||||
total: number;
|
|
||||||
filtered: number;
|
|
||||||
limit: number;
|
|
||||||
offset: number;
|
|
||||||
};
|
|
||||||
error?: {
|
|
||||||
code: string;
|
|
||||||
message: string;
|
|
||||||
details?: any;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Column {
|
|
||||||
name: string;
|
|
||||||
type: string;
|
|
||||||
is_nullable: boolean;
|
|
||||||
is_primary: boolean;
|
|
||||||
is_unique: boolean;
|
|
||||||
has_index: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TableMetadata {
|
|
||||||
schema: string;
|
|
||||||
table: string;
|
|
||||||
columns: Column[];
|
|
||||||
relations: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ClientConfig {
|
|
||||||
baseUrl: string;
|
|
||||||
token?: string;
|
|
||||||
}
|
|
||||||
@@ -1,427 +0,0 @@
|
|||||||
import { WebSocketClient } from './websocket-client';
|
|
||||||
import type { WSNotificationMessage } from './websocket-types';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 1: Basic Usage
|
|
||||||
*/
|
|
||||||
export async function basicUsageExample() {
|
|
||||||
// Create client
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
reconnect: true,
|
|
||||||
debug: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Connect
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Read users
|
|
||||||
const users = await client.read('users', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'active' }
|
|
||||||
],
|
|
||||||
limit: 10,
|
|
||||||
sort: [
|
|
||||||
{ column: 'name', direction: 'asc' }
|
|
||||||
]
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('Users:', users);
|
|
||||||
|
|
||||||
// Create a user
|
|
||||||
const newUser = await client.create('users', {
|
|
||||||
name: 'John Doe',
|
|
||||||
email: 'john@example.com',
|
|
||||||
status: 'active'
|
|
||||||
}, { schema: 'public' });
|
|
||||||
|
|
||||||
console.log('Created user:', newUser);
|
|
||||||
|
|
||||||
// Update user
|
|
||||||
const updatedUser = await client.update('users', '123', {
|
|
||||||
name: 'John Updated'
|
|
||||||
}, { schema: 'public' });
|
|
||||||
|
|
||||||
console.log('Updated user:', updatedUser);
|
|
||||||
|
|
||||||
// Delete user
|
|
||||||
await client.delete('users', '123', { schema: 'public' });
|
|
||||||
|
|
||||||
// Disconnect
|
|
||||||
client.disconnect();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 2: Real-time Subscriptions
|
|
||||||
*/
|
|
||||||
export async function subscriptionExample() {
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
debug: true
|
|
||||||
});
|
|
||||||
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Subscribe to user changes
|
|
||||||
const subscriptionId = await client.subscribe(
|
|
||||||
'users',
|
|
||||||
(notification: WSNotificationMessage) => {
|
|
||||||
console.log('User changed:', notification.operation, notification.data);
|
|
||||||
|
|
||||||
switch (notification.operation) {
|
|
||||||
case 'create':
|
|
||||||
console.log('New user created:', notification.data);
|
|
||||||
break;
|
|
||||||
case 'update':
|
|
||||||
console.log('User updated:', notification.data);
|
|
||||||
break;
|
|
||||||
case 'delete':
|
|
||||||
console.log('User deleted:', notification.data);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'active' }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log('Subscribed with ID:', subscriptionId);
|
|
||||||
|
|
||||||
// Later: unsubscribe
|
|
||||||
setTimeout(async () => {
|
|
||||||
await client.unsubscribe(subscriptionId);
|
|
||||||
console.log('Unsubscribed');
|
|
||||||
client.disconnect();
|
|
||||||
}, 60000);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 3: Event Handling
|
|
||||||
*/
|
|
||||||
export async function eventHandlingExample() {
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Listen to connection events
|
|
||||||
client.on('connect', () => {
|
|
||||||
console.log('Connected!');
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('disconnect', (event) => {
|
|
||||||
console.log('Disconnected:', event.code, event.reason);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('error', (error) => {
|
|
||||||
console.error('WebSocket error:', error);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('stateChange', (state) => {
|
|
||||||
console.log('State changed to:', state);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('message', (message) => {
|
|
||||||
console.log('Received message:', message);
|
|
||||||
});
|
|
||||||
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Your operations here...
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 4: Multiple Subscriptions
|
|
||||||
*/
|
|
||||||
export async function multipleSubscriptionsExample() {
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
debug: true
|
|
||||||
});
|
|
||||||
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Subscribe to users
|
|
||||||
const userSubId = await client.subscribe(
|
|
||||||
'users',
|
|
||||||
(notification) => {
|
|
||||||
console.log('[Users]', notification.operation, notification.data);
|
|
||||||
},
|
|
||||||
{ schema: 'public' }
|
|
||||||
);
|
|
||||||
|
|
||||||
// Subscribe to posts
|
|
||||||
const postSubId = await client.subscribe(
|
|
||||||
'posts',
|
|
||||||
(notification) => {
|
|
||||||
console.log('[Posts]', notification.operation, notification.data);
|
|
||||||
},
|
|
||||||
{
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'published' }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// Subscribe to comments
|
|
||||||
const commentSubId = await client.subscribe(
|
|
||||||
'comments',
|
|
||||||
(notification) => {
|
|
||||||
console.log('[Comments]', notification.operation, notification.data);
|
|
||||||
},
|
|
||||||
{ schema: 'public' }
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log('Active subscriptions:', client.getSubscriptions());
|
|
||||||
|
|
||||||
// Clean up after 60 seconds
|
|
||||||
setTimeout(async () => {
|
|
||||||
await client.unsubscribe(userSubId);
|
|
||||||
await client.unsubscribe(postSubId);
|
|
||||||
await client.unsubscribe(commentSubId);
|
|
||||||
client.disconnect();
|
|
||||||
}, 60000);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 5: Advanced Queries
|
|
||||||
*/
|
|
||||||
export async function advancedQueriesExample() {
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws'
|
|
||||||
});
|
|
||||||
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Complex query with filters, sorting, pagination, and preloading
|
|
||||||
const posts = await client.read('posts', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'published' },
|
|
||||||
{ column: 'views', operator: 'gte', value: 100 }
|
|
||||||
],
|
|
||||||
columns: ['id', 'title', 'content', 'user_id', 'created_at'],
|
|
||||||
sort: [
|
|
||||||
{ column: 'created_at', direction: 'desc' },
|
|
||||||
{ column: 'views', direction: 'desc' }
|
|
||||||
],
|
|
||||||
preload: [
|
|
||||||
{
|
|
||||||
relation: 'user',
|
|
||||||
columns: ['id', 'name', 'email']
|
|
||||||
},
|
|
||||||
{
|
|
||||||
relation: 'comments',
|
|
||||||
columns: ['id', 'content', 'user_id'],
|
|
||||||
filters: [
|
|
||||||
{ column: 'status', operator: 'eq', value: 'approved' }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
limit: 20,
|
|
||||||
offset: 0
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('Posts:', posts);
|
|
||||||
|
|
||||||
// Get single record by ID
|
|
||||||
const post = await client.read('posts', {
|
|
||||||
schema: 'public',
|
|
||||||
record_id: '123'
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('Single post:', post);
|
|
||||||
|
|
||||||
client.disconnect();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 6: Error Handling
|
|
||||||
*/
|
|
||||||
export async function errorHandlingExample() {
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws',
|
|
||||||
reconnect: true,
|
|
||||||
maxReconnectAttempts: 5
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('error', (error) => {
|
|
||||||
console.error('Connection error:', error);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('stateChange', (state) => {
|
|
||||||
console.log('Connection state:', state);
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Try to read non-existent entity
|
|
||||||
await client.read('nonexistent', { schema: 'public' });
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Read error:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Try to create invalid record
|
|
||||||
await client.create('users', {
|
|
||||||
// Missing required fields
|
|
||||||
}, { schema: 'public' });
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Create error:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Connection failed:', error);
|
|
||||||
} finally {
|
|
||||||
client.disconnect();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 7: React Integration
|
|
||||||
*/
|
|
||||||
export function reactIntegrationExample() {
|
|
||||||
const exampleCode = `
|
|
||||||
import { useEffect, useState } from 'react';
|
|
||||||
import { WebSocketClient } from '@warkypublic/resolvespec-js';
|
|
||||||
|
|
||||||
export function useWebSocket(url: string) {
|
|
||||||
const [client] = useState(() => new WebSocketClient({ url }));
|
|
||||||
const [isConnected, setIsConnected] = useState(false);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
client.on('connect', () => setIsConnected(true));
|
|
||||||
client.on('disconnect', () => setIsConnected(false));
|
|
||||||
|
|
||||||
client.connect();
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
client.disconnect();
|
|
||||||
};
|
|
||||||
}, [client]);
|
|
||||||
|
|
||||||
return { client, isConnected };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function UsersComponent() {
|
|
||||||
const { client, isConnected } = useWebSocket('ws://localhost:8080/ws');
|
|
||||||
const [users, setUsers] = useState([]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (!isConnected) return;
|
|
||||||
|
|
||||||
// Subscribe to user changes
|
|
||||||
const subscribeToUsers = async () => {
|
|
||||||
const subId = await client.subscribe('users', (notification) => {
|
|
||||||
if (notification.operation === 'create') {
|
|
||||||
setUsers(prev => [...prev, notification.data]);
|
|
||||||
} else if (notification.operation === 'update') {
|
|
||||||
setUsers(prev => prev.map(u =>
|
|
||||||
u.id === notification.data.id ? notification.data : u
|
|
||||||
));
|
|
||||||
} else if (notification.operation === 'delete') {
|
|
||||||
setUsers(prev => prev.filter(u => u.id !== notification.data.id));
|
|
||||||
}
|
|
||||||
}, { schema: 'public' });
|
|
||||||
|
|
||||||
// Load initial users
|
|
||||||
const initialUsers = await client.read('users', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [{ column: 'status', operator: 'eq', value: 'active' }]
|
|
||||||
});
|
|
||||||
setUsers(initialUsers);
|
|
||||||
|
|
||||||
return () => client.unsubscribe(subId);
|
|
||||||
};
|
|
||||||
|
|
||||||
subscribeToUsers();
|
|
||||||
}, [client, isConnected]);
|
|
||||||
|
|
||||||
const createUser = async (name: string, email: string) => {
|
|
||||||
await client.create('users', { name, email, status: 'active' }, {
|
|
||||||
schema: 'public'
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div>
|
|
||||||
<h2>Users ({users.length})</h2>
|
|
||||||
{isConnected ? '🟢 Connected' : '🔴 Disconnected'}
|
|
||||||
{/* Render users... */}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
`;
|
|
||||||
|
|
||||||
console.log(exampleCode);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Example 8: TypeScript with Typed Models
|
|
||||||
*/
|
|
||||||
export async function typedModelsExample() {
|
|
||||||
// Define your models
|
|
||||||
interface User {
|
|
||||||
id: number;
|
|
||||||
name: string;
|
|
||||||
email: string;
|
|
||||||
status: 'active' | 'inactive';
|
|
||||||
created_at: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface Post {
|
|
||||||
id: number;
|
|
||||||
title: string;
|
|
||||||
content: string;
|
|
||||||
user_id: number;
|
|
||||||
status: 'draft' | 'published';
|
|
||||||
views: number;
|
|
||||||
user?: User;
|
|
||||||
}
|
|
||||||
|
|
||||||
const client = new WebSocketClient({
|
|
||||||
url: 'ws://localhost:8080/ws'
|
|
||||||
});
|
|
||||||
|
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Type-safe operations
|
|
||||||
const users = await client.read<User[]>('users', {
|
|
||||||
schema: 'public',
|
|
||||||
filters: [{ column: 'status', operator: 'eq', value: 'active' }]
|
|
||||||
});
|
|
||||||
|
|
||||||
const newUser = await client.create<User>('users', {
|
|
||||||
name: 'Alice',
|
|
||||||
email: 'alice@example.com',
|
|
||||||
status: 'active'
|
|
||||||
}, { schema: 'public' });
|
|
||||||
|
|
||||||
const posts = await client.read<Post[]>('posts', {
|
|
||||||
schema: 'public',
|
|
||||||
preload: [
|
|
||||||
{
|
|
||||||
relation: 'user',
|
|
||||||
columns: ['id', 'name', 'email']
|
|
||||||
}
|
|
||||||
]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Type-safe subscriptions
|
|
||||||
await client.subscribe(
|
|
||||||
'users',
|
|
||||||
(notification) => {
|
|
||||||
const user = notification.data as User;
|
|
||||||
console.log('User changed:', user.name, user.email);
|
|
||||||
},
|
|
||||||
{ schema: 'public' }
|
|
||||||
);
|
|
||||||
|
|
||||||
client.disconnect();
|
|
||||||
}
|
|
||||||
@@ -8,10 +8,22 @@ import type {
|
|||||||
WSOperation,
|
WSOperation,
|
||||||
WSOptions,
|
WSOptions,
|
||||||
Subscription,
|
Subscription,
|
||||||
SubscriptionOptions,
|
|
||||||
ConnectionState,
|
ConnectionState,
|
||||||
WebSocketClientEvents
|
WebSocketClientEvents
|
||||||
} from './websocket-types';
|
} from './types';
|
||||||
|
import type { FilterOption, SortOption, PreloadOption } from '../common/types';
|
||||||
|
|
||||||
|
const instances = new Map<string, WebSocketClient>();
|
||||||
|
|
||||||
|
export function getWebSocketClient(config: WebSocketClientConfig): WebSocketClient {
|
||||||
|
const key = config.url;
|
||||||
|
let instance = instances.get(key);
|
||||||
|
if (!instance) {
|
||||||
|
instance = new WebSocketClient(config);
|
||||||
|
instances.set(key, instance);
|
||||||
|
}
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
export class WebSocketClient {
|
export class WebSocketClient {
|
||||||
private ws: WebSocket | null = null;
|
private ws: WebSocket | null = null;
|
||||||
@@ -36,9 +48,6 @@ export class WebSocketClient {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Connect to WebSocket server
|
|
||||||
*/
|
|
||||||
async connect(): Promise<void> {
|
async connect(): Promise<void> {
|
||||||
if (this.ws?.readyState === WebSocket.OPEN) {
|
if (this.ws?.readyState === WebSocket.OPEN) {
|
||||||
this.log('Already connected');
|
this.log('Already connected');
|
||||||
@@ -78,7 +87,6 @@ export class WebSocketClient {
|
|||||||
this.setState('disconnected');
|
this.setState('disconnected');
|
||||||
this.emit('disconnect', event);
|
this.emit('disconnect', event);
|
||||||
|
|
||||||
// Attempt reconnection if enabled and not manually closed
|
|
||||||
if (this.config.reconnect && !this.isManualClose && this.reconnectAttempts < this.config.maxReconnectAttempts) {
|
if (this.config.reconnect && !this.isManualClose && this.reconnectAttempts < this.config.maxReconnectAttempts) {
|
||||||
this.reconnectAttempts++;
|
this.reconnectAttempts++;
|
||||||
this.log(`Reconnection attempt ${this.reconnectAttempts}/${this.config.maxReconnectAttempts}`);
|
this.log(`Reconnection attempt ${this.reconnectAttempts}/${this.config.maxReconnectAttempts}`);
|
||||||
@@ -97,9 +105,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Disconnect from WebSocket server
|
|
||||||
*/
|
|
||||||
disconnect(): void {
|
disconnect(): void {
|
||||||
this.isManualClose = true;
|
this.isManualClose = true;
|
||||||
|
|
||||||
@@ -120,9 +125,6 @@ export class WebSocketClient {
|
|||||||
this.messageHandlers.clear();
|
this.messageHandlers.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Send a CRUD request and wait for response
|
|
||||||
*/
|
|
||||||
async request<T = any>(
|
async request<T = any>(
|
||||||
operation: WSOperation,
|
operation: WSOperation,
|
||||||
entity: string,
|
entity: string,
|
||||||
@@ -148,7 +150,6 @@ export class WebSocketClient {
|
|||||||
};
|
};
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
// Set up response handler
|
|
||||||
this.messageHandlers.set(id, (response: WSResponseMessage) => {
|
this.messageHandlers.set(id, (response: WSResponseMessage) => {
|
||||||
if (response.success) {
|
if (response.success) {
|
||||||
resolve(response.data);
|
resolve(response.data);
|
||||||
@@ -157,10 +158,8 @@ export class WebSocketClient {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Send message
|
|
||||||
this.send(message);
|
this.send(message);
|
||||||
|
|
||||||
// Timeout after 30 seconds
|
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
if (this.messageHandlers.has(id)) {
|
if (this.messageHandlers.has(id)) {
|
||||||
this.messageHandlers.delete(id);
|
this.messageHandlers.delete(id);
|
||||||
@@ -170,16 +169,13 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Read records
|
|
||||||
*/
|
|
||||||
async read<T = any>(entity: string, options?: {
|
async read<T = any>(entity: string, options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
record_id?: string;
|
record_id?: string;
|
||||||
filters?: import('./types').FilterOption[];
|
filters?: FilterOption[];
|
||||||
columns?: string[];
|
columns?: string[];
|
||||||
sort?: import('./types').SortOption[];
|
sort?: SortOption[];
|
||||||
preload?: import('./types').PreloadOption[];
|
preload?: PreloadOption[];
|
||||||
limit?: number;
|
limit?: number;
|
||||||
offset?: number;
|
offset?: number;
|
||||||
}): Promise<T> {
|
}): Promise<T> {
|
||||||
@@ -197,9 +193,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a record
|
|
||||||
*/
|
|
||||||
async create<T = any>(entity: string, data: any, options?: {
|
async create<T = any>(entity: string, data: any, options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
}): Promise<T> {
|
}): Promise<T> {
|
||||||
@@ -209,9 +202,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Update a record
|
|
||||||
*/
|
|
||||||
async update<T = any>(entity: string, id: string, data: any, options?: {
|
async update<T = any>(entity: string, id: string, data: any, options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
}): Promise<T> {
|
}): Promise<T> {
|
||||||
@@ -222,9 +212,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a record
|
|
||||||
*/
|
|
||||||
async delete(entity: string, id: string, options?: {
|
async delete(entity: string, id: string, options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
}): Promise<void> {
|
}): Promise<void> {
|
||||||
@@ -234,9 +221,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get metadata for an entity
|
|
||||||
*/
|
|
||||||
async meta<T = any>(entity: string, options?: {
|
async meta<T = any>(entity: string, options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
}): Promise<T> {
|
}): Promise<T> {
|
||||||
@@ -245,15 +229,12 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Subscribe to entity changes
|
|
||||||
*/
|
|
||||||
async subscribe(
|
async subscribe(
|
||||||
entity: string,
|
entity: string,
|
||||||
callback: (notification: WSNotificationMessage) => void,
|
callback: (notification: WSNotificationMessage) => void,
|
||||||
options?: {
|
options?: {
|
||||||
schema?: string;
|
schema?: string;
|
||||||
filters?: import('./types').FilterOption[];
|
filters?: FilterOption[];
|
||||||
}
|
}
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
this.ensureConnected();
|
this.ensureConnected();
|
||||||
@@ -275,7 +256,6 @@ export class WebSocketClient {
|
|||||||
if (response.success && response.data?.subscription_id) {
|
if (response.success && response.data?.subscription_id) {
|
||||||
const subscriptionId = response.data.subscription_id;
|
const subscriptionId = response.data.subscription_id;
|
||||||
|
|
||||||
// Store subscription
|
|
||||||
this.subscriptions.set(subscriptionId, {
|
this.subscriptions.set(subscriptionId, {
|
||||||
id: subscriptionId,
|
id: subscriptionId,
|
||||||
entity,
|
entity,
|
||||||
@@ -293,7 +273,6 @@ export class WebSocketClient {
|
|||||||
|
|
||||||
this.send(message);
|
this.send(message);
|
||||||
|
|
||||||
// Timeout
|
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
if (this.messageHandlers.has(id)) {
|
if (this.messageHandlers.has(id)) {
|
||||||
this.messageHandlers.delete(id);
|
this.messageHandlers.delete(id);
|
||||||
@@ -303,9 +282,6 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Unsubscribe from entity changes
|
|
||||||
*/
|
|
||||||
async unsubscribe(subscriptionId: string): Promise<void> {
|
async unsubscribe(subscriptionId: string): Promise<void> {
|
||||||
this.ensureConnected();
|
this.ensureConnected();
|
||||||
|
|
||||||
@@ -330,7 +306,6 @@ export class WebSocketClient {
|
|||||||
|
|
||||||
this.send(message);
|
this.send(message);
|
||||||
|
|
||||||
// Timeout
|
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
if (this.messageHandlers.has(id)) {
|
if (this.messageHandlers.has(id)) {
|
||||||
this.messageHandlers.delete(id);
|
this.messageHandlers.delete(id);
|
||||||
@@ -340,37 +315,22 @@ export class WebSocketClient {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get list of active subscriptions
|
|
||||||
*/
|
|
||||||
getSubscriptions(): Subscription[] {
|
getSubscriptions(): Subscription[] {
|
||||||
return Array.from(this.subscriptions.values());
|
return Array.from(this.subscriptions.values());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get connection state
|
|
||||||
*/
|
|
||||||
getState(): ConnectionState {
|
getState(): ConnectionState {
|
||||||
return this.state;
|
return this.state;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if connected
|
|
||||||
*/
|
|
||||||
isConnected(): boolean {
|
isConnected(): boolean {
|
||||||
return this.ws?.readyState === WebSocket.OPEN;
|
return this.ws?.readyState === WebSocket.OPEN;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Add event listener
|
|
||||||
*/
|
|
||||||
on<K extends keyof WebSocketClientEvents>(event: K, callback: WebSocketClientEvents[K]): void {
|
on<K extends keyof WebSocketClientEvents>(event: K, callback: WebSocketClientEvents[K]): void {
|
||||||
this.eventListeners[event] = callback as any;
|
this.eventListeners[event] = callback as any;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Remove event listener
|
|
||||||
*/
|
|
||||||
off<K extends keyof WebSocketClientEvents>(event: K): void {
|
off<K extends keyof WebSocketClientEvents>(event: K): void {
|
||||||
delete this.eventListeners[event];
|
delete this.eventListeners[event];
|
||||||
}
|
}
|
||||||
@@ -384,7 +344,6 @@ export class WebSocketClient {
|
|||||||
|
|
||||||
this.emit('message', message);
|
this.emit('message', message);
|
||||||
|
|
||||||
// Handle different message types
|
|
||||||
switch (message.type) {
|
switch (message.type) {
|
||||||
case 'response':
|
case 'response':
|
||||||
this.handleResponse(message as WSResponseMessage);
|
this.handleResponse(message as WSResponseMessage);
|
||||||
@@ -395,7 +354,6 @@ export class WebSocketClient {
|
|||||||
break;
|
break;
|
||||||
|
|
||||||
case 'pong':
|
case 'pong':
|
||||||
// Heartbeat response
|
|
||||||
break;
|
break;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
2
resolvespec-js/src/websocketspec/index.ts
Normal file
2
resolvespec-js/src/websocketspec/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './types';
|
||||||
|
export { WebSocketClient, getWebSocketClient } from './client';
|
||||||
@@ -1,17 +1,24 @@
|
|||||||
|
import type { FilterOption, SortOption, PreloadOption, Parameter } from '../common/types';
|
||||||
|
|
||||||
|
// Re-export common types
|
||||||
|
export type { FilterOption, SortOption, PreloadOption, Operator, SortDirection } from '../common/types';
|
||||||
|
|
||||||
// WebSocket Message Types
|
// WebSocket Message Types
|
||||||
export type MessageType = 'request' | 'response' | 'notification' | 'subscription' | 'error' | 'ping' | 'pong';
|
export type MessageType = 'request' | 'response' | 'notification' | 'subscription' | 'error' | 'ping' | 'pong';
|
||||||
export type WSOperation = 'read' | 'create' | 'update' | 'delete' | 'subscribe' | 'unsubscribe' | 'meta';
|
export type WSOperation = 'read' | 'create' | 'update' | 'delete' | 'subscribe' | 'unsubscribe' | 'meta';
|
||||||
|
|
||||||
// Re-export common types
|
|
||||||
export type { FilterOption, SortOption, PreloadOption, Operator, SortDirection } from './types';
|
|
||||||
|
|
||||||
export interface WSOptions {
|
export interface WSOptions {
|
||||||
filters?: import('./types').FilterOption[];
|
filters?: FilterOption[];
|
||||||
columns?: string[];
|
columns?: string[];
|
||||||
preload?: import('./types').PreloadOption[];
|
omit_columns?: string[];
|
||||||
sort?: import('./types').SortOption[];
|
preload?: PreloadOption[];
|
||||||
|
sort?: SortOption[];
|
||||||
limit?: number;
|
limit?: number;
|
||||||
offset?: number;
|
offset?: number;
|
||||||
|
parameters?: Parameter[];
|
||||||
|
cursor_forward?: string;
|
||||||
|
cursor_backward?: string;
|
||||||
|
fetch_row_number?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface WSMessage {
|
export interface WSMessage {
|
||||||
@@ -78,7 +85,7 @@ export interface WSSubscriptionMessage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface SubscriptionOptions {
|
export interface SubscriptionOptions {
|
||||||
filters?: import('./types').FilterOption[];
|
filters?: FilterOption[];
|
||||||
onNotification?: (notification: WSNotificationMessage) => void;
|
onNotification?: (notification: WSNotificationMessage) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
21
resolvespec-js/tsconfig.json
Normal file
21
resolvespec-js/tsconfig.json
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2020",
|
||||||
|
"module": "ESNext",
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"strict": true,
|
||||||
|
"declaration": true,
|
||||||
|
"declarationMap": true,
|
||||||
|
"sourceMap": true,
|
||||||
|
"outDir": "dist",
|
||||||
|
"rootDir": "src",
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"resolveJsonModule": true,
|
||||||
|
"isolatedModules": true,
|
||||||
|
"lib": ["ES2020", "DOM"]
|
||||||
|
},
|
||||||
|
"include": ["src"],
|
||||||
|
"exclude": ["node_modules", "dist", "src/__tests__"]
|
||||||
|
}
|
||||||
20
resolvespec-js/vite.config.ts
Normal file
20
resolvespec-js/vite.config.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { defineConfig } from 'vite';
|
||||||
|
import dts from 'vite-plugin-dts';
|
||||||
|
import { resolve } from 'path';
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
plugins: [
|
||||||
|
dts({ rollupTypes: true }),
|
||||||
|
],
|
||||||
|
build: {
|
||||||
|
lib: {
|
||||||
|
entry: resolve(__dirname, 'src/index.ts'),
|
||||||
|
name: 'ResolveSpec',
|
||||||
|
formats: ['es', 'cjs'],
|
||||||
|
fileName: (format) => `index.${format === 'es' ? 'js' : 'cjs'}`,
|
||||||
|
},
|
||||||
|
rollupOptions: {
|
||||||
|
external: ['uuid', 'semver'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
@@ -1,5 +1,50 @@
|
|||||||
# Python Implementation of the ResolveSpec API
|
# ResolveSpec Python Client - TODO
|
||||||
|
|
||||||
# Server
|
## Client Implementation & Testing
|
||||||
|
|
||||||
# Client
|
### 1. ResolveSpec Client API
|
||||||
|
|
||||||
|
- [ ] Core API implementation (read, create, update, delete, get_metadata)
|
||||||
|
- [ ] Unit tests for API functions
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
- [ ] Error handling and edge cases
|
||||||
|
|
||||||
|
### 2. HeaderSpec Client API
|
||||||
|
|
||||||
|
- [ ] Client API implementation
|
||||||
|
- [ ] Unit tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
### 3. FunctionSpec Client API
|
||||||
|
|
||||||
|
- [ ] Client API implementation
|
||||||
|
- [ ] Unit tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
### 4. WebSocketSpec Client API
|
||||||
|
|
||||||
|
- [ ] WebSocketClient class implementation (read, create, update, delete, meta, subscribe, unsubscribe)
|
||||||
|
- [ ] Unit tests for WebSocketClient
|
||||||
|
- [ ] Connection handling tests
|
||||||
|
- [ ] Subscription tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
### 5. Testing Infrastructure
|
||||||
|
|
||||||
|
- [ ] Set up test framework (pytest)
|
||||||
|
- [ ] Configure test coverage reporting (pytest-cov)
|
||||||
|
- [ ] Add test utilities and fixtures
|
||||||
|
- [ ] Create test documentation
|
||||||
|
- [ ] Package and publish to PyPI
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
- [ ] API reference documentation
|
||||||
|
- [ ] Usage examples for each client API
|
||||||
|
- [ ] Installation guide
|
||||||
|
- [ ] Contributing guidelines
|
||||||
|
- [ ] README with quick start
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated:** 2026-02-07
|
||||||
114
todo.md
114
todo.md
@@ -2,36 +2,98 @@
|
|||||||
|
|
||||||
This document tracks incomplete features and improvements for the ResolveSpec project.
|
This document tracks incomplete features and improvements for the ResolveSpec project.
|
||||||
|
|
||||||
|
## In Progress
|
||||||
|
|
||||||
|
### Database Layer
|
||||||
|
|
||||||
|
- [x] SQLite schema translation (schema.table → schema_table)
|
||||||
|
- [x] Driver name normalization across adapters
|
||||||
|
- [x] Database Connection Manager (dbmanager) package
|
||||||
|
|
||||||
### Documentation
|
### Documentation
|
||||||
- Ensure all new features are documented in README.md
|
|
||||||
- Update examples to showcase new functionality
|
|
||||||
- Add migration notes if any breaking changes are introduced
|
|
||||||
|
|
||||||
|
- [x] Add dbmanager to README
|
||||||
|
- [x] Add WebSocketSpec to top-level intro
|
||||||
|
- [x] Add MQTTSpec to top-level intro
|
||||||
|
- [x] Remove migration sections from README
|
||||||
|
- [ ] Complete API reference documentation
|
||||||
|
- [ ] Add examples for all supported databases
|
||||||
|
|
||||||
### 8.
|
## Planned Features
|
||||||
|
|
||||||
1. **Test Coverage**: Increase from 20% to 70%+
|
### ResolveSpec JS Client Implementation & Testing
|
||||||
- Add integration tests for CRUD operations
|
|
||||||
- Add unit tests for security providers
|
1. **ResolveSpec Client API (resolvespec-js)**
|
||||||
- Add concurrency tests for model registry
|
- [x] Core API implementation (read, create, update, delete, getMetadata)
|
||||||
|
- [ ] Unit tests for API functions
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
- [ ] Error handling and edge cases
|
||||||
|
|
||||||
|
2. **HeaderSpec Client API (resolvespec-js)**
|
||||||
|
- [ ] Client API implementation
|
||||||
|
- [ ] Unit tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
3. **FunctionSpec Client API (resolvespec-js)**
|
||||||
|
- [ ] Client API implementation
|
||||||
|
- [ ] Unit tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
4. **WebSocketSpec Client API (resolvespec-js)**
|
||||||
|
- [x] WebSocketClient class implementation (read, create, update, delete, meta, subscribe, unsubscribe)
|
||||||
|
- [ ] Unit tests for WebSocketClient
|
||||||
|
- [ ] Connection handling tests
|
||||||
|
- [ ] Subscription tests
|
||||||
|
- [ ] Integration tests with server
|
||||||
|
|
||||||
|
5. **resolvespec-js Testing Infrastructure**
|
||||||
|
- [ ] Set up test framework (Jest or Vitest)
|
||||||
|
- [ ] Configure test coverage reporting
|
||||||
|
- [ ] Add test utilities and mocks
|
||||||
|
- [ ] Create test documentation
|
||||||
|
|
||||||
|
### ResolveSpec Python Client Implementation & Testing
|
||||||
|
|
||||||
|
See [`resolvespec-python/todo.md`](./resolvespec-python/todo.md) for detailed Python client implementation tasks.
|
||||||
|
|
||||||
|
### Core Functionality
|
||||||
|
|
||||||
|
1. **Enhanced Preload Filtering**
|
||||||
|
- [ ] Column selection for nested preloads
|
||||||
|
- [ ] Advanced filtering conditions for relations
|
||||||
|
- [ ] Performance optimization for deep nesting
|
||||||
|
|
||||||
|
2. **Advanced Query Features**
|
||||||
|
- [ ] Custom SQL join support
|
||||||
|
- [ ] Computed column improvements
|
||||||
|
- [ ] Recursive query support
|
||||||
|
|
||||||
|
3. **Testing & Quality**
|
||||||
|
- [ ] Increase test coverage to 70%+
|
||||||
|
- [ ] Add integration tests for all ORMs
|
||||||
|
- [ ] Add concurrency tests for thread safety
|
||||||
|
- [ ] Performance benchmarks
|
||||||
|
|
||||||
|
### Infrastructure
|
||||||
|
|
||||||
|
- [ ] Improved error handling and reporting
|
||||||
|
- [ ] Enhanced logging capabilities
|
||||||
|
- [ ] Additional monitoring metrics
|
||||||
|
- [ ] Performance profiling tools
|
||||||
|
|
||||||
|
## Documentation Tasks
|
||||||
|
|
||||||
|
- [ ] Complete API reference
|
||||||
|
- [ ] Add troubleshooting guides
|
||||||
|
- [ ] Create architecture diagrams
|
||||||
|
- [ ] Expand database adapter documentation
|
||||||
|
|
||||||
|
## Known Issues
|
||||||
|
|
||||||
|
- [ ] Long preload alias names may exceed PostgreSQL identifier limit
|
||||||
|
- [ ] Some edge cases in computed column handling
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Priority Ranking
|
**Last Updated:** 2026-02-07
|
||||||
|
**Updated:** Added resolvespec-js client testing and implementation tasks
|
||||||
1. **High Priority**
|
|
||||||
- Column Selection and Filtering for Preloads (#1)
|
|
||||||
- Proper Condition Handling for Bun Preloads (#4)
|
|
||||||
|
|
||||||
2. **Medium Priority**
|
|
||||||
- Custom SQL Join Support (#3)
|
|
||||||
- Recursive JSON Cleaning (#2)
|
|
||||||
|
|
||||||
3. **Low Priority**
|
|
||||||
- Modernize Go Type Declarations (#5)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
**Last Updated:** 2025-12-09
|
|
||||||
|
|||||||
Reference in New Issue
Block a user