Compare commits
32 Commits
532d1560a3
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 9e6d05e055 | |||
|
|
442cc3ef53 | ||
|
|
5e54167009 | ||
|
|
65715f7ad3 | ||
| 537e65ea6d | |||
| e208c62df3 | |||
| f6a86e3933 | |||
| a4193b295a | |||
| b17241b928 | |||
| 63f8dcacb6 | |||
| 927a118338 | |||
| b39cd3ba72 | |||
| db7b152852 | |||
| da7220ad64 | |||
| 71845d38d3 | |||
| bdc78cc2a3 | |||
| 6c5e3918dc | |||
| cd14be0666 | |||
| 20122a5f53 | |||
|
|
8e74dc9284 | ||
| 1c9741373e | |||
| 3e832eea98 | |||
| c4d260d971 | |||
| 27cd494f6d | |||
| 3dfed9c986 | |||
| 512b16f8fe | |||
| 9a9fa4f384 | |||
| 979afc909e | |||
| 55859811be | |||
| 7f9c6f122e | |||
| 14e218d784 | |||
| 1ed67881e6 |
@@ -31,6 +31,9 @@ jobs:
|
|||||||
- name: Download dependencies
|
- name: Download dependencies
|
||||||
run: go mod download
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Tidy modules
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: go test ./...
|
run: go test ./...
|
||||||
|
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -33,4 +33,6 @@ bin/
|
|||||||
OB1/
|
OB1/
|
||||||
ui/node_modules/
|
ui/node_modules/
|
||||||
ui/.svelte-kit/
|
ui/.svelte-kit/
|
||||||
internal/app/ui/dist/
|
internal/app/ui/dist/*
|
||||||
|
!internal/app/ui/dist/placeholder.txt
|
||||||
|
.codex
|
||||||
|
|||||||
10
Dockerfile
10
Dockerfile
@@ -29,7 +29,14 @@ RUN set -eu; \
|
|||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
||||||
-o /out/amcs-server ./cmd/amcs-server
|
-o /out/amcs-server ./cmd/amcs-server; \
|
||||||
|
CGO_ENABLED=0 GOOS=linux go build -trimpath \
|
||||||
|
-ldflags="-s -w \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Version=${VERSION_TAG} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
||||||
|
-o /out/amcs-migrate-config ./cmd/amcs-migrate-config
|
||||||
|
|
||||||
FROM debian:bookworm-slim
|
FROM debian:bookworm-slim
|
||||||
|
|
||||||
@@ -41,6 +48,7 @@ RUN apt-get update \
|
|||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --from=builder /out/amcs-server /app/amcs-server
|
COPY --from=builder /out/amcs-server /app/amcs-server
|
||||||
|
COPY --from=builder /out/amcs-migrate-config /app/amcs-migrate-config
|
||||||
COPY --chown=appuser:appuser configs /app/configs
|
COPY --chown=appuser:appuser configs /app/configs
|
||||||
|
|
||||||
USER appuser
|
USER appuser
|
||||||
|
|||||||
70
Makefile
70
Makefile
@@ -4,7 +4,10 @@ SERVER_BIN := $(BIN_DIR)/amcs-server
|
|||||||
CMD_SERVER := ./cmd/amcs-server
|
CMD_SERVER := ./cmd/amcs-server
|
||||||
BUILDINFO_PKG := git.warky.dev/wdevs/amcs/internal/buildinfo
|
BUILDINFO_PKG := git.warky.dev/wdevs/amcs/internal/buildinfo
|
||||||
UI_DIR := $(CURDIR)/ui
|
UI_DIR := $(CURDIR)/ui
|
||||||
|
AMCS_UI_BACKEND ?= http://127.0.0.1:8080
|
||||||
PATCH_INCREMENT ?= 1
|
PATCH_INCREMENT ?= 1
|
||||||
|
RELEASE_VERSION ?=
|
||||||
|
RELEASE_REMOTE ?= origin
|
||||||
VERSION_TAG ?= $(shell git describe --tags --exact-match 2>/dev/null || echo dev)
|
VERSION_TAG ?= $(shell git describe --tags --exact-match 2>/dev/null || echo dev)
|
||||||
COMMIT_SHA ?= $(shell git rev-parse --short HEAD 2>/dev/null || echo unknown)
|
COMMIT_SHA ?= $(shell git rev-parse --short HEAD 2>/dev/null || echo unknown)
|
||||||
BUILD_DATE ?= $(shell date -u +%Y-%m-%dT%H:%M:%SZ)
|
BUILD_DATE ?= $(shell date -u +%Y-%m-%dT%H:%M:%SZ)
|
||||||
@@ -12,6 +15,7 @@ RELSPEC ?= $(shell command -v relspec 2>/dev/null || echo $(HOME)/go/bin/relspec
|
|||||||
SCHEMA_FILES := $(sort $(wildcard schema/*.dbml))
|
SCHEMA_FILES := $(sort $(wildcard schema/*.dbml))
|
||||||
MERGE_TARGET_TMP := $(CURDIR)/.cache/schema.merge-target.dbml
|
MERGE_TARGET_TMP := $(CURDIR)/.cache/schema.merge-target.dbml
|
||||||
GENERATED_SCHEMA_MIGRATION := migrations/020_generated_schema.sql
|
GENERATED_SCHEMA_MIGRATION := migrations/020_generated_schema.sql
|
||||||
|
GENERATED_MODELS_DIR := internal/generatedmodels
|
||||||
PNPM ?= pnpm
|
PNPM ?= pnpm
|
||||||
LDFLAGS := -s -w \
|
LDFLAGS := -s -w \
|
||||||
-X $(BUILDINFO_PKG).Version=$(VERSION_TAG) \
|
-X $(BUILDINFO_PKG).Version=$(VERSION_TAG) \
|
||||||
@@ -19,10 +23,27 @@ LDFLAGS := -s -w \
|
|||||||
-X $(BUILDINFO_PKG).Commit=$(COMMIT_SHA) \
|
-X $(BUILDINFO_PKG).Commit=$(COMMIT_SHA) \
|
||||||
-X $(BUILDINFO_PKG).BuildDate=$(BUILD_DATE)
|
-X $(BUILDINFO_PKG).BuildDate=$(BUILD_DATE)
|
||||||
|
|
||||||
.PHONY: all build clean migrate release-version test generate-migrations check-schema-drift build-cli ui-install ui-build ui-dev ui-check
|
.PHONY: all build clean migrate release-version release-build test generate-migrations generate-models check-schema-drift build-cli ui-install ui-build ui-dev ui-check help
|
||||||
|
|
||||||
all: build
|
all: build
|
||||||
|
|
||||||
|
help:
|
||||||
|
@echo "Available targets:"
|
||||||
|
@echo " build Build server binary (includes UI build)"
|
||||||
|
@echo " build-cli Build CLI binary"
|
||||||
|
@echo " test Run all tests (includes UI check)"
|
||||||
|
@echo " clean Remove build artifacts"
|
||||||
|
@echo " migrate Run database migrations"
|
||||||
|
@echo " release-version Tag and push a release (auto patch bump or RELEASE_VERSION=vX.Y.Z)"
|
||||||
|
@echo " release-build Build with a specific release tag (RELEASE_VERSION=vX.Y.Z)"
|
||||||
|
@echo " generate-migrations Generate SQL migration from DBML schema files"
|
||||||
|
@echo " generate-models Generate Go models from DBML schema"
|
||||||
|
@echo " check-schema-drift Verify generated migration matches current schema"
|
||||||
|
@echo " ui-install Install UI dependencies"
|
||||||
|
@echo " ui-build Build UI assets"
|
||||||
|
@echo " ui-dev Start UI dev server with local API proxy"
|
||||||
|
@echo " ui-check Run UI type checks"
|
||||||
|
|
||||||
build: ui-build
|
build: ui-build
|
||||||
@mkdir -p $(BIN_DIR)
|
@mkdir -p $(BIN_DIR)
|
||||||
go build -ldflags "$(LDFLAGS)" -o $(SERVER_BIN) $(CMD_SERVER)
|
go build -ldflags "$(LDFLAGS)" -o $(SERVER_BIN) $(CMD_SERVER)
|
||||||
@@ -34,7 +55,7 @@ ui-build: ui-install
|
|||||||
cd $(UI_DIR) && $(PNPM) run build
|
cd $(UI_DIR) && $(PNPM) run build
|
||||||
|
|
||||||
ui-dev: ui-install
|
ui-dev: ui-install
|
||||||
cd $(UI_DIR) && $(PNPM) run dev
|
cd $(UI_DIR) && VITE_API_URL=/api AMCS_UI_BACKEND=$(AMCS_UI_BACKEND) $(PNPM) run dev
|
||||||
|
|
||||||
ui-check: ui-install
|
ui-check: ui-install
|
||||||
cd $(UI_DIR) && $(PNPM) run check
|
cd $(UI_DIR) && $(PNPM) run check
|
||||||
@@ -47,22 +68,41 @@ release-version:
|
|||||||
@case "$(PATCH_INCREMENT)" in \
|
@case "$(PATCH_INCREMENT)" in \
|
||||||
''|*[!0-9]*|0) echo "PATCH_INCREMENT must be a positive integer" >&2; exit 1 ;; \
|
''|*[!0-9]*|0) echo "PATCH_INCREMENT must be a positive integer" >&2; exit 1 ;; \
|
||||||
esac
|
esac
|
||||||
@latest=$$(git tag --list 'v[0-9]*.[0-9]*.[0-9]*' --sort=-v:refname | head -n 1); \
|
@if ! git diff --quiet || ! git diff --cached --quiet; then \
|
||||||
if [ -z "$$latest" ]; then latest="v0.0.0"; fi; \
|
echo "Refusing to release from a dirty working tree. Commit or stash changes first." >&2; \
|
||||||
version=$${latest#v}; \
|
exit 1; \
|
||||||
major=$${version%%.*}; \
|
fi
|
||||||
rest=$${version#*.}; \
|
@next_tag="$(RELEASE_VERSION)"; \
|
||||||
minor=$${rest%%.*}; \
|
if [ -z "$$next_tag" ]; then \
|
||||||
patch=$${rest##*.}; \
|
latest=$$(git tag --list 'v[0-9]*.[0-9]*.[0-9]*' --sort=-v:refname | head -n 1); \
|
||||||
next_patch=$$((patch + $(PATCH_INCREMENT))); \
|
if [ -z "$$latest" ]; then latest="v0.0.0"; fi; \
|
||||||
next_tag="v$$major.$$minor.$$next_patch"; \
|
version=$${latest#v}; \
|
||||||
|
major=$${version%%.*}; \
|
||||||
|
rest=$${version#*.}; \
|
||||||
|
minor=$${rest%%.*}; \
|
||||||
|
patch=$${rest##*.}; \
|
||||||
|
next_patch=$$((patch + $(PATCH_INCREMENT))); \
|
||||||
|
next_tag="v$$major.$$minor.$$next_patch"; \
|
||||||
|
fi; \
|
||||||
|
case "$$next_tag" in \
|
||||||
|
v[0-9]*.[0-9]*.[0-9]*) ;; \
|
||||||
|
*) echo "RELEASE_VERSION must look like vX.Y.Z (got '$$next_tag')" >&2; exit 1 ;; \
|
||||||
|
esac; \
|
||||||
if git rev-parse -q --verify "refs/tags/$$next_tag" >/dev/null; then \
|
if git rev-parse -q --verify "refs/tags/$$next_tag" >/dev/null; then \
|
||||||
echo "$$next_tag already exists" >&2; \
|
echo "$$next_tag already exists" >&2; \
|
||||||
exit 1; \
|
exit 1; \
|
||||||
fi; \
|
fi; \
|
||||||
git tag -a "$$next_tag" -m "Release $$next_tag"; \
|
git tag -a "$$next_tag" -m "Release $$next_tag"; \
|
||||||
git push origin "$$next_tag"; \
|
git push $(RELEASE_REMOTE) "$$next_tag"; \
|
||||||
echo "$$next_tag"
|
$(MAKE) release-build RELEASE_VERSION="$$next_tag"; \
|
||||||
|
echo "Released $$next_tag"
|
||||||
|
|
||||||
|
release-build:
|
||||||
|
@case "$(RELEASE_VERSION)" in \
|
||||||
|
v[0-9]*.[0-9]*.[0-9]*) ;; \
|
||||||
|
*) echo "RELEASE_VERSION must look like vX.Y.Z" >&2; exit 1 ;; \
|
||||||
|
esac
|
||||||
|
@$(MAKE) build build-cli VERSION_TAG="$(RELEASE_VERSION)"
|
||||||
|
|
||||||
migrate:
|
migrate:
|
||||||
./scripts/migrate.sh
|
./scripts/migrate.sh
|
||||||
@@ -78,6 +118,10 @@ generate-migrations:
|
|||||||
@schema_list=$$(printf '%s\n' $(SCHEMA_FILES) | paste -sd, -); \
|
@schema_list=$$(printf '%s\n' $(SCHEMA_FILES) | paste -sd, -); \
|
||||||
$(RELSPEC) merge --target dbml --target-path $(MERGE_TARGET_TMP) --source dbml --from-list "$$schema_list" --output pgsql --output-path $(GENERATED_SCHEMA_MIGRATION)
|
$(RELSPEC) merge --target dbml --target-path $(MERGE_TARGET_TMP) --source dbml --from-list "$$schema_list" --output pgsql --output-path $(GENERATED_SCHEMA_MIGRATION)
|
||||||
|
|
||||||
|
generate-models:
|
||||||
|
@test -n "$(SCHEMA_FILES)" || (echo "No DBML schema files found in schema/" >&2; exit 1)
|
||||||
|
@./scripts/generate-models.sh
|
||||||
|
|
||||||
check-schema-drift:
|
check-schema-drift:
|
||||||
@test -f $(GENERATED_SCHEMA_MIGRATION) || (echo "$(GENERATED_SCHEMA_MIGRATION) is missing; run make generate-migrations" >&2; exit 1)
|
@test -f $(GENERATED_SCHEMA_MIGRATION) || (echo "$(GENERATED_SCHEMA_MIGRATION) is missing; run make generate-migrations" >&2; exit 1)
|
||||||
@command -v $(RELSPEC) >/dev/null 2>&1 || (echo "relspec not found; install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest" >&2; exit 1)
|
@command -v $(RELSPEC) >/dev/null 2>&1 || (echo "relspec not found; install git.warky.dev/wdevs/relspecgo/cmd/relspec@latest" >&2; exit 1)
|
||||||
|
|||||||
127
README.md
127
README.md
@@ -1,24 +1,18 @@
|
|||||||
# Avalon Memory Crystal Server (amcs)
|
# AMCS Directory
|
||||||
|
|
||||||

|
This is the AMCS (Avalon Memory Control Service) directory.
|
||||||
|
|
||||||
A Go MCP server for capturing and retrieving thoughts, memory, and project context. Exposes tools over Streamable HTTP, backed by Postgres with pgvector for semantic search.
|
## Purpose
|
||||||
|
|
||||||
## What it does
|
The AMCS directory is used to store configuration and code for the Avalon Memory Control Service, which handles...
|
||||||
|
|
||||||
- **Capture** thoughts with automatic embedding and metadata extraction
|
## Structure
|
||||||
- **Search** thoughts semantically via vector similarity
|
|
||||||
- **Organise** thoughts into projects and retrieve full project context
|
|
||||||
- **Summarise** and recall memory across topics and time windows
|
|
||||||
- **Link** related thoughts and traverse relationships
|
|
||||||
|
|
||||||
## Stack
|
- `configs/` - Configuration files
|
||||||
|
- `scripts/` - Scripts for managing the system
|
||||||
|
- `assets/` - Asset files
|
||||||
|
|
||||||
- Go — MCP server over Streamable HTTP
|
## Next Steps
|
||||||
- Postgres + pgvector — storage and vector search
|
|
||||||
- LiteLLM — primary hosted AI provider (embeddings + metadata extraction)
|
|
||||||
- OpenRouter — default upstream behind LiteLLM
|
|
||||||
- Ollama — supported local or self-hosted OpenAI-compatible provider
|
|
||||||
|
|
||||||
## Tools
|
## Tools
|
||||||
|
|
||||||
@@ -37,6 +31,9 @@ A Go MCP server for capturing and retrieving thoughts, memory, and project conte
|
|||||||
| `get_project_context` | Recent + semantic context for a project; uses explicit `project` or the active session project |
|
| `get_project_context` | Recent + semantic context for a project; uses explicit `project` or the active session project |
|
||||||
| `set_active_project` | Set session project scope; requires a stateful MCP session |
|
| `set_active_project` | Set session project scope; requires a stateful MCP session |
|
||||||
| `get_active_project` | Get current session project |
|
| `get_active_project` | Get current session project |
|
||||||
|
| `add_learning` | Create a curated learning record distinct from raw thoughts |
|
||||||
|
| `get_learning` | Retrieve a structured learning by ID |
|
||||||
|
| `list_learnings` | List structured learnings by project/category/area/status/priority/tag/query |
|
||||||
| `summarize_thoughts` | LLM prose summary over a filtered set |
|
| `summarize_thoughts` | LLM prose summary over a filtered set |
|
||||||
| `recall_context` | Semantic + recency context block for injection |
|
| `recall_context` | Semantic + recency context block for injection |
|
||||||
| `link_thoughts` | Create a typed relationship between thoughts |
|
| `link_thoughts` | Create a typed relationship between thoughts |
|
||||||
@@ -72,6 +69,17 @@ A Go MCP server for capturing and retrieving thoughts, memory, and project conte
|
|||||||
| `describe_tools` | List all available MCP tools with names, descriptions, categories, and model-authored usage notes; call this at the start of a session to orient yourself |
|
| `describe_tools` | List all available MCP tools with names, descriptions, categories, and model-authored usage notes; call this at the start of a session to orient yourself |
|
||||||
| `annotate_tool` | Persist your own usage notes for a specific tool; notes are returned by `describe_tools` in future sessions |
|
| `annotate_tool` | Persist your own usage notes for a specific tool; notes are returned by `describe_tools` in future sessions |
|
||||||
|
|
||||||
|
## Learnings
|
||||||
|
|
||||||
|
Learnings are curated, structured memory records for durable insights you want to keep distinct from raw thoughts. Use them for normalized lessons, decisions, and evidence-backed findings that should be easy to retrieve and review over time.
|
||||||
|
|
||||||
|
Compared with `capture_thought`, learnings are more explicit and reviewable: they include a required `summary`, optional `details`, and structured fields like `category`, `area`, `status`, `priority`, `confidence`, and `tags`, plus optional links to a `project`, `related_thought_id`, or `related_skill_id`.
|
||||||
|
|
||||||
|
Use:
|
||||||
|
- `add_learning` to create a curated learning.
|
||||||
|
- `get_learning` to fetch one by ID.
|
||||||
|
- `list_learnings` to filter curated learnings across project and status dimensions.
|
||||||
|
|
||||||
## Self-Documenting Tools
|
## Self-Documenting Tools
|
||||||
|
|
||||||
AMCS includes a built-in tool directory that models can read and annotate.
|
AMCS includes a built-in tool directory that models can read and annotate.
|
||||||
@@ -244,12 +252,25 @@ Link existing skills and guardrails to a project so they are automatically avail
|
|||||||
Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
||||||
|
|
||||||
- `database.url` — Postgres connection string
|
- `database.url` — Postgres connection string
|
||||||
- `auth.mode` — `api_keys` or `oauth_client_credentials`
|
- `auth.keys` — static API keys for MCP access via `x-brain-key` or `Authorization: Bearer <key>`
|
||||||
- `auth.keys` — API keys for MCP access via `x-brain-key` or `Authorization: Bearer <key>` when `auth.mode=api_keys`
|
- `auth.oauth.clients` — optional OAuth client credentials registry
|
||||||
- `auth.oauth.clients` — client registry when `auth.mode=oauth_client_credentials`
|
- `ai.providers` — named provider definitions (`litellm`, `ollama`, `openrouter`)
|
||||||
|
- `ai.embeddings.primary` / `ai.metadata.primary` — primary role targets (`provider` + `model`)
|
||||||
|
- `ai.embeddings.fallbacks` / `ai.metadata.fallbacks` — sequential fallback targets
|
||||||
- `mcp.version` is build-generated and should not be set in config
|
- `mcp.version` is build-generated and should not be set in config
|
||||||
|
|
||||||
**OAuth Client Credentials flow** (`auth.mode=oauth_client_credentials`):
|
Config schema is versioned. Current schema version is `2`.
|
||||||
|
|
||||||
|
Use the migration helper to rewrite legacy configs in-place:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
go run ./cmd/amcs-migrate-config --config ./configs/dev.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `--dry-run` to print migrated YAML without writing.
|
||||||
|
Server startup migrates older config formats in memory only and does not write files.
|
||||||
|
|
||||||
|
**OAuth Client Credentials flow**:
|
||||||
|
|
||||||
1. Obtain a token — `POST /oauth/token` (public, no auth required):
|
1. Obtain a token — `POST /oauth/token` (public, no auth required):
|
||||||
```
|
```
|
||||||
@@ -267,8 +288,9 @@ Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
|||||||
```
|
```
|
||||||
|
|
||||||
Alternatively, pass `client_id` and `client_secret` as body parameters instead of `Authorization: Basic`. Direct `Authorization: Basic` credential validation on the MCP endpoint is also supported as a fallback (no token required).
|
Alternatively, pass `client_id` and `client_secret` as body parameters instead of `Authorization: Basic`. Direct `Authorization: Basic` credential validation on the MCP endpoint is also supported as a fallback (no token required).
|
||||||
- `ai.litellm.base_url` and `ai.litellm.api_key` — LiteLLM proxy
|
- `AMCS_LITELLM_BASE_URL` / `AMCS_LITELLM_API_KEY` override all configured LiteLLM providers
|
||||||
- `ai.ollama.base_url` and `ai.ollama.api_key` — Ollama local or remote server
|
- `AMCS_OLLAMA_BASE_URL` / `AMCS_OLLAMA_API_KEY` override all configured Ollama providers
|
||||||
|
- `AMCS_OPENROUTER_API_KEY` overrides all configured OpenRouter providers
|
||||||
|
|
||||||
See `llm/plan.md` for an audited high-level status summary of the original implementation plan, and `llm/todo.md` for the audited backfill/fallback follow-up status.
|
See `llm/plan.md` for an audited high-level status summary of the original implementation plan, and `llm/todo.md` for the audited backfill/fallback follow-up status.
|
||||||
|
|
||||||
@@ -558,7 +580,7 @@ server: https://your-amcs-server
|
|||||||
token: your-bearer-token
|
token: your-bearer-token
|
||||||
```
|
```
|
||||||
|
|
||||||
Env vars override the config file: `AMCS_URL`, `AMCS_TOKEN`. Flags `--server` and `--token` override env vars.
|
Env vars override the config file: `AMCS_SERVER` (preferred), `AMCS_URL` (legacy alias), and `AMCS_TOKEN`. Flags `--server` and `--token` override env vars.
|
||||||
|
|
||||||
### stdio MCP client setup
|
### stdio MCP client setup
|
||||||
|
|
||||||
@@ -572,7 +594,7 @@ With inline credentials (no config file):
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
claude mcp add --transport stdio amcs amcs-cli stdio \
|
claude mcp add --transport stdio amcs amcs-cli stdio \
|
||||||
--env AMCS_URL=https://your-amcs-server \
|
--env AMCS_SERVER=https://your-amcs-server \
|
||||||
--env AMCS_TOKEN=your-bearer-token
|
--env AMCS_TOKEN=your-bearer-token
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -590,6 +612,26 @@ Run the SQL migrations against a local database with:
|
|||||||
|
|
||||||
The web UI now lives in the top-level `ui/` module and is embedded into the Go binary at build time with `go:embed`.
|
The web UI now lives in the top-level `ui/` module and is embedded into the Go binary at build time with `go:embed`.
|
||||||
|
|
||||||
|
### Admin UI deployment model
|
||||||
|
|
||||||
|
AMCS uses a **lightweight embedded SPA panel** model:
|
||||||
|
|
||||||
|
- the Svelte admin app is compiled to static assets
|
||||||
|
- assets are embedded in the server binary and served from `/`
|
||||||
|
- backend APIs (`/api/status`, `/api/rs/*`, admin action routes, OAuth endpoints) stay on the same origin
|
||||||
|
- auth is enforced server-side for all sensitive API routes
|
||||||
|
|
||||||
|
This keeps deployment simple (single binary/container) while preserving SPA ergonomics for operator workflows.
|
||||||
|
|
||||||
|
### UI stack baseline
|
||||||
|
|
||||||
|
The admin frontend baseline is:
|
||||||
|
|
||||||
|
- Svelte 5 for the app shell and pages
|
||||||
|
- ResolveSpec-backed APIs for data access
|
||||||
|
- `@warkypublic/svelix` for admin UX components (including `GridlerFull` and form controllers)
|
||||||
|
- `@warkypublic/artemis-kit` as the default JavaScript tooling dependency baseline in `ui/package.json`
|
||||||
|
|
||||||
**Use `pnpm` for all UI work in this repo.**
|
**Use `pnpm` for all UI work in this repo.**
|
||||||
|
|
||||||
- `make build` — runs the real UI build first, then compiles the Go server
|
- `make build` — runs the real UI build first, then compiles the Go server
|
||||||
@@ -641,29 +683,50 @@ Notes:
|
|||||||
- Database migrations `001` through `005` run automatically when the Postgres volume is created for the first time.
|
- Database migrations `001` through `005` run automatically when the Postgres volume is created for the first time.
|
||||||
- `migrations/006_rls_and_grants.sql` is intentionally skipped during container bootstrap because it contains deployment-specific grants for a role named `amcs_user`.
|
- `migrations/006_rls_and_grants.sql` is intentionally skipped during container bootstrap because it contains deployment-specific grants for a role named `amcs_user`.
|
||||||
|
|
||||||
|
### Run config migration with Compose
|
||||||
|
|
||||||
|
The container image now includes `/app/amcs-migrate-config`.
|
||||||
|
|
||||||
|
Dry-run (prints migrated YAML, does not write files):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml --dry-run
|
||||||
|
```
|
||||||
|
|
||||||
|
Apply migration in-place (writes file + creates backup):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml
|
||||||
|
```
|
||||||
|
|
||||||
## Ollama
|
## Ollama
|
||||||
|
|
||||||
Set `ai.provider: "ollama"` to use a local or self-hosted Ollama server through its OpenAI-compatible API.
|
Set your role targets to an Ollama provider to use a local or self-hosted Ollama server through its OpenAI-compatible API.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
ai:
|
ai:
|
||||||
provider: "ollama"
|
providers:
|
||||||
|
local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
|
request_headers: {}
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "nomic-embed-text"
|
|
||||||
dimensions: 768
|
dimensions: 768
|
||||||
|
primary:
|
||||||
|
provider: "local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
metadata:
|
metadata:
|
||||||
model: "llama3.2"
|
|
||||||
temperature: 0.1
|
temperature: 0.1
|
||||||
ollama:
|
primary:
|
||||||
base_url: "http://localhost:11434/v1"
|
provider: "local"
|
||||||
api_key: "ollama"
|
model: "llama3.2"
|
||||||
request_headers: {}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
|
|
||||||
- For remote Ollama servers, point `ai.ollama.base_url` at the remote `/v1` endpoint.
|
- For remote Ollama servers, point `ai.providers.<name>.base_url` at the remote `/v1` endpoint.
|
||||||
- The client always sends Bearer auth; Ollama ignores it locally, so `api_key: "ollama"` is a safe default.
|
- The client always sends Bearer auth; Ollama ignores it locally, so `api_key: "ollama"` is a safe default.
|
||||||
- `ai.embeddings.dimensions` must match the embedding model you actually use, or startup will fail the database vector-dimension check.
|
- `ai.embeddings.dimensions` must match the embedding model you actually use, or startup will fail the database vector-dimension check.
|
||||||
|
|||||||
90
changelog.md
Normal file
90
changelog.md
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-04-21
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Config Schema v2 Introduced
|
||||||
|
|
||||||
|
- Refactored configuration to schema version `2` with named AI providers and role-based model chains.
|
||||||
|
- Added support for per-role primary and fallback targets for embeddings and metadata.
|
||||||
|
- Added optional background role overrides for backfill and metadata retry workers.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Automatic v1 -> v2 Migration
|
||||||
|
|
||||||
|
- Added config migration framework with explicit schema versioning.
|
||||||
|
- Implemented `v1 -> v2` migration to transform legacy provider blocks into named providers + role chains.
|
||||||
|
- Loader now auto-migrates older config files, rewrites migrated YAML, and creates timestamped backups.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - AI Registry and Role Runners
|
||||||
|
|
||||||
|
- Added `ai.Registry` to build provider clients from named provider config entries.
|
||||||
|
- Added `EmbeddingRunner` and `MetadataRunner` with sequential fallback execution.
|
||||||
|
- Added target health tracking with cooldowns for transient/permanent/empty-response failures.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - App and Tool Wiring Updates
|
||||||
|
|
||||||
|
- Rewired app startup to use provider registry + role runners for foreground and background flows.
|
||||||
|
- Updated capture, search, summarize, context, recall, backfill, metadata retry, and reparse paths to use new runners.
|
||||||
|
- Preserved environment override behavior for provider credentials/endpoints across matching provider types.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Migrate Config CLI Added
|
||||||
|
|
||||||
|
- Added `cmd/amcs-migrate-config` CLI to migrate config files to the current schema version.
|
||||||
|
- Supports dry-run output and in-place write mode with automatic backup file creation.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Tests and Documentation Updated
|
||||||
|
|
||||||
|
- Added focused tests for config migration, AI registry behavior, and runner fallback behavior.
|
||||||
|
- Updated `configs/config.example.yaml` to the new v2 schema.
|
||||||
|
- Updated README configuration sections and migration guidance to reflect v2 and `amcs-migrate-config` usage.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Uncommitted File Change List
|
||||||
|
|
||||||
|
- Modified: `.gitignore`
|
||||||
|
- Modified: `README.md`
|
||||||
|
- Modified: `configs/config.example.yaml`
|
||||||
|
- Modified: `internal/ai/compat/client.go`
|
||||||
|
- Modified: `internal/ai/compat/client_test.go`
|
||||||
|
- Modified: `internal/app/app.go`
|
||||||
|
- Modified: `internal/config/config.go`
|
||||||
|
- Modified: `internal/config/loader.go`
|
||||||
|
- Modified: `internal/config/loader_test.go`
|
||||||
|
- Modified: `internal/config/validate.go`
|
||||||
|
- Modified: `internal/config/validate_test.go`
|
||||||
|
- Modified: `internal/mcpserver/server.go`
|
||||||
|
- Modified: `internal/mcpserver/streamable_integration_test.go`
|
||||||
|
- Modified: `internal/tools/backfill.go`
|
||||||
|
- Modified: `internal/tools/capture.go`
|
||||||
|
- Modified: `internal/tools/context.go`
|
||||||
|
- Modified: `internal/tools/enrichment_retry.go`
|
||||||
|
- Modified: `internal/tools/links.go`
|
||||||
|
- Modified: `internal/tools/metadata_retry.go`
|
||||||
|
- Modified: `internal/tools/recall.go`
|
||||||
|
- Modified: `internal/tools/reparse_metadata.go`
|
||||||
|
- Modified: `internal/tools/retrieval.go`
|
||||||
|
- Modified: `internal/tools/search.go`
|
||||||
|
- Modified: `internal/tools/summarize.go`
|
||||||
|
- Modified: `internal/tools/update.go`
|
||||||
|
- Deleted: `internal/ai/factory.go`
|
||||||
|
- Deleted: `internal/ai/factory_test.go`
|
||||||
|
- Deleted: `internal/ai/litellm/client.go`
|
||||||
|
- Deleted: `internal/ai/ollama/client.go`
|
||||||
|
- Deleted: `internal/ai/openrouter/client.go`
|
||||||
|
- Deleted: `internal/ai/provider.go`
|
||||||
|
- New: `changelog.md`
|
||||||
|
- New: `cmd/amcs-migrate-config/main.go`
|
||||||
|
- New: `internal/ai/registry.go`
|
||||||
|
- New: `internal/ai/registry_test.go`
|
||||||
|
- New: `internal/ai/runner.go`
|
||||||
|
- New: `internal/ai/runner_test.go`
|
||||||
|
- New: `internal/config/migrate.go`
|
||||||
|
- New: `internal/config/migrate_test.go`
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Docker Support for Config Migration CLI
|
||||||
|
- Added `amcs-migrate-config` binary to the Docker image build output.
|
||||||
|
- Added `migrate-config` service in `docker-compose.yml` under the `tools` profile.
|
||||||
|
- Documented compose-based migration commands (dry-run and in-place apply) in the README.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Startup Migration Write Disabled
|
||||||
|
- Changed config loading to migrate legacy schemas in memory only during startup.
|
||||||
|
- Removed automatic file rewrite and backup creation from the startup config loader.
|
||||||
|
- Added loader log hint to use `amcs-migrate-config` when persistent conversion is needed.
|
||||||
@@ -6,7 +6,6 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
@@ -17,9 +16,12 @@ var (
|
|||||||
serverFlag string
|
serverFlag string
|
||||||
tokenFlag string
|
tokenFlag string
|
||||||
outputFlag string
|
outputFlag string
|
||||||
|
verbose bool
|
||||||
cfg Config
|
cfg Config
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const cliUserAgent = "amcs-cli/0.0.1"
|
||||||
|
|
||||||
var rootCmd = &cobra.Command{
|
var rootCmd = &cobra.Command{
|
||||||
Use: "amcs-cli",
|
Use: "amcs-cli",
|
||||||
Short: "CLI for connecting to a remote AMCS MCP server",
|
Short: "CLI for connecting to a remote AMCS MCP server",
|
||||||
@@ -42,6 +44,7 @@ func init() {
|
|||||||
rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL")
|
rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL")
|
||||||
rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token")
|
rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token")
|
||||||
rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml")
|
rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml")
|
||||||
|
rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "Enable verbose logging to stderr")
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadConfig() error {
|
func loadConfig() error {
|
||||||
@@ -54,6 +57,9 @@ func loadConfig() error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
cfg = loaded
|
cfg = loaded
|
||||||
|
if v := strings.TrimSpace(os.Getenv("AMCS_SERVER")); v != "" {
|
||||||
|
cfg.Server = v
|
||||||
|
}
|
||||||
if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" {
|
if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" {
|
||||||
cfg.Server = v
|
cfg.Server = v
|
||||||
}
|
}
|
||||||
@@ -75,7 +81,7 @@ func loadConfig() error {
|
|||||||
|
|
||||||
func requireServer() error {
|
func requireServer() error {
|
||||||
if strings.TrimSpace(cfg.Server) == "" {
|
if strings.TrimSpace(cfg.Server) == "" {
|
||||||
return fmt.Errorf("server URL is required; set --server, AMCS_URL, or config server")
|
return fmt.Errorf("server URL is required; set --server, AMCS_SERVER, AMCS_URL, or config server")
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -109,6 +115,9 @@ func (t *bearerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
|||||||
base = http.DefaultTransport
|
base = http.DefaultTransport
|
||||||
}
|
}
|
||||||
clone := req.Clone(req.Context())
|
clone := req.Clone(req.Context())
|
||||||
|
if strings.TrimSpace(clone.Header.Get("User-Agent")) == "" {
|
||||||
|
clone.Header.Set("User-Agent", cliUserAgent)
|
||||||
|
}
|
||||||
if strings.TrimSpace(t.token) != "" {
|
if strings.TrimSpace(t.token) != "" {
|
||||||
clone.Header.Set("Authorization", "Bearer "+t.token)
|
clone.Header.Set("Authorization", "Bearer "+t.token)
|
||||||
}
|
}
|
||||||
@@ -119,16 +128,24 @@ func connectRemote(ctx context.Context) (*mcp.ClientSession, error) {
|
|||||||
if err := requireServer(); err != nil {
|
if err := requireServer(); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
verboseLogf("connecting to %s", endpointURL())
|
||||||
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
||||||
transport := &mcp.StreamableClientTransport{
|
transport := &mcp.StreamableClientTransport{
|
||||||
Endpoint: endpointURL(),
|
Endpoint: endpointURL(),
|
||||||
HTTPClient: newHTTPClient(),
|
HTTPClient: newHTTPClient(),
|
||||||
|
DisableStandaloneSSE: true,
|
||||||
}
|
}
|
||||||
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
|
||||||
defer cancel()
|
|
||||||
session, err := client.Connect(ctx, transport, nil)
|
session, err := client.Connect(ctx, transport, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("connect to AMCS server: %w", err)
|
return nil, fmt.Errorf("connect to AMCS server: %w", err)
|
||||||
}
|
}
|
||||||
|
verboseLogf("connected to %s", endpointURL())
|
||||||
return session, nil
|
return session, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func verboseLogf(format string, args ...any) {
|
||||||
|
if !verbose {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_, _ = fmt.Fprintf(os.Stderr, "[amcs-cli] "+format+"\n", args...)
|
||||||
|
}
|
||||||
|
|||||||
35
cmd/amcs-cli/cmd/root_test.go
Normal file
35
cmd/amcs-cli/cmd/root_test.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBearerTransportFormatsBearerToken(t *testing.T) {
|
||||||
|
const want = "Bearer X"
|
||||||
|
const wantUA = "amcs-cli/0.0.1"
|
||||||
|
|
||||||
|
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if got := r.Header.Get("Authorization"); got != want {
|
||||||
|
t.Fatalf("Authorization header = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
if got := r.Header.Get("User-Agent"); got != wantUA {
|
||||||
|
t.Fatalf("User-Agent header = %q, want %q", got, wantUA)
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
defer ts.Close()
|
||||||
|
|
||||||
|
client := &http.Client{Transport: &bearerTransport{token: "X"}}
|
||||||
|
req, err := http.NewRequest(http.MethodGet, ts.URL, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRequest() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("client.Do() error = %v", err)
|
||||||
|
}
|
||||||
|
_ = res.Body.Close()
|
||||||
|
}
|
||||||
@@ -4,7 +4,6 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
@@ -26,14 +25,13 @@ var sseCmd = &cobra.Command{
|
|||||||
HTTPClient: newHTTPClient(),
|
HTTPClient: newHTTPClient(),
|
||||||
}
|
}
|
||||||
|
|
||||||
connectCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
verboseLogf("connecting to SSE endpoint %s", sseEndpointURL())
|
||||||
defer cancel()
|
remote, err := client.Connect(ctx, transport, nil)
|
||||||
|
|
||||||
remote, err := client.Connect(connectCtx, transport, nil)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("connect to AMCS SSE endpoint: %w", err)
|
return fmt.Errorf("connect to AMCS SSE endpoint: %w", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = remote.Close() }()
|
defer func() { _ = remote.Close() }()
|
||||||
|
verboseLogf("connected to SSE endpoint %s", sseEndpointURL())
|
||||||
|
|
||||||
tools, err := remote.ListTools(ctx, nil)
|
tools, err := remote.ListTools(ctx, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -67,6 +65,8 @@ var sseCmd = &cobra.Command{
|
|||||||
return fmt.Errorf("start stdio bridge: %w", err)
|
return fmt.Errorf("start stdio bridge: %w", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = session.Close() }()
|
defer func() { _ = session.Close() }()
|
||||||
|
verboseLogf("sse stdio bridge ready")
|
||||||
|
verboseLogf("waiting for MCP commands on stdin")
|
||||||
|
|
||||||
<-ctx.Done()
|
<-ctx.Done()
|
||||||
return nil
|
return nil
|
||||||
@@ -75,6 +75,9 @@ var sseCmd = &cobra.Command{
|
|||||||
|
|
||||||
func sseEndpointURL() string {
|
func sseEndpointURL() string {
|
||||||
base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/")
|
base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/")
|
||||||
|
if strings.HasSuffix(base, "/mcp") {
|
||||||
|
base = strings.TrimSuffix(base, "/mcp")
|
||||||
|
}
|
||||||
if strings.HasSuffix(base, "/sse") {
|
if strings.HasSuffix(base, "/sse") {
|
||||||
return base
|
return base
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -51,6 +51,8 @@ var stdioCmd = &cobra.Command{
|
|||||||
return fmt.Errorf("start stdio bridge: %w", err)
|
return fmt.Errorf("start stdio bridge: %w", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = session.Close() }()
|
defer func() { _ = session.Close() }()
|
||||||
|
verboseLogf("stdio bridge connected to remote AMCS and ready")
|
||||||
|
verboseLogf("waiting for MCP commands on stdin")
|
||||||
|
|
||||||
<-ctx.Done()
|
<-ctx.Done()
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
105
cmd/amcs-migrate-config/main.go
Normal file
105
cmd/amcs-migrate-config/main.go
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
var (
|
||||||
|
configPath string
|
||||||
|
dryRun bool
|
||||||
|
toVersion int
|
||||||
|
)
|
||||||
|
flag.StringVar(&configPath, "config", "", "Path to the YAML config file (default: $AMCS_CONFIG or ./configs/dev.yaml)")
|
||||||
|
flag.BoolVar(&dryRun, "dry-run", false, "Print the migrated config to stdout instead of writing it back")
|
||||||
|
flag.IntVar(&toVersion, "to-version", config.CurrentConfigVersion, "Stop migrating after reaching this version")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if toVersion <= 0 || toVersion > config.CurrentConfigVersion {
|
||||||
|
log.Fatalf("invalid -to-version %d (must be between 1 and %d)", toVersion, config.CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
path := config.ResolvePath(configPath)
|
||||||
|
original, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("read config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := map[string]any{}
|
||||||
|
if err := yaml.Unmarshal(original, &raw); err != nil {
|
||||||
|
log.Fatalf("decode config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
if raw == nil {
|
||||||
|
raw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := migrateUpTo(raw, toVersion)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("migrate: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(applied) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s already at version %d; nothing to do\n", path, currentVersion(raw))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
out, err := yaml.Marshal(raw)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("marshal migrated config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, step := range applied {
|
||||||
|
fmt.Fprintf(os.Stderr, "applied migration v%d -> v%d: %s\n", step.From, step.To, step.Describe)
|
||||||
|
}
|
||||||
|
|
||||||
|
if dryRun {
|
||||||
|
_, _ = os.Stdout.Write(out)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
backup := fmt.Sprintf("%s.bak.%d", path, time.Now().Unix())
|
||||||
|
if err := os.WriteFile(backup, original, 0o600); err != nil {
|
||||||
|
log.Fatalf("write backup %q: %v", backup, err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(path, out, 0o600); err != nil {
|
||||||
|
log.Fatalf("write migrated config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, "wrote migrated config to %s (backup: %s)\n", path, backup)
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateUpTo runs the migration ladder but stops at the requested version.
|
||||||
|
func migrateUpTo(raw map[string]any, target int) ([]config.ConfigMigration, error) {
|
||||||
|
if currentVersion(raw) >= target {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if target == config.CurrentConfigVersion {
|
||||||
|
return config.Migrate(raw)
|
||||||
|
}
|
||||||
|
// Partial migrations are rare; for now reject anything other than the
|
||||||
|
// current version target since the migration ladder is short.
|
||||||
|
return nil, fmt.Errorf("partial migration to v%d is not supported (use -to-version=%d)", target, config.CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
func currentVersion(raw map[string]any) int {
|
||||||
|
v, ok := raw["version"]
|
||||||
|
if !ok {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
switch n := v.(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 1
|
||||||
|
}
|
||||||
@@ -1,3 +1,5 @@
|
|||||||
|
version: 2
|
||||||
|
|
||||||
server:
|
server:
|
||||||
host: "0.0.0.0"
|
host: "0.0.0.0"
|
||||||
port: 8080
|
port: 8080
|
||||||
@@ -27,7 +29,7 @@ auth:
|
|||||||
- id: "oauth-client"
|
- id: "oauth-client"
|
||||||
client_id: ""
|
client_id: ""
|
||||||
client_secret: ""
|
client_secret: ""
|
||||||
description: "used when auth.mode=oauth_client_credentials"
|
description: "optional OAuth client credentials"
|
||||||
|
|
||||||
database:
|
database:
|
||||||
url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable"
|
url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable"
|
||||||
@@ -37,33 +39,58 @@ database:
|
|||||||
max_conn_idle_time: "10m"
|
max_conn_idle_time: "10m"
|
||||||
|
|
||||||
ai:
|
ai:
|
||||||
provider: "litellm"
|
providers:
|
||||||
|
default:
|
||||||
|
type: "litellm"
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "replace-me"
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
|
ollama_local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
|
openrouter:
|
||||||
|
type: "openrouter"
|
||||||
|
base_url: "https://openrouter.ai/api/v1"
|
||||||
|
api_key: "replace-me"
|
||||||
|
app_name: "amcs"
|
||||||
|
site_url: ""
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "openai/text-embedding-3-small"
|
|
||||||
dimensions: 1536
|
dimensions: 1536
|
||||||
|
primary:
|
||||||
|
provider: "default"
|
||||||
|
model: "openai/text-embedding-3-small"
|
||||||
|
fallbacks:
|
||||||
|
- provider: "ollama_local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
|
|
||||||
metadata:
|
metadata:
|
||||||
model: "gpt-4o-mini"
|
|
||||||
fallback_models: []
|
|
||||||
temperature: 0.1
|
temperature: 0.1
|
||||||
log_conversations: false
|
log_conversations: false
|
||||||
litellm:
|
timeout: "10s"
|
||||||
base_url: "http://localhost:4000/v1"
|
primary:
|
||||||
api_key: "replace-me"
|
provider: "default"
|
||||||
use_responses_api: false
|
model: "gpt-4o-mini"
|
||||||
request_headers: {}
|
fallbacks:
|
||||||
embedding_model: "openrouter/openai/text-embedding-3-small"
|
- provider: "openrouter"
|
||||||
metadata_model: "gpt-4o-mini"
|
model: "openai/gpt-4.1-mini"
|
||||||
fallback_metadata_models: []
|
|
||||||
ollama:
|
# Optional overrides for background jobs (backfill_embeddings,
|
||||||
base_url: "http://localhost:11434/v1"
|
# retry_failed_metadata, reparse_thought_metadata).
|
||||||
api_key: "ollama"
|
background:
|
||||||
request_headers: {}
|
embeddings:
|
||||||
openrouter:
|
primary:
|
||||||
base_url: "https://openrouter.ai/api/v1"
|
provider: "default"
|
||||||
api_key: ""
|
model: "openai/text-embedding-3-small"
|
||||||
app_name: "amcs"
|
metadata:
|
||||||
site_url: ""
|
primary:
|
||||||
extra_headers: {}
|
provider: "default"
|
||||||
|
model: "gpt-4o-mini"
|
||||||
|
|
||||||
capture:
|
capture:
|
||||||
source: "mcp"
|
source: "mcp"
|
||||||
|
|||||||
@@ -25,8 +25,8 @@ auth:
|
|||||||
oauth:
|
oauth:
|
||||||
clients:
|
clients:
|
||||||
- id: "oauth-client"
|
- id: "oauth-client"
|
||||||
client_id: ""
|
client_id: "test_aab32200464910ab697efbd760e7ed2c"
|
||||||
client_secret: ""
|
client_secret: "test_135369559a422b4b93fcb534a4aed2c9"
|
||||||
description: "used when auth.mode=oauth_client_credentials"
|
description: "used when auth.mode=oauth_client_credentials"
|
||||||
|
|
||||||
database:
|
database:
|
||||||
|
|||||||
@@ -36,6 +36,18 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "8080:8080"
|
- "8080:8080"
|
||||||
|
|
||||||
|
migrate-config:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
profiles: ["tools"]
|
||||||
|
restart: "no"
|
||||||
|
volumes:
|
||||||
|
- ./configs:/app/configs
|
||||||
|
environment:
|
||||||
|
AMCS_CONFIG: /app/configs/docker.yaml
|
||||||
|
entrypoint: ["/app/amcs-migrate-config"]
|
||||||
|
command: ["--config", "/app/configs/docker.yaml", "--dry-run"]
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
postgres_data:
|
postgres_data:
|
||||||
|
|
||||||
|
|||||||
64
go.mod
64
go.mod
@@ -3,29 +3,85 @@ module git.warky.dev/wdevs/amcs
|
|||||||
go 1.26.1
|
go 1.26.1
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/bitechdev/ResolveSpec v1.0.87
|
||||||
github.com/google/jsonschema-go v0.4.2
|
github.com/google/jsonschema-go v0.4.2
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/jackc/pgx/v5 v5.9.1
|
github.com/jackc/pgx/v5 v5.9.1
|
||||||
github.com/modelcontextprotocol/go-sdk v1.4.1
|
github.com/modelcontextprotocol/go-sdk v1.4.1
|
||||||
github.com/pgvector/pgvector-go v0.3.0
|
github.com/pgvector/pgvector-go v0.3.0
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
golang.org/x/sync v0.17.0
|
github.com/uptrace/bun v1.2.16
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.16
|
||||||
|
github.com/uptrace/bun/driver/pgdriver v1.1.12
|
||||||
|
github.com/uptrace/bunrouter v1.0.23
|
||||||
|
golang.org/x/sync v0.19.0
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/beorn7/perks v1.0.1 // indirect
|
||||||
|
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // indirect
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0 // indirect
|
||||||
|
github.com/getsentry/sentry-go v0.40.0 // indirect
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
|
||||||
|
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||||
|
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||||
|
github.com/gorilla/mux v1.8.1 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||||
github.com/kr/text v0.2.0 // indirect
|
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||||
|
github.com/jinzhu/now v1.1.5 // indirect
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.33 // indirect
|
||||||
|
github.com/microsoft/go-mssqldb v1.9.5 // indirect
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/prometheus/client_golang v1.23.2 // indirect
|
||||||
|
github.com/prometheus/client_model v0.6.2 // indirect
|
||||||
|
github.com/prometheus/common v0.67.4 // indirect
|
||||||
|
github.com/prometheus/procfs v0.19.2 // indirect
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||||
|
github.com/redis/go-redis/v9 v9.17.2 // indirect
|
||||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||||
|
github.com/sagikazarmark/locafero v0.12.0 // indirect
|
||||||
github.com/segmentio/asm v1.1.3 // indirect
|
github.com/segmentio/asm v1.1.3 // indirect
|
||||||
github.com/segmentio/encoding v0.5.4 // indirect
|
github.com/segmentio/encoding v0.5.4 // indirect
|
||||||
github.com/spf13/pflag v1.0.9 // indirect
|
github.com/shopspring/decimal v1.4.0 // indirect
|
||||||
|
github.com/spf13/afero v1.15.0 // indirect
|
||||||
|
github.com/spf13/cast v1.10.0 // indirect
|
||||||
|
github.com/spf13/pflag v1.0.10 // indirect
|
||||||
|
github.com/spf13/viper v1.21.0 // indirect
|
||||||
|
github.com/stretchr/testify v1.11.1 // indirect
|
||||||
|
github.com/subosito/gotenv v1.6.0 // indirect
|
||||||
|
github.com/tidwall/gjson v1.18.0 // indirect
|
||||||
|
github.com/tidwall/match v1.2.0 // indirect
|
||||||
|
github.com/tidwall/pretty v1.2.1 // indirect
|
||||||
|
github.com/tidwall/sjson v1.2.5 // indirect
|
||||||
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
|
||||||
|
github.com/uptrace/bun/dialect/mssqldialect v1.2.16 // indirect
|
||||||
|
github.com/uptrace/bun/dialect/sqlitedialect v1.2.16 // indirect
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
github.com/x448/float16 v0.8.4 // indirect
|
github.com/x448/float16 v0.8.4 // indirect
|
||||||
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
|
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
|
||||||
|
go.uber.org/multierr v1.11.0 // indirect
|
||||||
|
go.uber.org/zap v1.27.1 // indirect
|
||||||
|
go.yaml.in/yaml/v2 v2.4.3 // indirect
|
||||||
|
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||||
|
golang.org/x/crypto v0.46.0 // indirect
|
||||||
|
golang.org/x/mod v0.31.0 // indirect
|
||||||
golang.org/x/oauth2 v0.34.0 // indirect
|
golang.org/x/oauth2 v0.34.0 // indirect
|
||||||
golang.org/x/sys v0.40.0 // indirect
|
golang.org/x/sys v0.40.0 // indirect
|
||||||
golang.org/x/text v0.29.0 // indirect
|
golang.org/x/text v0.32.0 // indirect
|
||||||
|
google.golang.org/protobuf v1.36.11 // indirect
|
||||||
|
gorm.io/driver/postgres v1.6.0 // indirect
|
||||||
|
gorm.io/driver/sqlite v1.6.0 // indirect
|
||||||
|
gorm.io/driver/sqlserver v1.6.3 // indirect
|
||||||
|
gorm.io/gorm v1.31.1 // indirect
|
||||||
|
mellium.im/sasl v0.3.1 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
417
go.sum
417
go.sum
@@ -1,22 +1,129 @@
|
|||||||
|
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
|
||||||
|
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
|
||||||
entgo.io/ent v0.14.3 h1:wokAV/kIlH9TeklJWGGS7AYJdVckr0DloWjIcO9iIIQ=
|
entgo.io/ent v0.14.3 h1:wokAV/kIlH9TeklJWGGS7AYJdVckr0DloWjIcO9iIIQ=
|
||||||
entgo.io/ent v0.14.3/go.mod h1:aDPE/OziPEu8+OWbzy4UlvWmD2/kbRuWfK2A40hcxJM=
|
entgo.io/ent v0.14.3/go.mod h1:aDPE/OziPEu8+OWbzy4UlvWmD2/kbRuWfK2A40hcxJM=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1/go.mod h1:a6xsAQUZg+VsS3TJ05SRp524Hs4pZ/AeFSr5ENf0Yjo=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZba3YZqeTNJPtvqZoBu1sBN/L4sry+u2U3Y75w=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||||
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||||
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||||
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||||
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||||
|
github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
|
||||||
|
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||||
|
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||||
|
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||||
|
github.com/bitechdev/ResolveSpec v1.0.86 h1:a4yFMMDizrmvDOV61cj/+kD+mEtKL/5EIHY2GcP3uJU=
|
||||||
|
github.com/bitechdev/ResolveSpec v1.0.86/go.mod h1:YZOY2YCD0Kmb+pjAMhOqPh4q82Hij57F/CLlCMkzT78=
|
||||||
|
github.com/bitechdev/ResolveSpec v1.0.87 h1:zLiHynLK8LLpXIfCZOjL5Iy1COBS6YZcWE1BHKfYqbA=
|
||||||
|
github.com/bitechdev/ResolveSpec v1.0.87/go.mod h1:YZOY2YCD0Kmb+pjAMhOqPh4q82Hij57F/CLlCMkzT78=
|
||||||
|
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf h1:TqhNAT4zKbTdLa62d2HDBFdvgSbIGB3eJE8HqhgiL9I=
|
||||||
|
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf/go.mod h1:r5xuitiExdLAJ09PR7vBVENGvp4ZuTBeWTGtxuX3K+c=
|
||||||
|
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||||
|
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||||
|
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||||
|
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||||
|
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||||
|
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||||
|
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||||
|
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||||
|
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||||
|
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||||
|
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||||
|
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||||
|
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
|
||||||
|
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||||
|
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
|
||||||
|
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||||
|
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||||
|
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||||
|
github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko=
|
||||||
|
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
|
||||||
|
github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM=
|
||||||
|
github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
|
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
|
||||||
|
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
|
||||||
|
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||||
|
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
|
github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
|
||||||
|
github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||||
|
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||||
|
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||||
|
github.com/getsentry/sentry-go v0.40.0 h1:VTJMN9zbTvqDqPwheRVLcp0qcUcM+8eFivvGocAaSbo=
|
||||||
|
github.com/getsentry/sentry-go v0.40.0/go.mod h1:eRXCoh3uvmjQLY6qu63BjUZnaBu5L5WhMV1RwYO8W5s=
|
||||||
|
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||||
|
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||||
|
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||||
|
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||||
|
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||||
|
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||||
|
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
||||||
|
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||||
github.com/go-pg/pg/v10 v10.11.0 h1:CMKJqLgTrfpE/aOVeLdybezR2om071Vh38OLZjsyMI0=
|
github.com/go-pg/pg/v10 v10.11.0 h1:CMKJqLgTrfpE/aOVeLdybezR2om071Vh38OLZjsyMI0=
|
||||||
github.com/go-pg/pg/v10 v10.11.0/go.mod h1:4BpHRoxE61y4Onpof3x1a2SQvi9c+q1dJnrNdMjsroA=
|
github.com/go-pg/pg/v10 v10.11.0/go.mod h1:4BpHRoxE61y4Onpof3x1a2SQvi9c+q1dJnrNdMjsroA=
|
||||||
github.com/go-pg/zerochecker v0.2.0 h1:pp7f72c3DobMWOb2ErtZsnrPaSvHd2W4o9//8HtF4mU=
|
github.com/go-pg/zerochecker v0.2.0 h1:pp7f72c3DobMWOb2ErtZsnrPaSvHd2W4o9//8HtF4mU=
|
||||||
github.com/go-pg/zerochecker v0.2.0/go.mod h1:NJZ4wKL0NmTtz0GKCoJ8kym6Xn/EQzXRl2OnAe7MmDo=
|
github.com/go-pg/zerochecker v0.2.0/go.mod h1:NJZ4wKL0NmTtz0GKCoJ8kym6Xn/EQzXRl2OnAe7MmDo=
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||||
|
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||||
|
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||||
|
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||||
|
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
|
||||||
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8=
|
github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8=
|
||||||
github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
|
github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
|
||||||
|
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
|
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||||
|
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||||
|
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
|
||||||
|
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
|
||||||
|
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
|
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
@@ -27,52 +134,181 @@ github.com/jackc/pgx/v5 v5.9.1 h1:uwrxJXBnx76nyISkhr33kQLlUqjv7et7b9FjCen/tdc=
|
|||||||
github.com/jackc/pgx/v5 v5.9.1/go.mod h1:mal1tBGAFfLHvZzaYh77YS/eC6IX9OWbRV1QIIM0Jn4=
|
github.com/jackc/pgx/v5 v5.9.1/go.mod h1:mal1tBGAFfLHvZzaYh77YS/eC6IX9OWbRV1QIIM0Jn4=
|
||||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
|
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
|
||||||
|
github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
|
||||||
|
github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
|
||||||
|
github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
|
||||||
|
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
|
||||||
|
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
|
||||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||||
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
|
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
|
||||||
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
|
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
|
||||||
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
|
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
|
||||||
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
|
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||||
|
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||||
|
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||||
|
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||||
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||||
|
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||||
|
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
|
||||||
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||||
|
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
||||||
|
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||||
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.33/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
|
github.com/microsoft/go-mssqldb v1.8.2/go.mod h1:vp38dT33FGfVotRiTmDo3bFyaHq+p3LektQrjTULowo=
|
||||||
|
github.com/microsoft/go-mssqldb v1.9.5 h1:orwya0X/5bsL1o+KasupTkk2eNTNFkTQG0BEe/HxCn0=
|
||||||
|
github.com/microsoft/go-mssqldb v1.9.5/go.mod h1:VCP2a0KEZZtGLRHd1PsLavLFYy/3xX2yJUPycv3Sr2Q=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||||
|
github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ=
|
||||||
|
github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo=
|
||||||
|
github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
|
||||||
|
github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
|
||||||
|
github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
|
||||||
|
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
|
||||||
|
github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs=
|
||||||
|
github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
|
||||||
|
github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
|
||||||
|
github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
|
||||||
|
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||||
|
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||||
github.com/modelcontextprotocol/go-sdk v1.4.1 h1:M4x9GyIPj+HoIlHNGpK2hq5o3BFhC+78PkEaldQRphc=
|
github.com/modelcontextprotocol/go-sdk v1.4.1 h1:M4x9GyIPj+HoIlHNGpK2hq5o3BFhC+78PkEaldQRphc=
|
||||||
github.com/modelcontextprotocol/go-sdk v1.4.1/go.mod h1:Bo/mS87hPQqHSRkMv4dQq1XCu6zv4INdXnFZabkNU6s=
|
github.com/modelcontextprotocol/go-sdk v1.4.1/go.mod h1:Bo/mS87hPQqHSRkMv4dQq1XCu6zv4INdXnFZabkNU6s=
|
||||||
|
github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8=
|
||||||
|
github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||||
|
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||||
|
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||||
|
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||||
|
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
|
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||||
|
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||||
github.com/pgvector/pgvector-go v0.3.0 h1:Ij+Yt78R//uYqs3Zk35evZFvr+G0blW0OUN+Q2D1RWc=
|
github.com/pgvector/pgvector-go v0.3.0 h1:Ij+Yt78R//uYqs3Zk35evZFvr+G0blW0OUN+Q2D1RWc=
|
||||||
github.com/pgvector/pgvector-go v0.3.0/go.mod h1:duFy+PXWfW7QQd5ibqutBO4GxLsUZ9RVXhFZGIBsWSA=
|
github.com/pgvector/pgvector-go v0.3.0/go.mod h1:duFy+PXWfW7QQd5ibqutBO4GxLsUZ9RVXhFZGIBsWSA=
|
||||||
|
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
|
||||||
|
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
|
||||||
|
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
|
||||||
|
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||||
|
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
||||||
|
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
|
||||||
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||||
|
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
|
||||||
|
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
|
||||||
|
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||||
|
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||||
|
github.com/prometheus/common v0.67.4 h1:yR3NqWO1/UyO1w2PhUvXlGQs/PtFmoveVO0KZ4+Lvsc=
|
||||||
|
github.com/prometheus/common v0.67.4/go.mod h1:gP0fq6YjjNCLssJCQp0yk4M8W6ikLURwkdd/YKtTbyI=
|
||||||
|
github.com/prometheus/procfs v0.19.2 h1:zUMhqEW66Ex7OXIiDkll3tl9a1ZdilUOd/F6ZXw4Vws=
|
||||||
|
github.com/prometheus/procfs v0.19.2/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
|
||||||
|
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
||||||
|
github.com/redis/go-redis/v9 v9.17.2 h1:P2EGsA4qVIM3Pp+aPocCJ7DguDHhqrXNhVcEp4ViluI=
|
||||||
|
github.com/redis/go-redis/v9 v9.17.2/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370=
|
||||||
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
|
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||||
|
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
|
||||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
github.com/sagikazarmark/locafero v0.12.0 h1:/NQhBAkUb4+fH1jivKHWusDYFjMOOKU88eegjfxfHb4=
|
||||||
|
github.com/sagikazarmark/locafero v0.12.0/go.mod h1:sZh36u/YSZ918v0Io+U9ogLYQJ9tLLBmM4eneO6WwsI=
|
||||||
github.com/segmentio/asm v1.1.3 h1:WM03sfUOENvvKexOLp+pCqgb/WDjsi7EK8gIsICtzhc=
|
github.com/segmentio/asm v1.1.3 h1:WM03sfUOENvvKexOLp+pCqgb/WDjsi7EK8gIsICtzhc=
|
||||||
github.com/segmentio/asm v1.1.3/go.mod h1:Ld3L4ZXGNcSLRg4JBsZ3//1+f/TjYl0Mzen/DQy1EJg=
|
github.com/segmentio/asm v1.1.3/go.mod h1:Ld3L4ZXGNcSLRg4JBsZ3//1+f/TjYl0Mzen/DQy1EJg=
|
||||||
github.com/segmentio/encoding v0.5.4 h1:OW1VRern8Nw6ITAtwSZ7Idrl3MXCFwXHPgqESYfvNt0=
|
github.com/segmentio/encoding v0.5.4 h1:OW1VRern8Nw6ITAtwSZ7Idrl3MXCFwXHPgqESYfvNt0=
|
||||||
github.com/segmentio/encoding v0.5.4/go.mod h1:HS1ZKa3kSN32ZHVZ7ZLPLXWvOVIiZtyJnO1gPH1sKt0=
|
github.com/segmentio/encoding v0.5.4/go.mod h1:HS1ZKa3kSN32ZHVZ7ZLPLXWvOVIiZtyJnO1gPH1sKt0=
|
||||||
|
github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
|
||||||
|
github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
|
||||||
|
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||||
|
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||||
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
|
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||||
|
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||||
|
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
|
||||||
|
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||||
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
|
|
||||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
|
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
|
||||||
|
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
|
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
|
||||||
|
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
|
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
|
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
|
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||||
|
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||||
|
github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
|
||||||
|
github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY=
|
||||||
|
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||||
|
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||||
|
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||||
|
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||||
|
github.com/tidwall/match v1.2.0 h1:0pt8FlkOwjN2fPt4bIl4BoNxb98gGHN2ObFEDkrfZnM=
|
||||||
|
github.com/tidwall/match v1.2.0/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||||
|
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||||
|
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||||
|
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||||
|
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||||
|
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||||
|
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
|
||||||
|
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||||
|
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
||||||
|
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||||
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo=
|
||||||
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
|
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
|
||||||
github.com/uptrace/bun v1.1.12 h1:sOjDVHxNTuM6dNGaba0wUuz7KvDE1BmNu9Gqs2gJSXQ=
|
github.com/uptrace/bun v1.2.16 h1:QlObi6ZIK5Ao7kAALnh91HWYNZUBbVwye52fmlQM9kc=
|
||||||
github.com/uptrace/bun v1.1.12/go.mod h1:NPG6JGULBeQ9IU6yHp7YGELRa5Agmd7ATZdz4tGZ6z0=
|
github.com/uptrace/bun v1.2.16/go.mod h1:jMoNg2n56ckaawi/O/J92BHaECmrz6IRjuMWqlMaMTM=
|
||||||
github.com/uptrace/bun/dialect/pgdialect v1.1.12 h1:m/CM1UfOkoBTglGO5CUTKnIKKOApOYxkcP2qn0F9tJk=
|
github.com/uptrace/bun/dialect/mssqldialect v1.2.16 h1:rKv0cKPNBviXadB/+2Y/UedA/c1JnwGzUWZkdN5FdSQ=
|
||||||
github.com/uptrace/bun/dialect/pgdialect v1.1.12/go.mod h1:Ij6WIxQILxLlL2frUBxUBOZJtLElD2QQNDcu/PWDHTc=
|
github.com/uptrace/bun/dialect/mssqldialect v1.2.16/go.mod h1:J5U7tGKWDsx2Q7MwDZF2417jCdpD6yD/ZMFJcCR80bk=
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.16 h1:KFNZ0LxAyczKNfK/IJWMyaleO6eI9/Z5tUv3DE1NVL4=
|
||||||
|
github.com/uptrace/bun/dialect/pgdialect v1.2.16/go.mod h1:IJdMeV4sLfh0LDUZl7TIxLI0LipF1vwTK3hBC7p5qLo=
|
||||||
|
github.com/uptrace/bun/dialect/sqlitedialect v1.2.16 h1:6wVAiYLj1pMibRthGwy4wDLa3D5AQo32Y8rvwPd8CQ0=
|
||||||
|
github.com/uptrace/bun/dialect/sqlitedialect v1.2.16/go.mod h1:Z7+5qK8CGZkDQiPMu+LSdVuDuR1I5jcwtkB1Pi3F82E=
|
||||||
github.com/uptrace/bun/driver/pgdriver v1.1.12 h1:3rRWB1GK0psTJrHwxzNfEij2MLibggiLdTqjTtfHc1w=
|
github.com/uptrace/bun/driver/pgdriver v1.1.12 h1:3rRWB1GK0psTJrHwxzNfEij2MLibggiLdTqjTtfHc1w=
|
||||||
github.com/uptrace/bun/driver/pgdriver v1.1.12/go.mod h1:ssYUP+qwSEgeDDS1xm2XBip9el1y9Mi5mTAvLoiADLM=
|
github.com/uptrace/bun/driver/pgdriver v1.1.12/go.mod h1:ssYUP+qwSEgeDDS1xm2XBip9el1y9Mi5mTAvLoiADLM=
|
||||||
|
github.com/uptrace/bun/driver/sqliteshim v1.2.16 h1:M6Dh5kkDWFbUWBrOsIE1g1zdZ5JbSytTD4piFRBOUAI=
|
||||||
|
github.com/uptrace/bun/driver/sqliteshim v1.2.16/go.mod h1:iKdJ06P3XS+pwKcONjSIK07bbhksH3lWsw3mpfr0+bY=
|
||||||
|
github.com/uptrace/bunrouter v1.0.23 h1:Bi7NKw3uCQkcA/GUCtDNPq5LE5UdR9pe+UyWbjHB/wU=
|
||||||
|
github.com/uptrace/bunrouter v1.0.23/go.mod h1:O3jAcl+5qgnF+ejhgkmbceEk0E/mqaK+ADOocdNpY8M=
|
||||||
github.com/vmihailenco/bufpool v0.1.11 h1:gOq2WmBrq0i2yW5QJ16ykccQ4wH9UyEsgLm6czKAd94=
|
github.com/vmihailenco/bufpool v0.1.11 h1:gOq2WmBrq0i2yW5QJ16ykccQ4wH9UyEsgLm6czKAd94=
|
||||||
github.com/vmihailenco/bufpool v0.1.11/go.mod h1:AFf/MOy3l2CFTKbxwt0mp2MwnqjNEs5H/UxrkA5jxTQ=
|
github.com/vmihailenco/bufpool v0.1.11/go.mod h1:AFf/MOy3l2CFTKbxwt0mp2MwnqjNEs5H/UxrkA5jxTQ=
|
||||||
github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU=
|
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||||
github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc=
|
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||||
github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc=
|
github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc=
|
||||||
github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
|
github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||||
@@ -81,28 +317,171 @@ github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
|
|||||||
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
||||||
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
|
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
|
||||||
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
|
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
|
||||||
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||||
|
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||||
|
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||||
|
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
|
||||||
|
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
|
||||||
|
go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
|
||||||
|
go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
|
||||||
|
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
|
||||||
|
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||||
|
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||||
|
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||||
|
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||||
|
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||||
|
go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
|
||||||
|
go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||||
|
go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
|
||||||
|
go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
|
||||||
|
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
|
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||||
|
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
|
||||||
|
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||||
|
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||||
|
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||||
|
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||||
|
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
|
||||||
|
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
|
||||||
|
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||||
|
golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM=
|
||||||
|
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
||||||
|
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
||||||
|
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
|
||||||
|
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
||||||
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||||
|
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||||
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
|
golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
|
||||||
|
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
|
||||||
|
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||||
|
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||||
|
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||||
|
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
|
||||||
|
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
|
||||||
|
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||||
|
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
|
||||||
|
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||||
|
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||||
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
||||||
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||||
|
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||||
|
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
|
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||||
golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
|
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
|
||||||
|
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||||
|
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
|
||||||
|
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
|
||||||
|
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||||
|
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
|
||||||
|
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||||
|
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
|
||||||
|
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
|
||||||
|
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||||
|
golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
|
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||||
|
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
|
||||||
|
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||||
|
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
|
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||||
|
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||||
|
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gorm.io/driver/postgres v1.5.4 h1:Iyrp9Meh3GmbSuyIAGyjkN+n9K+GHX9b9MqsTL4EJCo=
|
gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
|
||||||
gorm.io/driver/postgres v1.5.4/go.mod h1:Bgo89+h0CRcdA33Y6frlaHHVuTdOf87pmyzwW9C/BH0=
|
gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
|
||||||
gorm.io/gorm v1.25.5 h1:zR9lOiiYf09VNh5Q1gphfyia1JpiClIWG9hQaxB/mls=
|
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
|
||||||
gorm.io/gorm v1.25.5/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
|
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
|
||||||
|
gorm.io/driver/sqlserver v1.6.3 h1:UR+nWCuphPnq7UxnL57PSrlYjuvs+sf1N59GgFX7uAI=
|
||||||
|
gorm.io/driver/sqlserver v1.6.3/go.mod h1:VZeNn7hqX1aXoN5TPAFGWvxWG90xtA8erGn2gQmpc6U=
|
||||||
|
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
|
||||||
|
gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
|
||||||
|
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
|
||||||
mellium.im/sasl v0.3.1 h1:wE0LW6g7U83vhvxjC1IY8DnXM+EU095yeo8XClvCdfo=
|
mellium.im/sasl v0.3.1 h1:wE0LW6g7U83vhvxjC1IY8DnXM+EU095yeo8XClvCdfo=
|
||||||
mellium.im/sasl v0.3.1/go.mod h1:xm59PUYpZHhgQ9ZqoJ5QaCqzWMi8IeS49dhp6plPCzw=
|
mellium.im/sasl v0.3.1/go.mod h1:xm59PUYpZHhgQ9ZqoJ5QaCqzWMi8IeS49dhp6plPCzw=
|
||||||
|
modernc.org/libc v1.67.4 h1:zZGmCMUVPORtKv95c2ReQN5VDjvkoRm9GWPTEPuvlWg=
|
||||||
|
modernc.org/libc v1.67.4/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
|
||||||
|
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||||
|
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||||
|
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
|
||||||
|
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
|
||||||
|
modernc.org/sqlite v1.42.2 h1:7hkZUNJvJFN2PgfUdjni9Kbvd4ef4mNLOu0B9FGxM74=
|
||||||
|
modernc.org/sqlite v1.42.2/go.mod h1:+VkC6v3pLOAE0A0uVucQEcbVW0I5nHCeDaBf+DpsQT8=
|
||||||
|
|||||||
@@ -14,7 +14,6 @@ import (
|
|||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
@@ -36,36 +35,39 @@ Rules:
|
|||||||
- If unsure, prefer "observation".
|
- If unsure, prefer "observation".
|
||||||
- Do not include any text outside the JSON object.`
|
- Do not include any text outside the JSON object.`
|
||||||
|
|
||||||
|
// Client is a low-level OpenAI-compatible HTTP client. It knows nothing about
|
||||||
|
// role chains, fallbacks, or health — those concerns belong to ai.Runner. Each
|
||||||
|
// method takes the model name per-call so a single Client instance can service
|
||||||
|
// many different models on the same base URL.
|
||||||
type Client struct {
|
type Client struct {
|
||||||
name string
|
name string
|
||||||
baseURL string
|
baseURL string
|
||||||
apiKey string
|
apiKey string
|
||||||
embeddingModel string
|
headers map[string]string
|
||||||
metadataModel string
|
httpClient *http.Client
|
||||||
fallbackMetadataModels []string
|
log *slog.Logger
|
||||||
temperature float64
|
|
||||||
headers map[string]string
|
|
||||||
httpClient *http.Client
|
|
||||||
log *slog.Logger
|
|
||||||
dimensions int
|
|
||||||
logConversations bool
|
|
||||||
modelHealthMu sync.Mutex
|
|
||||||
modelHealth map[string]modelHealthState
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Name string
|
Name string
|
||||||
BaseURL string
|
BaseURL string
|
||||||
APIKey string
|
APIKey string
|
||||||
EmbeddingModel string
|
Headers map[string]string
|
||||||
MetadataModel string
|
HTTPClient *http.Client
|
||||||
FallbackMetadataModels []string
|
Log *slog.Logger
|
||||||
Temperature float64
|
}
|
||||||
Headers map[string]string
|
|
||||||
HTTPClient *http.Client
|
// MetadataOptions control a single ExtractMetadataWith call.
|
||||||
Log *slog.Logger
|
type MetadataOptions struct {
|
||||||
Dimensions int
|
Model string
|
||||||
LogConversations bool
|
Temperature float64
|
||||||
|
LogConversations bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// SummarizeOptions control a single SummarizeWith call.
|
||||||
|
type SummarizeOptions struct {
|
||||||
|
Model string
|
||||||
|
Temperature float64
|
||||||
}
|
}
|
||||||
|
|
||||||
type embeddingsRequest struct {
|
type embeddingsRequest struct {
|
||||||
@@ -127,65 +129,38 @@ type providerError struct {
|
|||||||
|
|
||||||
const maxMetadataAttempts = 3
|
const maxMetadataAttempts = 3
|
||||||
|
|
||||||
const (
|
// ErrEmptyResponse and ErrNoJSONObject are sentinel errors callers can inspect
|
||||||
emptyResponseCircuitThreshold = 3
|
// to classify metadata failures (e.g. bump empty-response health counters).
|
||||||
emptyResponseCircuitTTL = 5 * time.Minute
|
|
||||||
permanentModelFailureTTL = 24 * time.Hour
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
errMetadataEmptyResponse = errors.New("metadata empty response")
|
ErrEmptyResponse = errors.New("metadata empty response")
|
||||||
errMetadataNoJSONObject = errors.New("metadata response contains no JSON object")
|
ErrNoJSONObject = errors.New("metadata response contains no JSON object")
|
||||||
)
|
)
|
||||||
|
|
||||||
type modelHealthState struct {
|
|
||||||
consecutiveEmpty int
|
|
||||||
unhealthyUntil time.Time
|
|
||||||
}
|
|
||||||
|
|
||||||
func New(cfg Config) *Client {
|
func New(cfg Config) *Client {
|
||||||
fallbacks := make([]string, 0, len(cfg.FallbackMetadataModels))
|
|
||||||
seen := make(map[string]struct{}, len(cfg.FallbackMetadataModels))
|
|
||||||
for _, model := range cfg.FallbackMetadataModels {
|
|
||||||
model = strings.TrimSpace(model)
|
|
||||||
if model == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := seen[model]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[model] = struct{}{}
|
|
||||||
fallbacks = append(fallbacks, model)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &Client{
|
return &Client{
|
||||||
name: cfg.Name,
|
name: cfg.Name,
|
||||||
baseURL: cfg.BaseURL,
|
baseURL: cfg.BaseURL,
|
||||||
apiKey: cfg.APIKey,
|
apiKey: cfg.APIKey,
|
||||||
embeddingModel: cfg.EmbeddingModel,
|
headers: cfg.Headers,
|
||||||
metadataModel: cfg.MetadataModel,
|
httpClient: cfg.HTTPClient,
|
||||||
fallbackMetadataModels: fallbacks,
|
log: cfg.Log,
|
||||||
temperature: cfg.Temperature,
|
|
||||||
headers: cfg.Headers,
|
|
||||||
httpClient: cfg.HTTPClient,
|
|
||||||
log: cfg.Log,
|
|
||||||
dimensions: cfg.Dimensions,
|
|
||||||
logConversations: cfg.LogConversations,
|
|
||||||
modelHealth: make(map[string]modelHealthState),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) {
|
func (c *Client) Name() string { return c.name }
|
||||||
|
|
||||||
|
// EmbedWith generates an embedding for the given input using model.
|
||||||
|
func (c *Client) EmbedWith(ctx context.Context, model, input string) ([]float32, error) {
|
||||||
input = strings.TrimSpace(input)
|
input = strings.TrimSpace(input)
|
||||||
if input == "" {
|
if input == "" {
|
||||||
return nil, fmt.Errorf("%s embed: input must not be empty", c.name)
|
return nil, fmt.Errorf("%s embed: input must not be empty", c.name)
|
||||||
}
|
}
|
||||||
|
if strings.TrimSpace(model) == "" {
|
||||||
|
return nil, fmt.Errorf("%s embed: model is required", c.name)
|
||||||
|
}
|
||||||
|
|
||||||
var resp embeddingsResponse
|
var resp embeddingsResponse
|
||||||
err := c.doJSON(ctx, "/embeddings", embeddingsRequest{
|
err := c.doJSON(ctx, "/embeddings", embeddingsRequest{Input: input, Model: model}, &resp)
|
||||||
Input: input,
|
|
||||||
Model: c.embeddingModel,
|
|
||||||
}, &resp)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -195,141 +170,34 @@ func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) {
|
|||||||
if len(resp.Data) == 0 {
|
if len(resp.Data) == 0 {
|
||||||
return nil, fmt.Errorf("%s embed: no embedding returned", c.name)
|
return nil, fmt.Errorf("%s embed: no embedding returned", c.name)
|
||||||
}
|
}
|
||||||
if c.dimensions > 0 && len(resp.Data[0].Embedding) != c.dimensions {
|
|
||||||
return nil, fmt.Errorf("%s embed: expected %d dimensions, got %d", c.name, c.dimensions, len(resp.Data[0].Embedding))
|
|
||||||
}
|
|
||||||
|
|
||||||
return resp.Data[0].Embedding, nil
|
return resp.Data[0].Embedding, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) {
|
// ExtractMetadataWith extracts structured metadata for input using opts.Model.
|
||||||
|
// Returns compat.ErrEmptyResponse / ErrNoJSONObject wrapped when the model
|
||||||
|
// produces unusable output so callers can classify the failure.
|
||||||
|
func (c *Client) ExtractMetadataWith(ctx context.Context, opts MetadataOptions, input string) (thoughttypes.ThoughtMetadata, error) {
|
||||||
input = strings.TrimSpace(input)
|
input = strings.TrimSpace(input)
|
||||||
if input == "" {
|
if input == "" {
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: input must not be empty", c.name)
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: input must not be empty", c.name)
|
||||||
}
|
}
|
||||||
|
if strings.TrimSpace(opts.Model) == "" {
|
||||||
start := time.Now()
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: model is required", c.name)
|
||||||
if c.log != nil {
|
|
||||||
c.log.Info("metadata client started",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", c.metadataModel),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
logCompletion := func(model string, err error) {
|
|
||||||
if c.log == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
attrs := []any{
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.String("duration", formatLogDuration(time.Since(start))),
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
attrs = append(attrs, slog.String("error", err.Error()))
|
|
||||||
c.log.Error("metadata client completed", attrs...)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.log.Info("metadata client completed", attrs...)
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := c.extractMetadataWithModel(ctx, input, c.metadataModel)
|
|
||||||
if errors.Is(err, errMetadataEmptyResponse) {
|
|
||||||
c.noteEmptyResponse(c.metadataModel)
|
|
||||||
}
|
|
||||||
if isPermanentModelError(err) {
|
|
||||||
c.notePermanentModelFailure(c.metadataModel, err)
|
|
||||||
}
|
|
||||||
if err == nil {
|
|
||||||
c.noteModelSuccess(c.metadataModel)
|
|
||||||
logCompletion(c.metadataModel, nil)
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, fallbackModel := range c.fallbackMetadataModels {
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if fallbackModel == "" || fallbackModel == c.metadataModel {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if c.shouldBypassModel(fallbackModel) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata extraction failed, trying fallback model",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("primary_model", c.metadataModel),
|
|
||||||
slog.String("fallback_model", fallbackModel),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
fallbackResult, fallbackErr := c.extractMetadataWithModel(ctx, input, fallbackModel)
|
|
||||||
if errors.Is(fallbackErr, errMetadataEmptyResponse) {
|
|
||||||
c.noteEmptyResponse(fallbackModel)
|
|
||||||
}
|
|
||||||
if isPermanentModelError(fallbackErr) {
|
|
||||||
c.notePermanentModelFailure(fallbackModel, fallbackErr)
|
|
||||||
}
|
|
||||||
if fallbackErr == nil {
|
|
||||||
c.noteModelSuccess(fallbackModel)
|
|
||||||
logCompletion(fallbackModel, nil)
|
|
||||||
return fallbackResult, nil
|
|
||||||
}
|
|
||||||
err = fallbackErr
|
|
||||||
}
|
|
||||||
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
err = fmt.Errorf("%s metadata: %w", c.name, ctx.Err())
|
|
||||||
logCompletion(c.metadataModel, err)
|
|
||||||
return thoughttypes.ThoughtMetadata{}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
heuristic := heuristicMetadataFromInput(input)
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata extraction failed for all models, using heuristic fallback",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
logCompletion(c.metadataModel, nil)
|
|
||||||
return heuristic, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func formatLogDuration(d time.Duration) string {
|
|
||||||
if d < 0 {
|
|
||||||
d = -d
|
|
||||||
}
|
|
||||||
|
|
||||||
totalMilliseconds := d.Milliseconds()
|
|
||||||
minutes := totalMilliseconds / 60000
|
|
||||||
seconds := (totalMilliseconds / 1000) % 60
|
|
||||||
milliseconds := totalMilliseconds % 1000
|
|
||||||
return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) extractMetadataWithModel(ctx context.Context, input, model string) (thoughttypes.ThoughtMetadata, error) {
|
|
||||||
if c.shouldBypassModel(model) {
|
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: model %q temporarily bypassed after repeated empty responses", c.name, model)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
stream := true
|
stream := true
|
||||||
req := chatCompletionsRequest{
|
req := chatCompletionsRequest{
|
||||||
Model: model,
|
Model: opts.Model,
|
||||||
Temperature: c.temperature,
|
Temperature: opts.Temperature,
|
||||||
ResponseFormat: &responseType{
|
ResponseFormat: &responseType{Type: "json_object"},
|
||||||
Type: "json_object",
|
Stream: &stream,
|
||||||
},
|
|
||||||
Stream: &stream,
|
|
||||||
Messages: []chatMessage{
|
Messages: []chatMessage{
|
||||||
{Role: "system", Content: metadataSystemPrompt},
|
{Role: "system", Content: metadataSystemPrompt},
|
||||||
{Role: "user", Content: input},
|
{Role: "user", Content: input},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
metadata, err := c.extractMetadataWithRequest(ctx, req, input, model)
|
metadata, err := c.extractMetadataWithRequest(ctx, req, input, opts)
|
||||||
if err == nil || !shouldRetryWithoutJSONMode(err) {
|
if err == nil || !shouldRetryWithoutJSONMode(err) {
|
||||||
return metadata, err
|
return metadata, err
|
||||||
}
|
}
|
||||||
@@ -337,23 +205,22 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
|
|||||||
if c.log != nil {
|
if c.log != nil {
|
||||||
c.log.Warn("metadata json mode failed, retrying without response_format",
|
c.log.Warn("metadata json mode failed, retrying without response_format",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.String("error", err.Error()),
|
slog.String("error", err.Error()),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
req.ResponseFormat = nil
|
req.ResponseFormat = nil
|
||||||
return c.extractMetadataWithRequest(ctx, req, input, model)
|
return c.extractMetadataWithRequest(ctx, req, input, opts)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input, model string) (thoughttypes.ThoughtMetadata, error) {
|
func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input string, opts MetadataOptions) (thoughttypes.ThoughtMetadata, error) {
|
||||||
|
|
||||||
var lastErr error
|
var lastErr error
|
||||||
for attempt := 1; attempt <= maxMetadataAttempts; attempt++ {
|
for attempt := 1; attempt <= maxMetadataAttempts; attempt++ {
|
||||||
if c.logConversations && c.log != nil {
|
if opts.LogConversations && c.log != nil {
|
||||||
c.log.Info("metadata conversation request",
|
c.log.Info("metadata conversation request",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt),
|
slog.Int("attempt", attempt),
|
||||||
slog.String("system", metadataSystemPrompt),
|
slog.String("system", metadataSystemPrompt),
|
||||||
slog.String("input", input),
|
slog.String("input", input),
|
||||||
@@ -373,10 +240,10 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
|
|
||||||
rawResponse := extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text)
|
rawResponse := extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text)
|
||||||
|
|
||||||
if c.logConversations && c.log != nil {
|
if opts.LogConversations && c.log != nil {
|
||||||
c.log.Info("metadata conversation response",
|
c.log.Info("metadata conversation response",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt),
|
slog.Int("attempt", attempt),
|
||||||
slog.String("response", rawResponse),
|
slog.String("response", rawResponse),
|
||||||
)
|
)
|
||||||
@@ -387,13 +254,13 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
metadataText = stripCodeFence(metadataText)
|
metadataText = stripCodeFence(metadataText)
|
||||||
metadataText = extractJSONObject(metadataText)
|
metadataText = extractJSONObject(metadataText)
|
||||||
if metadataText == "" {
|
if metadataText == "" {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject)
|
||||||
if strings.TrimSpace(rawResponse) == "" && attempt < maxMetadataAttempts && ctx.Err() == nil {
|
if strings.TrimSpace(rawResponse) == "" && attempt < maxMetadataAttempts && ctx.Err() == nil {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse)
|
||||||
if c.log != nil {
|
if c.log != nil {
|
||||||
c.log.Warn("metadata response empty, waiting and retrying",
|
c.log.Warn("metadata response empty, waiting and retrying",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt+1),
|
slog.Int("attempt", attempt+1),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -403,7 +270,7 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if strings.TrimSpace(rawResponse) == "" {
|
if strings.TrimSpace(rawResponse) == "" {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse)
|
||||||
}
|
}
|
||||||
return thoughttypes.ThoughtMetadata{}, lastErr
|
return thoughttypes.ThoughtMetadata{}, lastErr
|
||||||
}
|
}
|
||||||
@@ -420,13 +287,17 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
if lastErr != nil {
|
if lastErr != nil {
|
||||||
return thoughttypes.ThoughtMetadata{}, lastErr
|
return thoughttypes.ThoughtMetadata{}, lastErr
|
||||||
}
|
}
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject)
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) {
|
// SummarizeWith runs a chat-completion summarisation using opts.Model.
|
||||||
|
func (c *Client) SummarizeWith(ctx context.Context, opts SummarizeOptions, systemPrompt, userPrompt string) (string, error) {
|
||||||
|
if strings.TrimSpace(opts.Model) == "" {
|
||||||
|
return "", fmt.Errorf("%s summarize: model is required", c.name)
|
||||||
|
}
|
||||||
req := chatCompletionsRequest{
|
req := chatCompletionsRequest{
|
||||||
Model: c.metadataModel,
|
Model: opts.Model,
|
||||||
Temperature: 0.2,
|
Temperature: opts.Temperature,
|
||||||
Messages: []chatMessage{
|
Messages: []chatMessage{
|
||||||
{Role: "system", Content: systemPrompt},
|
{Role: "system", Content: systemPrompt},
|
||||||
{Role: "user", Content: userPrompt},
|
{Role: "user", Content: userPrompt},
|
||||||
@@ -447,12 +318,49 @@ func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string)
|
|||||||
return extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text), nil
|
return extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Name() string {
|
// IsPermanentModelError reports whether err indicates the model itself is
|
||||||
return c.name
|
// invalid or missing (vs. a transient outage). Runners use this to mark a
|
||||||
|
// target unhealthy for longer.
|
||||||
|
func IsPermanentModelError(err error) bool {
|
||||||
|
if err == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
lower := strings.ToLower(err.Error())
|
||||||
|
for _, marker := range []string{
|
||||||
|
"invalid model name",
|
||||||
|
"model_not_found",
|
||||||
|
"model not found",
|
||||||
|
"unknown model",
|
||||||
|
"no such model",
|
||||||
|
"does not exist",
|
||||||
|
} {
|
||||||
|
if strings.Contains(lower, marker) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) EmbeddingModel() string {
|
// HeuristicMetadataFromInput produces best-effort metadata from the note text
|
||||||
return c.embeddingModel
|
// when every model in the chain has failed. Exported so ai.Runner can use it.
|
||||||
|
func HeuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata {
|
||||||
|
text := strings.TrimSpace(input)
|
||||||
|
lower := strings.ToLower(text)
|
||||||
|
|
||||||
|
metadata := thoughttypes.ThoughtMetadata{
|
||||||
|
People: heuristicPeople(text),
|
||||||
|
ActionItems: heuristicActionItems(text),
|
||||||
|
DatesMentioned: heuristicDates(text),
|
||||||
|
Topics: heuristicTopics(lower),
|
||||||
|
Type: heuristicType(lower),
|
||||||
|
}
|
||||||
|
if len(metadata.Topics) == 0 {
|
||||||
|
metadata.Topics = []string{"uncategorized"}
|
||||||
|
}
|
||||||
|
if metadata.Type == "" {
|
||||||
|
metadata.Type = "observation"
|
||||||
|
}
|
||||||
|
return metadata
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) doJSON(ctx context.Context, path string, requestBody any, dest any) error {
|
func (c *Client) doJSON(ctx context.Context, path string, requestBody any, dest any) error {
|
||||||
@@ -724,8 +632,6 @@ func isRetryableChatResponseError(err error) bool {
|
|||||||
return strings.Contains(lower, "read response") || strings.Contains(lower, "read stream response")
|
return strings.Contains(lower, "read response") || strings.Contains(lower, "read stream response")
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractJSONObject finds the first complete {...} block in s.
|
|
||||||
// It handles models that prepend prose to a JSON response despite json_object mode.
|
|
||||||
func extractJSONObject(s string) string {
|
func extractJSONObject(s string) string {
|
||||||
for start := 0; start < len(s); start++ {
|
for start := 0; start < len(s); start++ {
|
||||||
if s[start] != '{' {
|
if s[start] != '{' {
|
||||||
@@ -768,10 +674,6 @@ func extractJSONObject(s string) string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// stripThinkingBlocks removes <think>...</think> and <thinking>...</thinking>
|
|
||||||
// blocks produced by reasoning models (DeepSeek R1, QwQ, etc.) so that the
|
|
||||||
// remaining text can be parsed as JSON without interference from thinking content
|
|
||||||
// that may itself contain braces.
|
|
||||||
func stripThinkingBlocks(s string) string {
|
func stripThinkingBlocks(s string) string {
|
||||||
for _, tag := range []string{"think", "thinking"} {
|
for _, tag := range []string{"think", "thinking"} {
|
||||||
open := "<" + tag + ">"
|
open := "<" + tag + ">"
|
||||||
@@ -857,7 +759,6 @@ func extractTextFromAny(value any) string {
|
|||||||
}
|
}
|
||||||
return strings.Join(parts, "\n")
|
return strings.Join(parts, "\n")
|
||||||
case map[string]any:
|
case map[string]any:
|
||||||
// Common provider shapes for chat content parts.
|
|
||||||
for _, key := range []string{"text", "output_text", "content", "value"} {
|
for _, key := range []string{"text", "output_text", "content", "value"} {
|
||||||
if nested, ok := typed[key]; ok {
|
if nested, ok := typed[key]; ok {
|
||||||
if text := strings.TrimSpace(extractTextFromAny(nested)); text != "" {
|
if text := strings.TrimSpace(extractTextFromAny(nested)); text != "" {
|
||||||
@@ -875,28 +776,6 @@ var (
|
|||||||
wordPattern = regexp.MustCompile(`[a-zA-Z][a-zA-Z0-9_/-]{2,}`)
|
wordPattern = regexp.MustCompile(`[a-zA-Z][a-zA-Z0-9_/-]{2,}`)
|
||||||
)
|
)
|
||||||
|
|
||||||
func heuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata {
|
|
||||||
text := strings.TrimSpace(input)
|
|
||||||
lower := strings.ToLower(text)
|
|
||||||
|
|
||||||
metadata := thoughttypes.ThoughtMetadata{
|
|
||||||
People: heuristicPeople(text),
|
|
||||||
ActionItems: heuristicActionItems(text),
|
|
||||||
DatesMentioned: heuristicDates(text),
|
|
||||||
Topics: heuristicTopics(lower),
|
|
||||||
Type: heuristicType(lower),
|
|
||||||
Source: "",
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(metadata.Topics) == 0 {
|
|
||||||
metadata.Topics = []string{"uncategorized"}
|
|
||||||
}
|
|
||||||
if metadata.Type == "" {
|
|
||||||
metadata.Type = "observation"
|
|
||||||
}
|
|
||||||
return metadata
|
|
||||||
}
|
|
||||||
|
|
||||||
func heuristicType(lower string) string {
|
func heuristicType(lower string) string {
|
||||||
switch {
|
switch {
|
||||||
case strings.Contains(lower, "preferred name"), strings.Contains(lower, "personal profile"), strings.Contains(lower, "wife:"), strings.Contains(lower, "daughter:"), strings.Contains(lower, "born:"):
|
case strings.Contains(lower, "preferred name"), strings.Contains(lower, "personal profile"), strings.Contains(lower, "wife:"), strings.Contains(lower, "daughter:"), strings.Contains(lower, "born:"):
|
||||||
@@ -1055,7 +934,7 @@ func shouldRetryWithoutJSONMode(err error) bool {
|
|||||||
if err == nil {
|
if err == nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if errors.Is(err, errMetadataEmptyResponse) || errors.Is(err, errMetadataNoJSONObject) {
|
if errors.Is(err, ErrEmptyResponse) || errors.Is(err, ErrNoJSONObject) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1063,27 +942,6 @@ func shouldRetryWithoutJSONMode(err error) bool {
|
|||||||
return strings.Contains(lower, "parse json")
|
return strings.Contains(lower, "parse json")
|
||||||
}
|
}
|
||||||
|
|
||||||
func isPermanentModelError(err error) bool {
|
|
||||||
if err == nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
lower := strings.ToLower(err.Error())
|
|
||||||
for _, marker := range []string{
|
|
||||||
"invalid model name",
|
|
||||||
"model_not_found",
|
|
||||||
"model not found",
|
|
||||||
"unknown model",
|
|
||||||
"no such model",
|
|
||||||
"does not exist",
|
|
||||||
} {
|
|
||||||
if strings.Contains(lower, marker) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func sleepRetry(ctx context.Context, attempt int, log *slog.Logger, provider string) error {
|
func sleepRetry(ctx context.Context, attempt int, log *slog.Logger, provider string) error {
|
||||||
delay := time.Duration(attempt*attempt) * 200 * time.Millisecond
|
delay := time.Duration(attempt*attempt) * 200 * time.Millisecond
|
||||||
if log != nil {
|
if log != nil {
|
||||||
@@ -1110,59 +968,3 @@ func sleepMetadataRetry(ctx context.Context, attempt int) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) shouldBypassModel(model string) bool {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state, ok := c.modelHealth[model]
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return !state.unhealthyUntil.IsZero() && time.Now().Before(state.unhealthyUntil)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) noteEmptyResponse(model string) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state := c.modelHealth[model]
|
|
||||||
state.consecutiveEmpty++
|
|
||||||
if state.consecutiveEmpty >= emptyResponseCircuitThreshold {
|
|
||||||
state.unhealthyUntil = time.Now().Add(emptyResponseCircuitTTL)
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata model marked temporarily unhealthy after repeated empty responses",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.Time("until", state.unhealthyUntil),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
c.modelHealth[model] = state
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) noteModelSuccess(model string) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
delete(c.modelHealth, model)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) notePermanentModelFailure(model string, err error) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state := c.modelHealth[model]
|
|
||||||
state.consecutiveEmpty = emptyResponseCircuitThreshold
|
|
||||||
state.unhealthyUntil = time.Now().Add(permanentModelFailureTTL)
|
|
||||||
c.modelHealth[model] = state
|
|
||||||
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata model marked unhealthy after permanent failure",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
slog.Time("until", state.unhealthyUntil),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -11,6 +11,17 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func newTestClient(t *testing.T, url string) *Client {
|
||||||
|
t.Helper()
|
||||||
|
return New(Config{
|
||||||
|
Name: "litellm",
|
||||||
|
BaseURL: url,
|
||||||
|
APIKey: "test-key",
|
||||||
|
HTTPClient: http.DefaultClient,
|
||||||
|
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
@@ -26,6 +37,9 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
|||||||
if req.Stream == nil || !*req.Stream {
|
if req.Stream == nil || !*req.Stream {
|
||||||
t.Fatalf("stream flag = %v, want true", req.Stream)
|
t.Fatalf("stream flag = %v, want true", req.Stream)
|
||||||
}
|
}
|
||||||
|
if req.Model != "qwen3.5:latest" {
|
||||||
|
t.Fatalf("model = %q, want qwen3.5:latest", req.Model)
|
||||||
|
}
|
||||||
|
|
||||||
w.Header().Set("Content-Type", "text/event-stream")
|
w.Header().Set("Content-Type", "text/event-stream")
|
||||||
_, _ = io.WriteString(w, "data: {\"choices\":[{\"delta\":{\"content\":\"{\\\"people\\\":[],\"}}]}\n\n")
|
_, _ = io.WriteString(w, "data: {\"choices\":[{\"delta\":{\"content\":\"{\\\"people\\\":[],\"}}]}\n\n")
|
||||||
@@ -35,20 +49,13 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
|||||||
}))
|
}))
|
||||||
defer server.Close()
|
defer server.Close()
|
||||||
|
|
||||||
client := New(Config{
|
client := newTestClient(t, server.URL)
|
||||||
Name: "litellm",
|
metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{
|
||||||
BaseURL: server.URL,
|
Model: "qwen3.5:latest",
|
||||||
APIKey: "test-key",
|
Temperature: 0.1,
|
||||||
MetadataModel: "qwen3.5:latest",
|
}, "Project idea: Build an Android companion app.")
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an Android companion app.")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
t.Fatalf("ExtractMetadataWith() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if metadata.Type != "idea" {
|
if metadata.Type != "idea" {
|
||||||
@@ -94,20 +101,13 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) {
|
|||||||
}))
|
}))
|
||||||
defer server.Close()
|
defer server.Close()
|
||||||
|
|
||||||
client := New(Config{
|
client := newTestClient(t, server.URL)
|
||||||
Name: "litellm",
|
metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{
|
||||||
BaseURL: server.URL,
|
Model: "qwen3.5:latest",
|
||||||
APIKey: "test-key",
|
Temperature: 0.1,
|
||||||
MetadataModel: "qwen3.5:latest",
|
}, "Project idea: Build an Android companion app.")
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an Android companion app.")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
t.Fatalf("ExtractMetadataWith() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if metadata.Type != "idea" {
|
if metadata.Type != "idea" {
|
||||||
@@ -127,71 +127,33 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExtractMetadataBypassesInvalidFallbackModelAfterFirstFailure(t *testing.T) {
|
func TestIsPermanentModelError(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
var mu sync.Mutex
|
cases := []struct {
|
||||||
primaryCalls := 0
|
name string
|
||||||
invalidFallbackCalls := 0
|
err error
|
||||||
|
want bool
|
||||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
}{
|
||||||
defer func() {
|
{"nil", nil, false},
|
||||||
_ = r.Body.Close()
|
{"invalid model", errMsg("Invalid model name passed in model=qwen3"), true},
|
||||||
}()
|
{"model not found", errMsg("model_not_found"), true},
|
||||||
|
{"no such model", errMsg("no such model"), true},
|
||||||
var req chatCompletionsRequest
|
{"transient", errMsg("connection refused"), false},
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
||||||
t.Fatalf("decode request: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
switch req.Model {
|
|
||||||
case "empty-primary":
|
|
||||||
_, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":""}}]}`)
|
|
||||||
case "qwen3.5:latest":
|
|
||||||
mu.Lock()
|
|
||||||
primaryCalls++
|
|
||||||
mu.Unlock()
|
|
||||||
_, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":"{\"people\":[],\"action_items\":[],\"dates_mentioned\":[],\"topics\":[\"metadata\"],\"type\":\"observation\",\"source\":\"primary\"}"}}]}`)
|
|
||||||
case "qwen3":
|
|
||||||
mu.Lock()
|
|
||||||
invalidFallbackCalls++
|
|
||||||
mu.Unlock()
|
|
||||||
w.WriteHeader(http.StatusBadRequest)
|
|
||||||
_, _ = io.WriteString(w, "{\"error\":{\"message\":\"{'error': '/chat/completions: Invalid model name passed in model=qwen3. Call `/v1/models` to view available models for your key.'}\"}}")
|
|
||||||
default:
|
|
||||||
t.Fatalf("unexpected model %q", req.Model)
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
defer server.Close()
|
|
||||||
|
|
||||||
client := New(Config{
|
|
||||||
Name: "litellm",
|
|
||||||
BaseURL: server.URL,
|
|
||||||
APIKey: "test-key",
|
|
||||||
MetadataModel: "empty-primary",
|
|
||||||
FallbackMetadataModels: []string{"qwen3", "qwen3.5:latest"},
|
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
for i := 0; i < 2; i++ {
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "A short note about metadata.")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
|
||||||
}
|
|
||||||
if metadata.Source != "primary" {
|
|
||||||
t.Fatalf("metadata source = %q, want primary", metadata.Source)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mu.Lock()
|
for _, tc := range cases {
|
||||||
defer mu.Unlock()
|
tc := tc
|
||||||
if invalidFallbackCalls != 1 {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
t.Fatalf("invalid fallback calls = %d, want 1", invalidFallbackCalls)
|
if got := IsPermanentModelError(tc.err); got != tc.want {
|
||||||
}
|
t.Fatalf("IsPermanentModelError(%v) = %v, want %v", tc.err, got, tc.want)
|
||||||
if primaryCalls != 2 {
|
}
|
||||||
t.Fatalf("valid fallback calls = %d, want 2", primaryCalls)
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type stringError string
|
||||||
|
|
||||||
|
func (s stringError) Error() string { return string(s) }
|
||||||
|
|
||||||
|
func errMsg(s string) error { return stringError(s) }
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/litellm"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/ollama"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/openrouter"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (Provider, error) {
|
|
||||||
switch cfg.Provider {
|
|
||||||
case "litellm":
|
|
||||||
return litellm.New(cfg, httpClient, log)
|
|
||||||
case "ollama":
|
|
||||||
return ollama.New(cfg, httpClient, log)
|
|
||||||
case "openrouter":
|
|
||||||
return openrouter.New(cfg, httpClient, log)
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("unsupported ai.provider: %s", cfg.Provider)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestNewProviderSupportsOllama(t *testing.T) {
|
|
||||||
provider, err := NewProvider(config.AIConfig{
|
|
||||||
Provider: "ollama",
|
|
||||||
Embeddings: config.AIEmbeddingConfig{
|
|
||||||
Model: "nomic-embed-text",
|
|
||||||
Dimensions: 768,
|
|
||||||
},
|
|
||||||
Metadata: config.AIMetadataConfig{
|
|
||||||
Model: "llama3.2",
|
|
||||||
},
|
|
||||||
Ollama: config.OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
}, &http.Client{}, slog.New(slog.NewTextHandler(io.Discard, nil)))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("NewProvider() error = %v", err)
|
|
||||||
}
|
|
||||||
if provider.Name() != "ollama" {
|
|
||||||
t.Fatalf("provider name = %q, want ollama", provider.Name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
package litellm
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
fallbacks := cfg.LiteLLM.EffectiveFallbackMetadataModels()
|
|
||||||
if len(fallbacks) == 0 {
|
|
||||||
fallbacks = cfg.Metadata.EffectiveFallbackModels()
|
|
||||||
}
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "litellm",
|
|
||||||
BaseURL: cfg.LiteLLM.BaseURL,
|
|
||||||
APIKey: cfg.LiteLLM.APIKey,
|
|
||||||
EmbeddingModel: cfg.LiteLLM.EmbeddingModel,
|
|
||||||
MetadataModel: cfg.LiteLLM.MetadataModel,
|
|
||||||
FallbackMetadataModels: fallbacks,
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: cfg.LiteLLM.RequestHeaders,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
package ollama
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "ollama",
|
|
||||||
BaseURL: cfg.Ollama.BaseURL,
|
|
||||||
APIKey: cfg.Ollama.APIKey,
|
|
||||||
EmbeddingModel: cfg.Embeddings.Model,
|
|
||||||
MetadataModel: cfg.Metadata.Model,
|
|
||||||
FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(),
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: cfg.Ollama.RequestHeaders,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
package openrouter
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
headers := make(map[string]string, len(cfg.OpenRouter.ExtraHeaders)+2)
|
|
||||||
for key, value := range cfg.OpenRouter.ExtraHeaders {
|
|
||||||
headers[key] = value
|
|
||||||
}
|
|
||||||
if cfg.OpenRouter.SiteURL != "" {
|
|
||||||
headers["HTTP-Referer"] = cfg.OpenRouter.SiteURL
|
|
||||||
}
|
|
||||||
if cfg.OpenRouter.AppName != "" {
|
|
||||||
headers["X-Title"] = cfg.OpenRouter.AppName
|
|
||||||
}
|
|
||||||
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "openrouter",
|
|
||||||
BaseURL: cfg.OpenRouter.BaseURL,
|
|
||||||
APIKey: cfg.OpenRouter.APIKey,
|
|
||||||
EmbeddingModel: cfg.Embeddings.Model,
|
|
||||||
MetadataModel: cfg.Metadata.Model,
|
|
||||||
FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(),
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: headers,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
|
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Provider interface {
|
|
||||||
Embed(ctx context.Context, input string) ([]float32, error)
|
|
||||||
ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error)
|
|
||||||
Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error)
|
|
||||||
Name() string
|
|
||||||
EmbeddingModel() string
|
|
||||||
}
|
|
||||||
96
internal/ai/registry.go
Normal file
96
internal/ai/registry.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Registry holds one compat.Client per named provider. Runners look up clients
|
||||||
|
// by provider name when walking a role chain.
|
||||||
|
type Registry struct {
|
||||||
|
clients map[string]*compat.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRegistry builds a Registry from the configured providers. Each provider
|
||||||
|
// type maps onto a compat.Client with type-specific header plumbing (e.g.
|
||||||
|
// openrouter's HTTP-Referer / X-Title).
|
||||||
|
func NewRegistry(providers map[string]config.ProviderConfig, httpClient *http.Client, log *slog.Logger) (*Registry, error) {
|
||||||
|
if httpClient == nil {
|
||||||
|
return nil, fmt.Errorf("ai registry: http client is required")
|
||||||
|
}
|
||||||
|
if len(providers) == 0 {
|
||||||
|
return nil, fmt.Errorf("ai registry: no providers configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
clients := make(map[string]*compat.Client, len(providers))
|
||||||
|
for name, p := range providers {
|
||||||
|
headers, err := providerHeaders(p)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("ai registry: provider %q: %w", name, err)
|
||||||
|
}
|
||||||
|
clients[name] = compat.New(compat.Config{
|
||||||
|
Name: name,
|
||||||
|
BaseURL: p.BaseURL,
|
||||||
|
APIKey: p.APIKey,
|
||||||
|
Headers: headers,
|
||||||
|
HTTPClient: httpClient,
|
||||||
|
Log: log,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return &Registry{clients: clients}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Client returns the compat.Client registered under name.
|
||||||
|
func (r *Registry) Client(name string) (*compat.Client, error) {
|
||||||
|
c, ok := r.clients[name]
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("ai registry: provider %q is not configured", name)
|
||||||
|
}
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Names returns the registered provider names.
|
||||||
|
func (r *Registry) Names() []string {
|
||||||
|
names := make([]string, 0, len(r.clients))
|
||||||
|
for name := range r.clients {
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
|
func providerHeaders(p config.ProviderConfig) (map[string]string, error) {
|
||||||
|
switch p.Type {
|
||||||
|
case "litellm", "ollama":
|
||||||
|
return cloneHeaders(p.RequestHeaders), nil
|
||||||
|
case "openrouter":
|
||||||
|
headers := cloneHeaders(p.RequestHeaders)
|
||||||
|
if headers == nil {
|
||||||
|
headers = map[string]string{}
|
||||||
|
}
|
||||||
|
if s := strings.TrimSpace(p.SiteURL); s != "" {
|
||||||
|
headers["HTTP-Referer"] = s
|
||||||
|
}
|
||||||
|
if s := strings.TrimSpace(p.AppName); s != "" {
|
||||||
|
headers["X-Title"] = s
|
||||||
|
}
|
||||||
|
return headers, nil
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported provider type %q", p.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func cloneHeaders(in map[string]string) map[string]string {
|
||||||
|
if len(in) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := make(map[string]string, len(in))
|
||||||
|
for k, v := range in {
|
||||||
|
out[k] = v
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
80
internal/ai/registry_test.go
Normal file
80
internal/ai/registry_test.go
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewRegistryOpenRouterHeaders(t *testing.T) {
|
||||||
|
var (
|
||||||
|
gotReferer string
|
||||||
|
gotTitle string
|
||||||
|
gotCustom string
|
||||||
|
)
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
gotReferer = r.Header.Get("HTTP-Referer")
|
||||||
|
gotTitle = r.Header.Get("X-Title")
|
||||||
|
gotCustom = r.Header.Get("X-Custom")
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"choices": []map[string]any{{"message": map[string]any{"role": "assistant", "content": "ok"}}},
|
||||||
|
})
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
providers := map[string]config.ProviderConfig{
|
||||||
|
"router": {
|
||||||
|
Type: "openrouter",
|
||||||
|
BaseURL: srv.URL,
|
||||||
|
APIKey: "secret",
|
||||||
|
RequestHeaders: map[string]string{
|
||||||
|
"X-Custom": "value",
|
||||||
|
},
|
||||||
|
AppName: "amcs",
|
||||||
|
SiteURL: "https://example.com",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
reg, err := NewRegistry(providers, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
client, err := reg.Client("router")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Client(router) error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := client.SummarizeWith(context.Background(), compat.SummarizeOptions{Model: "gpt-4.1-mini"}, "system", "user"); err != nil {
|
||||||
|
t.Fatalf("SummarizeWith() error = %v", err)
|
||||||
|
}
|
||||||
|
if gotReferer != "https://example.com" {
|
||||||
|
t.Fatalf("HTTP-Referer = %q, want https://example.com", gotReferer)
|
||||||
|
}
|
||||||
|
if gotTitle != "amcs" {
|
||||||
|
t.Fatalf("X-Title = %q, want amcs", gotTitle)
|
||||||
|
}
|
||||||
|
if gotCustom != "value" {
|
||||||
|
t.Fatalf("X-Custom = %q, want value", gotCustom)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewRegistryRejectsUnsupportedProviderType(t *testing.T) {
|
||||||
|
providers := map[string]config.ProviderConfig{
|
||||||
|
"bad": {
|
||||||
|
Type: "unknown",
|
||||||
|
BaseURL: "http://localhost:4000/v1",
|
||||||
|
APIKey: "secret",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := NewRegistry(providers, &http.Client{}, nil)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("NewRegistry() error = nil, want unsupported provider type error")
|
||||||
|
}
|
||||||
|
}
|
||||||
367
internal/ai/runner.go
Normal file
367
internal/ai/runner.go
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Health TTLs per failure class. These are short enough that a healed target
|
||||||
|
// gets retried without manual intervention, but long enough to avoid hammering
|
||||||
|
// a broken provider every call.
|
||||||
|
const (
|
||||||
|
transientCooldown = 30 * time.Second
|
||||||
|
permanentCooldown = 10 * time.Minute
|
||||||
|
emptyResponseThreshold = 3
|
||||||
|
emptyResponseCooldown = 2 * time.Minute
|
||||||
|
dimensionMismatchWarning = "embedding dimension mismatch"
|
||||||
|
)
|
||||||
|
|
||||||
|
// EmbedResult carries the vector plus the (provider, model) that produced it —
|
||||||
|
// callers store the actual model so later searches against that row use the
|
||||||
|
// matching query embedding.
|
||||||
|
type EmbedResult struct {
|
||||||
|
Vector []float32
|
||||||
|
Provider string
|
||||||
|
Model string
|
||||||
|
}
|
||||||
|
|
||||||
|
// EmbeddingRunner executes the embeddings role chain with sequential fallback.
|
||||||
|
type EmbeddingRunner struct {
|
||||||
|
registry *Registry
|
||||||
|
chain []config.RoleTarget
|
||||||
|
dimensions int
|
||||||
|
health *healthTracker
|
||||||
|
log *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// MetadataRunner executes the metadata role chain with sequential fallback and
|
||||||
|
// a heuristic fallthrough when every target is unhealthy or fails.
|
||||||
|
type MetadataRunner struct {
|
||||||
|
registry *Registry
|
||||||
|
chain []config.RoleTarget
|
||||||
|
opts metadataRunOpts
|
||||||
|
health *healthTracker
|
||||||
|
log *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
type metadataRunOpts struct {
|
||||||
|
temperature float64
|
||||||
|
logConversations bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEmbeddingRunner builds a runner for the embeddings role. chain must be
|
||||||
|
// non-empty and every target must be registered.
|
||||||
|
func NewEmbeddingRunner(registry *Registry, chain []config.RoleTarget, dimensions int, log *slog.Logger) (*EmbeddingRunner, error) {
|
||||||
|
if registry == nil {
|
||||||
|
return nil, fmt.Errorf("embedding runner: registry is required")
|
||||||
|
}
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return nil, fmt.Errorf("embedding runner: chain is empty")
|
||||||
|
}
|
||||||
|
if dimensions <= 0 {
|
||||||
|
return nil, fmt.Errorf("embedding runner: dimensions must be > 0")
|
||||||
|
}
|
||||||
|
for i, t := range chain {
|
||||||
|
if _, err := registry.Client(t.Provider); err != nil {
|
||||||
|
return nil, fmt.Errorf("embedding runner: chain[%d]: %w", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &EmbeddingRunner{
|
||||||
|
registry: registry,
|
||||||
|
chain: chain,
|
||||||
|
dimensions: dimensions,
|
||||||
|
health: newHealthTracker(),
|
||||||
|
log: log,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMetadataRunner builds a runner for the metadata role.
|
||||||
|
func NewMetadataRunner(registry *Registry, chain []config.RoleTarget, temperature float64, logConversations bool, log *slog.Logger) (*MetadataRunner, error) {
|
||||||
|
if registry == nil {
|
||||||
|
return nil, fmt.Errorf("metadata runner: registry is required")
|
||||||
|
}
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return nil, fmt.Errorf("metadata runner: chain is empty")
|
||||||
|
}
|
||||||
|
for i, t := range chain {
|
||||||
|
if _, err := registry.Client(t.Provider); err != nil {
|
||||||
|
return nil, fmt.Errorf("metadata runner: chain[%d]: %w", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &MetadataRunner{
|
||||||
|
registry: registry,
|
||||||
|
chain: chain,
|
||||||
|
opts: metadataRunOpts{
|
||||||
|
temperature: temperature,
|
||||||
|
logConversations: logConversations,
|
||||||
|
},
|
||||||
|
health: newHealthTracker(),
|
||||||
|
log: log,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrimaryProvider returns the first provider in the chain.
|
||||||
|
func (r *EmbeddingRunner) PrimaryProvider() string { return r.chain[0].Provider }
|
||||||
|
|
||||||
|
// PrimaryModel returns the first model in the chain — the one used as the
|
||||||
|
// storage key for search matching.
|
||||||
|
func (r *EmbeddingRunner) PrimaryModel() string { return r.chain[0].Model }
|
||||||
|
|
||||||
|
// Dimensions returns the required vector dimension.
|
||||||
|
func (r *EmbeddingRunner) Dimensions() int { return r.dimensions }
|
||||||
|
|
||||||
|
// Embed walks the chain and returns the first successful embedding. The
|
||||||
|
// returned EmbedResult names the actual (provider, model) that produced the
|
||||||
|
// vector — callers use that when recording the row.
|
||||||
|
func (r *EmbeddingRunner) Embed(ctx context.Context, input string) (EmbedResult, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
vec, err := client.EmbedWith(ctx, target.Model, input)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return EmbedResult{}, ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("embed", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(vec) != r.dimensions {
|
||||||
|
dimErr := fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec))
|
||||||
|
r.health.markTransient(target)
|
||||||
|
r.logFailure("embed", target, dimErr)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, dimErr))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return EmbedResult{Vector: vec, Provider: target.Provider, Model: target.Model}, nil
|
||||||
|
}
|
||||||
|
return EmbedResult{}, fmt.Errorf("all embedding targets failed: %w", errors.Join(errs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// EmbedPrimary embeds using only the primary target — used for search queries
|
||||||
|
// so the query vector matches rows stored under the primary model. Falls back
|
||||||
|
// to returning the error without walking the chain.
|
||||||
|
func (r *EmbeddingRunner) EmbedPrimary(ctx context.Context, input string) ([]float32, error) {
|
||||||
|
target := r.chain[0]
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
vec, err := client.EmbedWith(ctx, target.Model, input)
|
||||||
|
if err != nil {
|
||||||
|
r.classify(target, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(vec) != r.dimensions {
|
||||||
|
return nil, fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec))
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return vec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrimaryProvider / PrimaryModel for metadata mirror the embedding runner.
|
||||||
|
func (r *MetadataRunner) PrimaryProvider() string { return r.chain[0].Provider }
|
||||||
|
func (r *MetadataRunner) PrimaryModel() string { return r.chain[0].Model }
|
||||||
|
|
||||||
|
// ExtractMetadata walks the chain sequentially. If every target fails or is
|
||||||
|
// unhealthy, it returns a heuristic metadata so capture never hard-fails.
|
||||||
|
func (r *MetadataRunner) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
md, err := client.ExtractMetadataWith(ctx, compat.MetadataOptions{
|
||||||
|
Model: target.Model,
|
||||||
|
Temperature: r.opts.temperature,
|
||||||
|
LogConversations: r.opts.logConversations,
|
||||||
|
}, input)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return thoughttypes.ThoughtMetadata{}, ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("metadata", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return md, nil
|
||||||
|
}
|
||||||
|
if r.log != nil {
|
||||||
|
r.log.Warn("metadata chain exhausted, using heuristic fallback",
|
||||||
|
slog.Int("targets", len(r.chain)),
|
||||||
|
slog.String("error", errors.Join(errs...).Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return compat.HeuristicMetadataFromInput(input), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summarize walks the chain; unlike metadata, there is no heuristic fallback —
|
||||||
|
// returns the joined error when everything fails.
|
||||||
|
func (r *MetadataRunner) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
out, err := client.SummarizeWith(ctx, compat.SummarizeOptions{
|
||||||
|
Model: target.Model,
|
||||||
|
Temperature: r.opts.temperature,
|
||||||
|
}, systemPrompt, userPrompt)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return "", ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("summarize", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return out, nil
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("all summarize targets failed: %w", errors.Join(errs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EmbeddingRunner) classify(target config.RoleTarget, err error) {
|
||||||
|
switch {
|
||||||
|
case compat.IsPermanentModelError(err):
|
||||||
|
r.health.markPermanent(target)
|
||||||
|
default:
|
||||||
|
r.health.markTransient(target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *MetadataRunner) classify(target config.RoleTarget, err error) {
|
||||||
|
switch {
|
||||||
|
case compat.IsPermanentModelError(err):
|
||||||
|
r.health.markPermanent(target)
|
||||||
|
case errors.Is(err, compat.ErrEmptyResponse):
|
||||||
|
r.health.markEmpty(target)
|
||||||
|
default:
|
||||||
|
r.health.markTransient(target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EmbeddingRunner) logFailure(role string, target config.RoleTarget, err error) {
|
||||||
|
if r.log == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.log.Warn("ai target failed",
|
||||||
|
slog.String("role", role),
|
||||||
|
slog.String("provider", target.Provider),
|
||||||
|
slog.String("model", target.Model),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *MetadataRunner) logFailure(role string, target config.RoleTarget, err error) {
|
||||||
|
if r.log == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.log.Warn("ai target failed",
|
||||||
|
slog.String("role", role),
|
||||||
|
slog.String("provider", target.Provider),
|
||||||
|
slog.String("model", target.Model),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// healthTracker records per-(provider, model) failure state. skip returns true
|
||||||
|
// when a target is still inside its cooldown window; the caller then tries the
|
||||||
|
// next target in the chain.
|
||||||
|
type healthTracker struct {
|
||||||
|
mu sync.Mutex
|
||||||
|
states map[config.RoleTarget]*healthState
|
||||||
|
}
|
||||||
|
|
||||||
|
type healthState struct {
|
||||||
|
unhealthyUntil time.Time
|
||||||
|
emptyCount int
|
||||||
|
}
|
||||||
|
|
||||||
|
func newHealthTracker() *healthTracker {
|
||||||
|
return &healthTracker{states: map[config.RoleTarget]*healthState{}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) skip(target config.RoleTarget) bool {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s, ok := h.states[target]
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return time.Now().Before(s.unhealthyUntil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markTransient(target config.RoleTarget) {
|
||||||
|
h.setCooldown(target, transientCooldown)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markPermanent(target config.RoleTarget) {
|
||||||
|
h.setCooldown(target, permanentCooldown)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markEmpty(target config.RoleTarget) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s := h.states[target]
|
||||||
|
if s == nil {
|
||||||
|
s = &healthState{}
|
||||||
|
h.states[target] = s
|
||||||
|
}
|
||||||
|
s.emptyCount++
|
||||||
|
if s.emptyCount >= emptyResponseThreshold {
|
||||||
|
s.unhealthyUntil = time.Now().Add(emptyResponseCooldown)
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markHealthy(target config.RoleTarget) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
if s, ok := h.states[target]; ok {
|
||||||
|
s.unhealthyUntil = time.Time{}
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) setCooldown(target config.RoleTarget, d time.Duration) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s := h.states[target]
|
||||||
|
if s == nil {
|
||||||
|
s = &healthState{}
|
||||||
|
h.states[target] = s
|
||||||
|
}
|
||||||
|
s.unhealthyUntil = time.Now().Add(d)
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
139
internal/ai/runner_test.go
Normal file
139
internal/ai/runner_test.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"sync"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestEmbeddingRunnerFallsBackAndSkipsUnhealthyPrimary(t *testing.T) {
|
||||||
|
var (
|
||||||
|
mu sync.Mutex
|
||||||
|
primaryCalls int
|
||||||
|
)
|
||||||
|
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/embeddings" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var req struct {
|
||||||
|
Model string `json:"model"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch req.Model {
|
||||||
|
case "embed-primary":
|
||||||
|
mu.Lock()
|
||||||
|
primaryCalls++
|
||||||
|
mu.Unlock()
|
||||||
|
http.Error(w, "upstream down", http.StatusBadGateway)
|
||||||
|
case "embed-fallback":
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"data": []map[string]any{{"embedding": []float32{0.1, 0.2, 0.3}}},
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
http.Error(w, "unknown model", http.StatusBadRequest)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
reg, err := NewRegistry(map[string]config.ProviderConfig{
|
||||||
|
"p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"},
|
||||||
|
"p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"},
|
||||||
|
}, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
runner, err := NewEmbeddingRunner(reg, []config.RoleTarget{
|
||||||
|
{Provider: "p1", Model: "embed-primary"},
|
||||||
|
{Provider: "p2", Model: "embed-fallback"},
|
||||||
|
}, 3, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewEmbeddingRunner() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := runner.Embed(context.Background(), "hello")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Embed() first call error = %v", err)
|
||||||
|
}
|
||||||
|
if res.Provider != "p2" || res.Model != "embed-fallback" {
|
||||||
|
t.Fatalf("Embed() first call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err = runner.Embed(context.Background(), "hello again")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Embed() second call error = %v", err)
|
||||||
|
}
|
||||||
|
if res.Provider != "p2" || res.Model != "embed-fallback" {
|
||||||
|
t.Fatalf("Embed() second call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model)
|
||||||
|
}
|
||||||
|
|
||||||
|
mu.Lock()
|
||||||
|
calls := primaryCalls
|
||||||
|
mu.Unlock()
|
||||||
|
if calls != 3 {
|
||||||
|
t.Fatalf("primary calls = %d, want 3 (first request retries 3x; second call should skip unhealthy primary)", calls)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMetadataRunnerSummarizeFallsBack(t *testing.T) {
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/chat/completions" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var req struct {
|
||||||
|
Model string `json:"model"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch req.Model {
|
||||||
|
case "sum-primary":
|
||||||
|
http.Error(w, "provider error", http.StatusBadGateway)
|
||||||
|
case "sum-fallback":
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"choices": []map[string]any{{
|
||||||
|
"message": map[string]any{"role": "assistant", "content": "fallback summary"},
|
||||||
|
}},
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
http.Error(w, "unknown model", http.StatusBadRequest)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
reg, err := NewRegistry(map[string]config.ProviderConfig{
|
||||||
|
"p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"},
|
||||||
|
"p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"},
|
||||||
|
}, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
runner, err := NewMetadataRunner(reg, []config.RoleTarget{
|
||||||
|
{Provider: "p1", Model: "sum-primary"},
|
||||||
|
{Provider: "p2", Model: "sum-fallback"},
|
||||||
|
}, 0.1, false, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewMetadataRunner() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
summary, err := runner.Summarize(context.Background(), "system", "user")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Summarize() error = %v", err)
|
||||||
|
}
|
||||||
|
if summary != "fallback summary" {
|
||||||
|
t.Fatalf("summary = %q, want %q", summary, "fallback summary")
|
||||||
|
}
|
||||||
|
}
|
||||||
79
internal/app/admin_actions.go
Normal file
79
internal/app/admin_actions.go
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
package app
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/tools"
|
||||||
|
)
|
||||||
|
|
||||||
|
type adminActions struct {
|
||||||
|
backfill *tools.BackfillTool
|
||||||
|
retry *tools.EnrichmentRetryer
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func newAdminActions(backfill *tools.BackfillTool, retry *tools.EnrichmentRetryer, logger *slog.Logger) *adminActions {
|
||||||
|
return &adminActions{
|
||||||
|
backfill: backfill,
|
||||||
|
retry: retry,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *adminActions) backfillHandler() http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
w.Header().Set("Allow", http.MethodPost)
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var in tools.BackfillInput
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
|
||||||
|
http.Error(w, "invalid request body", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_, out, err := a.backfill.Handle(r.Context(), nil, in)
|
||||||
|
if err != nil {
|
||||||
|
if a.logger != nil {
|
||||||
|
a.logger.Warn("admin backfill failed", slog.String("error", err.Error()))
|
||||||
|
}
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_ = json.NewEncoder(w).Encode(out)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *adminActions) retryMetadataHandler() http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
w.Header().Set("Allow", http.MethodPost)
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var in tools.RetryEnrichmentInput
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
|
||||||
|
http.Error(w, "invalid request body", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_, out, err := a.retry.Handle(r.Context(), nil, in)
|
||||||
|
if err != nil {
|
||||||
|
if a.logger != nil {
|
||||||
|
a.logger.Warn("admin metadata retry failed", slog.String("error", err.Error()))
|
||||||
|
}
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_ = json.NewEncoder(w).Encode(out)
|
||||||
|
})
|
||||||
|
}
|
||||||
268
internal/app/admin_legacy.go.disabled
Normal file
268
internal/app/admin_legacy.go.disabled
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
package app
|
||||||
|
// Legacy admin handlers retired in favor of ResolveSpec-backed routes.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/store"
|
||||||
|
ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
type adminHandlers struct {
|
||||||
|
db *store.DB
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func newAdminHandlers(db *store.DB, logger *slog.Logger) *adminHandlers {
|
||||||
|
return &adminHandlers{db: db, logger: logger}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) register(mux *http.ServeMux, middleware func(http.Handler) http.Handler) {
|
||||||
|
handle := func(pattern string, fn http.HandlerFunc) {
|
||||||
|
mux.Handle(pattern, middleware(fn))
|
||||||
|
}
|
||||||
|
|
||||||
|
handle("GET /api/admin/projects", h.listProjects)
|
||||||
|
handle("POST /api/admin/projects", h.createProject)
|
||||||
|
handle("GET /api/admin/thoughts", h.listThoughts)
|
||||||
|
handle("GET /api/admin/thoughts/{id}", h.getThought)
|
||||||
|
handle("DELETE /api/admin/thoughts/{id}", h.deleteThought)
|
||||||
|
handle("POST /api/admin/thoughts/{id}/archive", h.archiveThought)
|
||||||
|
handle("GET /api/admin/skills", h.listSkills)
|
||||||
|
handle("DELETE /api/admin/skills/{id}", h.deleteSkill)
|
||||||
|
handle("GET /api/admin/guardrails", h.listGuardrails)
|
||||||
|
handle("DELETE /api/admin/guardrails/{id}", h.deleteGuardrail)
|
||||||
|
handle("GET /api/admin/files", h.listFiles)
|
||||||
|
handle("GET /api/admin/stats", h.stats)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Projects ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) listProjects(w http.ResponseWriter, r *http.Request) {
|
||||||
|
projects, err := h.db.ListProjects(r.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "list projects", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, projects)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) createProject(w http.ResponseWriter, r *http.Request) {
|
||||||
|
var body struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
|
||||||
|
http.Error(w, "invalid request body", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(body.Name) == "" {
|
||||||
|
http.Error(w, "name is required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
project, err := h.db.CreateProject(r.Context(), body.Name, body.Description)
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "create project", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusCreated)
|
||||||
|
writeJSON(w, project)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Thoughts ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) listThoughts(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
limit := 50
|
||||||
|
if l := q.Get("limit"); l != "" {
|
||||||
|
if n, err := strconv.Atoi(l); err == nil && n > 0 {
|
||||||
|
limit = min(n, 200)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
query := strings.TrimSpace(q.Get("q"))
|
||||||
|
includeArchived := q.Get("include_archived") == "true"
|
||||||
|
|
||||||
|
var projectID *uuid.UUID
|
||||||
|
if pid := q.Get("project_id"); pid != "" {
|
||||||
|
if id, err := uuid.Parse(pid); err == nil {
|
||||||
|
projectID = &id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if query != "" {
|
||||||
|
results, err := h.db.SearchThoughtsText(r.Context(), query, limit, projectID, nil)
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "search thoughts", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, results)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
thoughts, err := h.db.ListThoughts(r.Context(), ext.ListFilter{
|
||||||
|
Limit: limit,
|
||||||
|
ProjectID: projectID,
|
||||||
|
IncludeArchived: includeArchived,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "list thoughts", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, thoughts)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) getThought(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, ok := parseUUID(w, r.PathValue("id"))
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
thought, err := h.db.GetThought(r.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "get thought", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, thought)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) deleteThought(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, ok := parseUUID(w, r.PathValue("id"))
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := h.db.DeleteThought(r.Context(), id); err != nil {
|
||||||
|
h.internalError(w, "delete thought", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) archiveThought(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, ok := parseUUID(w, r.PathValue("id"))
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := h.db.ArchiveThought(r.Context(), id); err != nil {
|
||||||
|
h.internalError(w, "archive thought", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Skills ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) listSkills(w http.ResponseWriter, r *http.Request) {
|
||||||
|
tag := r.URL.Query().Get("tag")
|
||||||
|
skills, err := h.db.ListSkills(r.Context(), tag)
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "list skills", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, skills)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) deleteSkill(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, ok := parseUUID(w, r.PathValue("id"))
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := h.db.RemoveSkill(r.Context(), id); err != nil {
|
||||||
|
h.internalError(w, "delete skill", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Guardrails ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) listGuardrails(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
guardrails, err := h.db.ListGuardrails(r.Context(), q.Get("tag"), q.Get("severity"))
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "list guardrails", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, guardrails)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *adminHandlers) deleteGuardrail(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, ok := parseUUID(w, r.PathValue("id"))
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := h.db.RemoveGuardrail(r.Context(), id); err != nil {
|
||||||
|
h.internalError(w, "delete guardrail", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Files ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) listFiles(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
limit := 100
|
||||||
|
if l := q.Get("limit"); l != "" {
|
||||||
|
if n, err := strconv.Atoi(l); err == nil && n > 0 {
|
||||||
|
limit = min(n, 500)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter := ext.StoredFileFilter{Limit: limit}
|
||||||
|
if pid := q.Get("project_id"); pid != "" {
|
||||||
|
if id, err := uuid.Parse(pid); err == nil {
|
||||||
|
filter.ProjectID = &id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if tid := q.Get("thought_id"); tid != "" {
|
||||||
|
if id, err := uuid.Parse(tid); err == nil {
|
||||||
|
filter.ThoughtID = &id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
filter.Kind = q.Get("kind")
|
||||||
|
|
||||||
|
files, err := h.db.ListStoredFiles(r.Context(), filter)
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "list files", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, files)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Stats ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) stats(w http.ResponseWriter, r *http.Request) {
|
||||||
|
stats, err := h.db.Stats(r.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.internalError(w, "stats", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, stats)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Helpers ---
|
||||||
|
|
||||||
|
func (h *adminHandlers) internalError(w http.ResponseWriter, op string, err error) {
|
||||||
|
h.logger.Error("admin handler error", slog.String("op", op), slog.String("error", err.Error()))
|
||||||
|
http.Error(w, "internal server error", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeJSON(w http.ResponseWriter, v any) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_ = json.NewEncoder(w).Encode(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseUUID(w http.ResponseWriter, s string) (uuid.UUID, bool) {
|
||||||
|
id, err := uuid.Parse(s)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, "invalid id", http.StatusBadRequest)
|
||||||
|
return uuid.UUID{}, false
|
||||||
|
}
|
||||||
|
return id, true
|
||||||
|
}
|
||||||
@@ -34,7 +34,7 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
|
|
||||||
logger.Info("loaded configuration",
|
logger.Info("loaded configuration",
|
||||||
slog.String("path", loadedFrom),
|
slog.String("path", loadedFrom),
|
||||||
slog.String("provider", cfg.AI.Provider),
|
slog.Int("config_version", cfg.Version),
|
||||||
slog.String("version", info.Version),
|
slog.String("version", info.Version),
|
||||||
slog.String("tag_name", info.TagName),
|
slog.String("tag_name", info.TagName),
|
||||||
slog.String("build_date", info.BuildDate),
|
slog.String("build_date", info.BuildDate),
|
||||||
@@ -52,11 +52,37 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
httpClient := &http.Client{Timeout: 30 * time.Second}
|
httpClient := &http.Client{Timeout: 30 * time.Second}
|
||||||
provider, err := ai.NewProvider(cfg.AI, httpClient, logger)
|
registry, err := ai.NewRegistry(cfg.AI.Providers, httpClient, logger)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
foregroundEmbeddings, err := ai.NewEmbeddingRunner(registry, cfg.AI.Embeddings.Chain(), cfg.AI.Embeddings.Dimensions, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
foregroundMetadata, err := ai.NewMetadataRunner(registry, cfg.AI.Metadata.Chain(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
backgroundEmbeddings := foregroundEmbeddings
|
||||||
|
backgroundMetadata := foregroundMetadata
|
||||||
|
if cfg.AI.Background != nil {
|
||||||
|
if cfg.AI.Background.Embeddings != nil {
|
||||||
|
backgroundEmbeddings, err = ai.NewEmbeddingRunner(registry, cfg.AI.Background.Embeddings.AsTargets(), cfg.AI.Embeddings.Dimensions, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if cfg.AI.Background.Metadata != nil {
|
||||||
|
backgroundMetadata, err = ai.NewMetadataRunner(registry, cfg.AI.Background.Metadata.AsTargets(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
var keyring *auth.Keyring
|
var keyring *auth.Keyring
|
||||||
var oauthRegistry *auth.OAuthRegistry
|
var oauthRegistry *auth.OAuthRegistry
|
||||||
var tokenStore *auth.TokenStore
|
var tokenStore *auth.TokenStore
|
||||||
@@ -66,23 +92,24 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
tokenStore = auth.NewTokenStore(0)
|
||||||
if len(cfg.Auth.OAuth.Clients) > 0 {
|
if len(cfg.Auth.OAuth.Clients) > 0 {
|
||||||
oauthRegistry, err = auth.NewOAuthRegistry(cfg.Auth.OAuth.Clients)
|
oauthRegistry, err = auth.NewOAuthRegistry(cfg.Auth.OAuth.Clients)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
tokenStore = auth.NewTokenStore(0)
|
|
||||||
}
|
}
|
||||||
authCodes := auth.NewAuthCodeStore()
|
authCodes := auth.NewAuthCodeStore()
|
||||||
dynClients := auth.NewDynamicClientStore()
|
dynClients := auth.NewDynamicClientStore()
|
||||||
activeProjects := session.NewActiveProjects()
|
activeProjects := session.NewActiveProjects()
|
||||||
|
|
||||||
logger.Info("database connection verified",
|
logger.Info("ai providers initialised",
|
||||||
slog.String("provider", provider.Name()),
|
slog.String("embedding_primary", foregroundEmbeddings.PrimaryProvider()+"/"+foregroundEmbeddings.PrimaryModel()),
|
||||||
|
slog.String("metadata_primary", foregroundMetadata.PrimaryProvider()+"/"+foregroundMetadata.PrimaryModel()),
|
||||||
)
|
)
|
||||||
|
|
||||||
if cfg.Backfill.Enabled && cfg.Backfill.RunOnStartup {
|
if cfg.Backfill.Enabled && cfg.Backfill.RunOnStartup {
|
||||||
go runBackfillPass(ctx, db, provider, cfg.Backfill, logger)
|
go runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.Backfill.Enabled && cfg.Backfill.Interval > 0 {
|
if cfg.Backfill.Enabled && cfg.Backfill.Interval > 0 {
|
||||||
@@ -94,14 +121,14 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return
|
return
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
runBackfillPass(ctx, db, provider, cfg.Backfill, logger)
|
runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.RunOnStartup {
|
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.RunOnStartup {
|
||||||
go runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger)
|
go runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.Interval > 0 {
|
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.Interval > 0 {
|
||||||
@@ -113,13 +140,13 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return
|
return
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger)
|
runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
handler, err := routes(logger, cfg, info, db, provider, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects)
|
handler, err := routes(logger, cfg, info, db, foregroundEmbeddings, foregroundMetadata, backgroundEmbeddings, backgroundMetadata, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -156,38 +183,41 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, provider ai.Provider, keyring *auth.Keyring, oauthRegistry *auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) {
|
func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, bgEmbeddings *ai.EmbeddingRunner, bgMetadata *ai.MetadataRunner, keyring *auth.Keyring, oauthRegistry *auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) {
|
||||||
mux := http.NewServeMux()
|
mux := http.NewServeMux()
|
||||||
accessTracker := auth.NewAccessTracker()
|
accessTracker := auth.NewAccessTracker()
|
||||||
oauthEnabled := oauthRegistry != nil && tokenStore != nil
|
oauthEnabled := oauthRegistry != nil
|
||||||
authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger)
|
authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger)
|
||||||
filesTool := tools.NewFilesTool(db, activeProjects)
|
filesTool := tools.NewFilesTool(db, activeProjects)
|
||||||
enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, bgMetadata, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||||
backfillTool := tools.NewBackfillTool(db, provider, activeProjects, logger)
|
backfillTool := tools.NewBackfillTool(db, bgEmbeddings, activeProjects, logger)
|
||||||
|
adminActions := newAdminActions(backfillTool, enrichmentRetryer, logger)
|
||||||
|
|
||||||
toolSet := mcpserver.ToolSet{
|
toolSet := mcpserver.ToolSet{
|
||||||
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, enrichmentRetryer, backfillTool, logger),
|
Capture: tools.NewCaptureTool(db, embeddings, cfg.Capture, activeProjects, enrichmentRetryer, backfillTool),
|
||||||
Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
|
Search: tools.NewSearchTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
List: tools.NewListTool(db, cfg.Search, activeProjects),
|
List: tools.NewListTool(db, cfg.Search, activeProjects),
|
||||||
Stats: tools.NewStatsTool(db),
|
Stats: tools.NewStatsTool(db),
|
||||||
Get: tools.NewGetTool(db),
|
Get: tools.NewGetTool(db),
|
||||||
Update: tools.NewUpdateTool(db, provider, cfg.Capture, logger),
|
Update: tools.NewUpdateTool(db, embeddings, metadata, cfg.Capture, logger),
|
||||||
Delete: tools.NewDeleteTool(db),
|
Delete: tools.NewDeleteTool(db),
|
||||||
Archive: tools.NewArchiveTool(db),
|
Archive: tools.NewArchiveTool(db),
|
||||||
Projects: tools.NewProjectsTool(db, activeProjects),
|
Projects: tools.NewProjectsTool(db, activeProjects),
|
||||||
Version: tools.NewVersionTool(cfg.MCP.ServerName, info),
|
Version: tools.NewVersionTool(cfg.MCP.ServerName, info),
|
||||||
Context: tools.NewContextTool(db, provider, cfg.Search, activeProjects),
|
Learnings: tools.NewLearningsTool(db, activeProjects, cfg.Search),
|
||||||
Recall: tools.NewRecallTool(db, provider, cfg.Search, activeProjects),
|
Plans: tools.NewPlansTool(db, activeProjects, cfg.Search),
|
||||||
Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
|
Context: tools.NewContextTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
Links: tools.NewLinksTool(db, provider, cfg.Search),
|
Recall: tools.NewRecallTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
|
Summarize: tools.NewSummarizeTool(db, embeddings, metadata, cfg.Search, activeProjects),
|
||||||
|
Links: tools.NewLinksTool(db, embeddings, cfg.Search),
|
||||||
Files: filesTool,
|
Files: filesTool,
|
||||||
Backfill: backfillTool,
|
Backfill: backfillTool,
|
||||||
Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger),
|
Reparse: tools.NewReparseMetadataTool(db, bgMetadata, cfg.Capture, activeProjects, logger),
|
||||||
RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer),
|
RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer),
|
||||||
Maintenance: tools.NewMaintenanceTool(db),
|
//Maintenance: tools.NewMaintenanceTool(db),
|
||||||
Skills: tools.NewSkillsTool(db, activeProjects),
|
Skills: tools.NewSkillsTool(db, activeProjects),
|
||||||
ChatHistory: tools.NewChatHistoryTool(db, activeProjects),
|
ChatHistory: tools.NewChatHistoryTool(db, activeProjects),
|
||||||
Describe: tools.NewDescribeTool(db, mcpserver.BuildToolCatalog()),
|
Describe: tools.NewDescribeTool(db, mcpserver.BuildToolCatalog()),
|
||||||
}
|
}
|
||||||
|
|
||||||
mcpHandlers, err := mcpserver.NewHandlers(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
mcpHandlers, err := mcpserver.NewHandlers(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
||||||
@@ -199,21 +229,26 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
|||||||
mux.Handle(cfg.MCP.SSEPath, authMiddleware(mcpHandlers.SSE))
|
mux.Handle(cfg.MCP.SSEPath, authMiddleware(mcpHandlers.SSE))
|
||||||
logger.Info("SSE transport enabled", slog.String("sse_path", cfg.MCP.SSEPath))
|
logger.Info("SSE transport enabled", slog.String("sse_path", cfg.MCP.SSEPath))
|
||||||
}
|
}
|
||||||
|
if err := registerResolveSpecAdminRoutes(mux, db, authMiddleware, logger); err != nil {
|
||||||
|
return nil, fmt.Errorf("setup resolvespec admin routes: %w", err)
|
||||||
|
}
|
||||||
mux.Handle("/files", authMiddleware(fileHandler(filesTool)))
|
mux.Handle("/files", authMiddleware(fileHandler(filesTool)))
|
||||||
mux.Handle("/files/{id}", authMiddleware(fileHandler(filesTool)))
|
mux.Handle("/files/{id}", authMiddleware(fileHandler(filesTool)))
|
||||||
if oauthEnabled {
|
mux.HandleFunc("/.well-known/oauth-authorization-server", oauthMetadataHandler())
|
||||||
mux.HandleFunc("/.well-known/oauth-authorization-server", oauthMetadataHandler())
|
mux.HandleFunc("/api/oauth/register", oauthRegisterHandler(dynClients, logger))
|
||||||
mux.HandleFunc("/oauth-authorization-server", oauthMetadataHandler())
|
mux.HandleFunc("/api/oauth/authorize", oauthAuthorizeHandler(dynClients, authCodes, logger))
|
||||||
mux.HandleFunc("/oauth/register", oauthRegisterHandler(dynClients, logger))
|
mux.HandleFunc("/api/oauth/token", oauthTokenHandler(oauthRegistry, tokenStore, authCodes, logger))
|
||||||
mux.HandleFunc("/authorize", oauthAuthorizeHandler(dynClients, authCodes, logger))
|
mux.Handle("/api/admin/actions/backfill", authMiddleware(adminActions.backfillHandler()))
|
||||||
mux.HandleFunc("/oauth/authorize", oauthAuthorizeHandler(dynClients, authCodes, logger))
|
mux.Handle("/api/admin/actions/retry-metadata", authMiddleware(adminActions.retryMetadataHandler()))
|
||||||
mux.HandleFunc("/oauth/token", oauthTokenHandler(oauthRegistry, tokenStore, authCodes, logger))
|
|
||||||
}
|
|
||||||
mux.HandleFunc("/favicon.ico", serveFavicon)
|
mux.HandleFunc("/favicon.ico", serveFavicon)
|
||||||
mux.HandleFunc("/images/project.jpg", serveHomeImage)
|
mux.HandleFunc("/images/project.jpg", serveHomeImage)
|
||||||
mux.HandleFunc("/images/icon.png", serveIcon)
|
mux.HandleFunc("/images/icon.png", serveIcon)
|
||||||
mux.HandleFunc("/llm", serveLLMInstructions)
|
mux.HandleFunc("/llm", serveLLMInstructions)
|
||||||
mux.HandleFunc("/api/status", statusAPIHandler(info, accessTracker, oauthEnabled))
|
mux.HandleFunc("/llms.txt", serveLLMSTXT)
|
||||||
|
mux.HandleFunc("/.well-known/llms.txt", serveLLMSTXT)
|
||||||
|
mux.HandleFunc("/robots.txt", serveRobotsTXT)
|
||||||
|
mux.Handle("/api/status", authMiddleware(statusAPIHandler(info, accessTracker, oauthEnabled)))
|
||||||
|
mux.HandleFunc("/status", publicStatusHandler(accessTracker))
|
||||||
|
|
||||||
mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
|
mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
|
||||||
w.WriteHeader(http.StatusOK)
|
w.WriteHeader(http.StatusOK)
|
||||||
@@ -242,8 +277,8 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
|||||||
), nil
|
), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) {
|
func runMetadataRetryPass(ctx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) {
|
||||||
retryer := tools.NewMetadataRetryer(ctx, db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
retryer := tools.NewMetadataRetryer(ctx, db, metadataRunner, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||||
_, out, err := retryer.Handle(ctx, nil, tools.RetryMetadataInput{
|
_, out, err := retryer.Handle(ctx, nil, tools.RetryMetadataInput{
|
||||||
Limit: cfg.MetadataRetry.MaxPerRun,
|
Limit: cfg.MetadataRetry.MaxPerRun,
|
||||||
IncludeArchived: cfg.MetadataRetry.IncludeArchived,
|
IncludeArchived: cfg.MetadataRetry.IncludeArchived,
|
||||||
@@ -261,8 +296,8 @@ func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provide
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
func runBackfillPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg config.BackfillConfig, logger *slog.Logger) {
|
func runBackfillPass(ctx context.Context, db *store.DB, embeddings *ai.EmbeddingRunner, cfg config.BackfillConfig, logger *slog.Logger) {
|
||||||
backfiller := tools.NewBackfillTool(db, provider, nil, logger)
|
backfiller := tools.NewBackfillTool(db, embeddings, nil, logger)
|
||||||
_, out, err := backfiller.Handle(ctx, nil, tools.BackfillInput{
|
_, out, err := backfiller.Handle(ctx, nil, tools.BackfillInput{
|
||||||
Limit: cfg.MaxPerRun,
|
Limit: cfg.MaxPerRun,
|
||||||
IncludeArchived: cfg.IncludeArchived,
|
IncludeArchived: cfg.IncludeArchived,
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
package app
|
package app
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
amcsllm "git.warky.dev/wdevs/amcs/llm"
|
amcsllm "git.warky.dev/wdevs/amcs/llm"
|
||||||
)
|
)
|
||||||
@@ -20,3 +22,74 @@ func serveLLMInstructions(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
_, _ = w.Write(amcsllm.MemoryInstructions)
|
_, _ = w.Write(amcsllm.MemoryInstructions)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func serveRobotsTXT(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/robots.txt" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||||
|
w.Header().Set("Allow", "GET, HEAD")
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
|
||||||
|
w.Header().Set("Cache-Control", "public, max-age=300")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
if r.Method == http.MethodHead {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
body := fmt.Sprintf("User-agent: *\nAllow: /\n\n# LLM-friendly docs\nLLM: %s/llm\nLLMS: %s/llms.txt\n", requestBaseURL(r), requestBaseURL(r))
|
||||||
|
_, _ = w.Write([]byte(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func serveLLMSTXT(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/llms.txt" && r.URL.Path != "/.well-known/llms.txt" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||||
|
w.Header().Set("Allow", "GET, HEAD")
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
|
||||||
|
w.Header().Set("Cache-Control", "public, max-age=300")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
if r.Method == http.MethodHead {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
base := requestBaseURL(r)
|
||||||
|
body := fmt.Sprintf(
|
||||||
|
"# AMCS\n\n> A memory server for AI assistants (MCP tools, semantic retrieval, and structured project memory).\n\n## Endpoints\n- %s/llm\n- %s/status\n- %s/mcp\n- %s/.well-known/oauth-authorization-server\n",
|
||||||
|
base,
|
||||||
|
base,
|
||||||
|
base,
|
||||||
|
base,
|
||||||
|
)
|
||||||
|
_, _ = w.Write([]byte(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func requestBaseURL(r *http.Request) string {
|
||||||
|
scheme := "http"
|
||||||
|
if r != nil && r.TLS != nil {
|
||||||
|
scheme = "https"
|
||||||
|
}
|
||||||
|
if r != nil {
|
||||||
|
if proto := strings.TrimSpace(r.Header.Get("X-Forwarded-Proto")); proto != "" {
|
||||||
|
scheme = proto
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
host := "localhost"
|
||||||
|
if r != nil {
|
||||||
|
if v := strings.TrimSpace(r.Host); v != "" {
|
||||||
|
host = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return scheme + "://" + host
|
||||||
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package app
|
|||||||
import (
|
import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
amcsllm "git.warky.dev/wdevs/amcs/llm"
|
amcsllm "git.warky.dev/wdevs/amcs/llm"
|
||||||
@@ -29,3 +30,70 @@ func TestServeLLMInstructions(t *testing.T) {
|
|||||||
t.Fatalf("body = %q, want embedded instructions", body)
|
t.Fatalf("body = %q, want embedded instructions", body)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TestServeRobotsTXT verifies that /robots.txt returns 200 text/plain and
// that the body links to /llm and /llms.txt using the forwarded scheme
// and the request host.
func TestServeRobotsTXT(t *testing.T) {
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	req.Host = "amcs.example.com"
	req.Header.Set("X-Forwarded-Proto", "https")
	rec := httptest.NewRecorder()

	serveRobotsTXT(rec, req)

	res := rec.Result()
	defer func() {
		_ = res.Body.Close()
	}()

	if res.StatusCode != http.StatusOK {
		t.Fatalf("status = %d, want %d", res.StatusCode, http.StatusOK)
	}
	if got := res.Header.Get("Content-Type"); got != "text/plain; charset=utf-8" {
		t.Fatalf("content-type = %q, want %q", got, "text/plain; charset=utf-8")
	}
	body := rec.Body.String()
	if !strings.Contains(body, "LLM: https://amcs.example.com/llm") {
		t.Fatalf("body = %q, want LLM link", body)
	}
	if !strings.Contains(body, "LLMS: https://amcs.example.com/llms.txt") {
		t.Fatalf("body = %q, want LLMS link", body)
	}
}
|
||||||
|
|
||||||
|
// TestServeLLMSTXT verifies that /llms.txt returns 200 text/plain and that
// the body advertises the /llm endpoint and the OAuth discovery document,
// both built from the forwarded scheme and request host.
func TestServeLLMSTXT(t *testing.T) {
	req := httptest.NewRequest(http.MethodGet, "/llms.txt", nil)
	req.Host = "amcs.example.com"
	req.Header.Set("X-Forwarded-Proto", "https")
	rec := httptest.NewRecorder()

	serveLLMSTXT(rec, req)

	res := rec.Result()
	defer func() {
		_ = res.Body.Close()
	}()

	if res.StatusCode != http.StatusOK {
		t.Fatalf("status = %d, want %d", res.StatusCode, http.StatusOK)
	}
	if got := res.Header.Get("Content-Type"); got != "text/plain; charset=utf-8" {
		t.Fatalf("content-type = %q, want %q", got, "text/plain; charset=utf-8")
	}
	body := rec.Body.String()
	if !strings.Contains(body, "https://amcs.example.com/llm") {
		t.Fatalf("body = %q, want /llm link", body)
	}
	if !strings.Contains(body, "https://amcs.example.com/.well-known/oauth-authorization-server") {
		t.Fatalf("body = %q, want oauth discovery link", body)
	}
}
|
||||||
|
|
||||||
|
// TestServeLLMSTXTWellKnownPath verifies that the /.well-known/llms.txt
// alias is served by the same handler with a 200 response.
func TestServeLLMSTXTWellKnownPath(t *testing.T) {
	req := httptest.NewRequest(http.MethodGet, "/.well-known/llms.txt", nil)
	rec := httptest.NewRecorder()

	serveLLMSTXT(rec, req)

	if rec.Code != http.StatusOK {
		t.Fatalf("status = %d, want %d", rec.Code, http.StatusOK)
	}
}
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/auth"
|
"git.warky.dev/wdevs/amcs/internal/auth"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
// --- JSON types ---
|
// --- JSON types ---
|
||||||
@@ -66,9 +67,9 @@ func oauthMetadataHandler() http.HandlerFunc {
|
|||||||
base := serverBaseURL(r)
|
base := serverBaseURL(r)
|
||||||
meta := oauthServerMetadata{
|
meta := oauthServerMetadata{
|
||||||
Issuer: base,
|
Issuer: base,
|
||||||
AuthorizationEndpoint: base + "/authorize",
|
AuthorizationEndpoint: base + "/api/oauth/authorize",
|
||||||
TokenEndpoint: base + "/oauth/token",
|
TokenEndpoint: base + "/api/oauth/token",
|
||||||
RegistrationEndpoint: base + "/oauth/register",
|
RegistrationEndpoint: base + "/api/oauth/register",
|
||||||
ScopesSupported: []string{"mcp"},
|
ScopesSupported: []string{"mcp"},
|
||||||
ResponseTypesSupported: []string{"code"},
|
ResponseTypesSupported: []string{"code"},
|
||||||
GrantTypesSupported: []string{"authorization_code", "client_credentials"},
|
GrantTypesSupported: []string{"authorization_code", "client_credentials"},
|
||||||
@@ -243,6 +244,10 @@ func oauthTokenHandler(oauthRegistry *auth.OAuthRegistry, tokenStore *auth.Token
|
|||||||
|
|
||||||
switch r.FormValue("grant_type") {
|
switch r.FormValue("grant_type") {
|
||||||
case "client_credentials":
|
case "client_credentials":
|
||||||
|
if oauthRegistry == nil {
|
||||||
|
writeTokenError(w, "unsupported_grant_type", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
handleClientCredentials(w, r, oauthRegistry, tokenStore, log)
|
handleClientCredentials(w, r, oauthRegistry, tokenStore, log)
|
||||||
case "authorization_code":
|
case "authorization_code":
|
||||||
handleAuthorizationCode(w, r, authCodes, tokenStore, log)
|
handleAuthorizationCode(w, r, authCodes, tokenStore, log)
|
||||||
@@ -261,7 +266,7 @@ func handleClientCredentials(w http.ResponseWriter, r *http.Request, oauthRegist
|
|||||||
}
|
}
|
||||||
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("oauth token: invalid client credentials", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth token: invalid client credentials", slog.String("remote_addr", requestip.FromRequest(r)))
|
||||||
w.Header().Set("WWW-Authenticate", `Basic realm="oauth"`)
|
w.Header().Set("WWW-Authenticate", `Basic realm="oauth"`)
|
||||||
writeTokenError(w, "invalid_client", http.StatusUnauthorized)
|
writeTokenError(w, "invalid_client", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
@@ -290,7 +295,7 @@ func handleAuthorizationCode(w http.ResponseWriter, r *http.Request, authCodes *
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !verifyPKCE(codeVerifier, entry.CodeChallenge, entry.CodeChallengeMethod) {
|
if !verifyPKCE(codeVerifier, entry.CodeChallenge, entry.CodeChallengeMethod) {
|
||||||
log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", requestip.FromRequest(r)))
|
||||||
writeTokenError(w, "invalid_grant", http.StatusBadRequest)
|
writeTokenError(w, "invalid_grant", http.StatusBadRequest)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -333,7 +338,7 @@ button{padding:.5rem 1.2rem;margin-right:.5rem;cursor:pointer;font-size:1rem}
|
|||||||
<body>
|
<body>
|
||||||
<h2>Authorize Access</h2>
|
<h2>Authorize Access</h2>
|
||||||
<p><strong>%s</strong> is requesting access to this AMCS server.</p>
|
<p><strong>%s</strong> is requesting access to this AMCS server.</p>
|
||||||
<form method=POST action=/oauth/authorize>
|
<form method=POST action=/api/oauth/authorize>
|
||||||
<input type=hidden name=client_id value="%s">
|
<input type=hidden name=client_id value="%s">
|
||||||
<input type=hidden name=redirect_uri value="%s">
|
<input type=hidden name=redirect_uri value="%s">
|
||||||
<input type=hidden name=state value="%s">
|
<input type=hidden name=state value="%s">
|
||||||
|
|||||||
172
internal/app/resolvespec_admin.go
Normal file
172
internal/app/resolvespec_admin.go
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
package app
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/resolvespec"
|
||||||
|
"github.com/uptrace/bunrouter"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/store"
|
||||||
|
)
|
||||||
|
|
||||||
|
func registerResolveSpecAdminRoutes(mux *http.ServeMux, db *store.DB, middleware func(http.Handler) http.Handler, logger *slog.Logger) error {
|
||||||
|
rs := resolvespec.NewHandlerWithBun(db.Bun())
|
||||||
|
registerResolveSpecGuards(rs)
|
||||||
|
for _, model := range resolveSpecModels() {
|
||||||
|
if err := rs.RegisterModel(model.schema, model.entity, model.model); err != nil {
|
||||||
|
return fmt.Errorf("register resolvespec model %s.%s: %w", model.schema, model.entity, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rsRouter := bunrouter.New()
|
||||||
|
resolvespec.SetupBunRouterRoutes(rsRouter, rs, nil)
|
||||||
|
|
||||||
|
rsMount := http.StripPrefix("/api/rs", rsRouter)
|
||||||
|
protectedRSMount := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/" && strings.HasSuffix(r.URL.Path, "/") {
|
||||||
|
trimmed := strings.TrimRight(r.URL.Path, "/")
|
||||||
|
if trimmed == "" {
|
||||||
|
trimmed = "/"
|
||||||
|
}
|
||||||
|
clone := r.Clone(r.Context())
|
||||||
|
clone.URL.Path = trimmed
|
||||||
|
if clone.URL.RawPath != "" {
|
||||||
|
clone.URL.RawPath = strings.TrimRight(clone.URL.RawPath, "/")
|
||||||
|
if clone.URL.RawPath == "" {
|
||||||
|
clone.URL.RawPath = "/"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
r = clone
|
||||||
|
}
|
||||||
|
if r.Method == http.MethodOptions {
|
||||||
|
rsMount.ServeHTTP(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
middleware(rsMount).ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
|
||||||
|
mux.Handle("/api/rs/", protectedRSMount)
|
||||||
|
mux.Handle("/api/rs", http.RedirectHandler("/api/rs/openapi", http.StatusTemporaryRedirect))
|
||||||
|
|
||||||
|
if logger != nil {
|
||||||
|
logger.Info("resolvespec admin api enabled",
|
||||||
|
slog.String("prefix", "/api/rs"),
|
||||||
|
slog.Int("models", len(resolveSpecModels())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func registerResolveSpecGuards(rs *resolvespec.Handler) {
|
||||||
|
mutableByEntity := map[string]map[string]struct{}{
|
||||||
|
"projects": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"thoughts": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"plans": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"learnings": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"agent_skills": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"agent_guardrails": {
|
||||||
|
"create": {},
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
"stored_files": {
|
||||||
|
"update": {},
|
||||||
|
"delete": {},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
rs.Hooks().Register(resolvespec.BeforeHandle, func(hookCtx *resolvespec.HookContext) error {
|
||||||
|
switch hookCtx.Operation {
|
||||||
|
case "read", "meta":
|
||||||
|
return nil
|
||||||
|
case "create", "update", "delete":
|
||||||
|
allowedOps, ok := mutableByEntity[hookCtx.Entity]
|
||||||
|
if !ok {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortCode = http.StatusForbidden
|
||||||
|
hookCtx.AbortMessage = fmt.Sprintf("operation %q is not allowed for %s.%s", hookCtx.Operation, hookCtx.Schema, hookCtx.Entity)
|
||||||
|
return fmt.Errorf("forbidden operation")
|
||||||
|
}
|
||||||
|
if _, ok := allowedOps[hookCtx.Operation]; !ok {
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortCode = http.StatusForbidden
|
||||||
|
hookCtx.AbortMessage = fmt.Sprintf("operation %q is not allowed for %s.%s", hookCtx.Operation, hookCtx.Schema, hookCtx.Entity)
|
||||||
|
return fmt.Errorf("forbidden operation")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
hookCtx.Abort = true
|
||||||
|
hookCtx.AbortCode = http.StatusBadRequest
|
||||||
|
hookCtx.AbortMessage = fmt.Sprintf("unsupported operation %q", hookCtx.Operation)
|
||||||
|
return fmt.Errorf("unsupported operation")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
type resolveSpecModel struct {
|
||||||
|
schema string
|
||||||
|
entity string
|
||||||
|
model any
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveSpecModels() []resolveSpecModel {
|
||||||
|
//This must be generated with relspec to include all models
|
||||||
|
//Use the relspec command with template generation. It supprot templ.
|
||||||
|
return []resolveSpecModel{
|
||||||
|
// {schema: "public", entity: "activities", model: generatedmodels.ModelPublicActivities{}},
|
||||||
|
{schema: "public", entity: "agent_guardrails", model: generatedmodels.ModelPublicAgentGuardrails{}},
|
||||||
|
{schema: "public", entity: "agent_skills", model: generatedmodels.ModelPublicAgentSkills{}},
|
||||||
|
{schema: "public", entity: "chat_histories", model: generatedmodels.ModelPublicChatHistories{}},
|
||||||
|
// {schema: "public", entity: "contact_interactions", model: generatedmodels.ModelPublicContactInteractions{}},
|
||||||
|
{schema: "public", entity: "embeddings", model: generatedmodels.ModelPublicEmbeddings{}},
|
||||||
|
// {schema: "public", entity: "family_members", model: generatedmodels.ModelPublicFamilyMembers{}},
|
||||||
|
// {schema: "public", entity: "household_items", model: generatedmodels.ModelPublicHouseholdItems{}},
|
||||||
|
// {schema: "public", entity: "household_vendors", model: generatedmodels.ModelPublicHouseholdVendors{}},
|
||||||
|
// {schema: "public", entity: "important_dates", model: generatedmodels.ModelPublicImportantDates{}},
|
||||||
|
{schema: "public", entity: "learnings", model: generatedmodels.ModelPublicLearnings{}},
|
||||||
|
// {schema: "public", entity: "maintenance_logs", model: generatedmodels.ModelPublicMaintenanceLogs{}},
|
||||||
|
// {schema: "public", entity: "maintenance_tasks", model: generatedmodels.ModelPublicMaintenanceTasks{}},
|
||||||
|
// {schema: "public", entity: "meal_plans", model: generatedmodels.ModelPublicMealPlans{}},
|
||||||
|
// {schema: "public", entity: "opportunities", model: generatedmodels.ModelPublicOpportunities{}},
|
||||||
|
{schema: "public", entity: "plan_dependencies", model: generatedmodels.ModelPublicPlanDependencies{}},
|
||||||
|
{schema: "public", entity: "plan_guardrails", model: generatedmodels.ModelPublicPlanGuardrails{}},
|
||||||
|
{schema: "public", entity: "plan_related_plans", model: generatedmodels.ModelPublicPlanRelatedPlans{}},
|
||||||
|
{schema: "public", entity: "plan_skills", model: generatedmodels.ModelPublicPlanSkills{}},
|
||||||
|
{schema: "public", entity: "plans", model: generatedmodels.ModelPublicPlans{}},
|
||||||
|
// {schema: "public", entity: "professional_contacts", model: generatedmodels.ModelPublicProfessionalContacts{}},
|
||||||
|
{schema: "public", entity: "project_guardrails", model: generatedmodels.ModelPublicProjectGuardrails{}},
|
||||||
|
{schema: "public", entity: "project_skills", model: generatedmodels.ModelPublicProjectSkills{}},
|
||||||
|
{schema: "public", entity: "projects", model: generatedmodels.ModelPublicProjects{}},
|
||||||
|
// {schema: "public", entity: "recipes", model: generatedmodels.ModelPublicRecipes{}},
|
||||||
|
// {schema: "public", entity: "shopping_lists", model: generatedmodels.ModelPublicShoppingLists{}},
|
||||||
|
{schema: "public", entity: "stored_files", model: generatedmodels.ModelPublicStoredFiles{}},
|
||||||
|
{schema: "public", entity: "thought_links", model: generatedmodels.ModelPublicThoughtLinks{}},
|
||||||
|
{schema: "public", entity: "thoughts", model: generatedmodels.ModelPublicThoughts{}},
|
||||||
|
{schema: "public", entity: "tool_annotations", model: generatedmodels.ModelPublicToolAnnotations{}},
|
||||||
|
}
|
||||||
|
}
|
||||||
173
internal/app/resolvespec_admin_test.go
Normal file
173
internal/app/resolvespec_admin_test.go
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
package app
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/auth"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
|
||||||
|
"github.com/bitechdev/ResolveSpec/pkg/resolvespec"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestResolveSpecAuthRequiresValidCredentials(t *testing.T) {
|
||||||
|
keyring, err := auth.NewKeyring([]config.APIKey{{ID: "operator", Value: "secret"}})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewKeyring() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger := slog.New(slog.NewTextHandler(io.Discard, nil))
|
||||||
|
protected := auth.Middleware(config.AuthConfig{}, keyring, nil, nil, nil, logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/api/rs/public/projects" {
|
||||||
|
t.Fatalf("path = %q, want /api/rs/public/projects", r.URL.Path)
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
t.Run("missing credentials are rejected", func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/rs/public/projects", strings.NewReader(`{"operation":"read"}`))
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
protected.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusUnauthorized {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusUnauthorized)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("valid API key is accepted", func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/rs/public/projects", strings.NewReader(`{"operation":"read"}`))
|
||||||
|
req.Header.Set("x-brain-key", "secret")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
protected.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveSpecGuardAllowsSupportedMutations(t *testing.T) {
|
||||||
|
rs := resolvespec.NewHandler(nil, nil)
|
||||||
|
registerResolveSpecGuards(rs)
|
||||||
|
|
||||||
|
cases := []struct {
|
||||||
|
name string
|
||||||
|
entity string
|
||||||
|
operation string
|
||||||
|
}{
|
||||||
|
{name: "learnings read", entity: "learnings", operation: "read"},
|
||||||
|
{name: "projects create", entity: "projects", operation: "create"},
|
||||||
|
{name: "projects update", entity: "projects", operation: "update"},
|
||||||
|
{name: "projects delete", entity: "projects", operation: "delete"},
|
||||||
|
{name: "plans create", entity: "plans", operation: "create"},
|
||||||
|
{name: "plans update", entity: "plans", operation: "update"},
|
||||||
|
{name: "plans delete", entity: "plans", operation: "delete"},
|
||||||
|
{name: "learnings create", entity: "learnings", operation: "create"},
|
||||||
|
{name: "learnings update", entity: "learnings", operation: "update"},
|
||||||
|
{name: "learnings delete", entity: "learnings", operation: "delete"},
|
||||||
|
{name: "thoughts create", entity: "thoughts", operation: "create"},
|
||||||
|
{name: "thoughts update", entity: "thoughts", operation: "update"},
|
||||||
|
{name: "thoughts delete", entity: "thoughts", operation: "delete"},
|
||||||
|
{name: "agent_skills create", entity: "agent_skills", operation: "create"},
|
||||||
|
{name: "agent_skills update", entity: "agent_skills", operation: "update"},
|
||||||
|
{name: "agent_skills delete", entity: "agent_skills", operation: "delete"},
|
||||||
|
{name: "agent_guardrails create", entity: "agent_guardrails", operation: "create"},
|
||||||
|
{name: "agent_guardrails update", entity: "agent_guardrails", operation: "update"},
|
||||||
|
{name: "agent_guardrails delete", entity: "agent_guardrails", operation: "delete"},
|
||||||
|
{name: "stored_files update", entity: "stored_files", operation: "update"},
|
||||||
|
{name: "stored_files delete", entity: "stored_files", operation: "delete"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range cases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
hookCtx := &resolvespec.HookContext{
|
||||||
|
Schema: "public",
|
||||||
|
Entity: tc.entity,
|
||||||
|
Operation: tc.operation,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := rs.Hooks().Execute(resolvespec.BeforeHandle, hookCtx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Execute() error = %v, want nil", err)
|
||||||
|
}
|
||||||
|
if hookCtx.Abort {
|
||||||
|
t.Fatalf("Abort = true, want false (code=%d message=%q)", hookCtx.AbortCode, hookCtx.AbortMessage)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveSpecGuardBlocksUnsupportedMutations(t *testing.T) {
|
||||||
|
rs := resolvespec.NewHandler(nil, nil)
|
||||||
|
registerResolveSpecGuards(rs)
|
||||||
|
|
||||||
|
cases := []struct {
|
||||||
|
name string
|
||||||
|
entity string
|
||||||
|
operation string
|
||||||
|
wantCode int
|
||||||
|
wantMessageIn string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "mutations blocked for non-allowlisted operation",
|
||||||
|
entity: "stored_files",
|
||||||
|
operation: "create",
|
||||||
|
wantCode: http.StatusForbidden,
|
||||||
|
wantMessageIn: `operation "create" is not allowed for public.stored_files`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "mutations blocked for non-allowlisted entity",
|
||||||
|
entity: "maintenance_logs",
|
||||||
|
operation: "delete",
|
||||||
|
wantCode: http.StatusForbidden,
|
||||||
|
wantMessageIn: `operation "delete" is not allowed for public.maintenance_logs`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "unknown operation is rejected",
|
||||||
|
entity: "projects",
|
||||||
|
operation: "scan",
|
||||||
|
wantCode: http.StatusBadRequest,
|
||||||
|
wantMessageIn: `unsupported operation "scan"`,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range cases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
hookCtx := &resolvespec.HookContext{
|
||||||
|
Schema: "public",
|
||||||
|
Entity: tc.entity,
|
||||||
|
Operation: tc.operation,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := rs.Hooks().Execute(resolvespec.BeforeHandle, hookCtx)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Execute() error = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if !hookCtx.Abort {
|
||||||
|
t.Fatal("Abort = false, want true")
|
||||||
|
}
|
||||||
|
if hookCtx.AbortCode != tc.wantCode {
|
||||||
|
t.Fatalf("AbortCode = %d, want %d", hookCtx.AbortCode, tc.wantCode)
|
||||||
|
}
|
||||||
|
if !strings.Contains(hookCtx.AbortMessage, tc.wantMessageIn) {
|
||||||
|
t.Fatalf("AbortMessage = %q, want substring %q", hookCtx.AbortMessage, tc.wantMessageIn)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveSpecModelsIncludeLearnings(t *testing.T) {
|
||||||
|
models := resolveSpecModels()
|
||||||
|
for _, model := range models {
|
||||||
|
if model.schema == "public" && model.entity == "learnings" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
t.Fatal("resolveSpecModels() missing public.learnings")
|
||||||
|
}
|
||||||
@@ -25,11 +25,28 @@ type statusAPIResponse struct {
|
|||||||
TotalKnown int `json:"total_known"`
|
TotalKnown int `json:"total_known"`
|
||||||
ConnectedWindow string `json:"connected_window"`
|
ConnectedWindow string `json:"connected_window"`
|
||||||
Entries []auth.AccessSnapshot `json:"entries"`
|
Entries []auth.AccessSnapshot `json:"entries"`
|
||||||
|
Metrics auth.AccessMetrics `json:"metrics"`
|
||||||
OAuthEnabled bool `json:"oauth_enabled"`
|
OAuthEnabled bool `json:"oauth_enabled"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type publicClientStatus struct {
|
||||||
|
KeyID string `json:"key_id"`
|
||||||
|
RequestCount int `json:"request_count"`
|
||||||
|
LastAccessedAt time.Time `json:"last_accessed_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type publicStatusResponse struct {
|
||||||
|
ConnectedCount int `json:"connected_count"`
|
||||||
|
ConnectedWindow string `json:"connected_window"`
|
||||||
|
Entries []publicClientStatus `json:"entries"`
|
||||||
|
}
|
||||||
|
|
||||||
func statusSnapshot(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool, now time.Time) statusAPIResponse {
|
func statusSnapshot(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool, now time.Time) statusAPIResponse {
|
||||||
entries := tracker.Snapshot()
|
entries := tracker.Snapshot()
|
||||||
|
metrics := tracker.Metrics(20)
|
||||||
|
metrics.TopIPs = nil
|
||||||
|
metrics.TopAgents = nil
|
||||||
|
metrics.TopTools = nil
|
||||||
return statusAPIResponse{
|
return statusAPIResponse{
|
||||||
Title: "Avelon Memory Crystal Server (AMCS)",
|
Title: "Avelon Memory Crystal Server (AMCS)",
|
||||||
Description: "AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools.",
|
Description: "AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools.",
|
||||||
@@ -39,7 +56,8 @@ func statusSnapshot(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabl
|
|||||||
ConnectedCount: tracker.ConnectedCount(now, connectedWindow),
|
ConnectedCount: tracker.ConnectedCount(now, connectedWindow),
|
||||||
TotalKnown: len(entries),
|
TotalKnown: len(entries),
|
||||||
ConnectedWindow: "last 10 minutes",
|
ConnectedWindow: "last 10 minutes",
|
||||||
Entries: entries,
|
Entries: nil,
|
||||||
|
Metrics: metrics,
|
||||||
OAuthEnabled: oauthEnabled,
|
OAuthEnabled: oauthEnabled,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -73,6 +91,47 @@ func statusAPIHandler(info buildinfo.Info, tracker *auth.AccessTracker, oauthEna
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func publicStatusHandler(tracker *auth.AccessTracker) http.HandlerFunc {
|
||||||
|
return func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/status" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||||
|
w.Header().Set("Allow", "GET, HEAD")
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
now := time.Now()
|
||||||
|
cutoff := now.UTC().Add(-connectedWindow)
|
||||||
|
snapshot := tracker.Snapshot()
|
||||||
|
entries := make([]publicClientStatus, 0, len(snapshot))
|
||||||
|
for _, item := range snapshot {
|
||||||
|
if item.LastAccessedAt.Before(cutoff) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
entries = append(entries, publicClientStatus{
|
||||||
|
KeyID: item.KeyID,
|
||||||
|
RequestCount: item.RequestCount,
|
||||||
|
LastAccessedAt: item.LastAccessedAt,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
if r.Method == http.MethodHead {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = json.NewEncoder(w).Encode(publicStatusResponse{
|
||||||
|
ConnectedCount: len(entries),
|
||||||
|
ConnectedWindow: "last 10 minutes",
|
||||||
|
Entries: entries,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func homeHandler(_ buildinfo.Info, _ *auth.AccessTracker, _ bool) http.HandlerFunc {
|
func homeHandler(_ buildinfo.Info, _ *auth.AccessTracker, _ bool) http.HandlerFunc {
|
||||||
return func(w http.ResponseWriter, r *http.Request) {
|
return func(w http.ResponseWriter, r *http.Request) {
|
||||||
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||||
@@ -90,8 +149,6 @@ func homeHandler(_ buildinfo.Info, _ *auth.AccessTracker, _ bool) http.HandlerFu
|
|||||||
if serveUIAsset(w, r, requestPath) {
|
if serveUIAsset(w, r, requestPath) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
http.NotFound(w, r)
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
serveUIIndex(w, r)
|
serveUIIndex(w, r)
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ func TestStatusSnapshotHidesOAuthLinkWhenDisabled(t *testing.T) {
|
|||||||
func TestStatusSnapshotShowsTrackedAccess(t *testing.T) {
|
func TestStatusSnapshotShowsTrackedAccess(t *testing.T) {
|
||||||
tracker := auth.NewAccessTracker()
|
tracker := auth.NewAccessTracker()
|
||||||
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
||||||
tracker.Record("client-a", "/files", "127.0.0.1:1234", "tester", now)
|
tracker.Record("client-a", "/files", "127.0.0.1:1234", "tester", "list_projects", now)
|
||||||
|
|
||||||
snapshot := statusSnapshot(buildinfo.Info{Version: "v1.2.3"}, tracker, true, now)
|
snapshot := statusSnapshot(buildinfo.Info{Version: "v1.2.3"}, tracker, true, now)
|
||||||
|
|
||||||
@@ -43,11 +43,23 @@ func TestStatusSnapshotShowsTrackedAccess(t *testing.T) {
|
|||||||
if snapshot.ConnectedCount != 1 {
|
if snapshot.ConnectedCount != 1 {
|
||||||
t.Fatalf("ConnectedCount = %d, want 1", snapshot.ConnectedCount)
|
t.Fatalf("ConnectedCount = %d, want 1", snapshot.ConnectedCount)
|
||||||
}
|
}
|
||||||
if len(snapshot.Entries) != 1 {
|
if len(snapshot.Entries) != 0 {
|
||||||
t.Fatalf("len(Entries) = %d, want 1", len(snapshot.Entries))
|
t.Fatalf("len(Entries) = %d, want 0 for counts-only status", len(snapshot.Entries))
|
||||||
}
|
}
|
||||||
if snapshot.Entries[0].KeyID != "client-a" || snapshot.Entries[0].LastPath != "/files" {
|
if snapshot.Metrics.TotalRequests != 1 {
|
||||||
t.Fatalf("entry = %+v, want keyID client-a and path /files", snapshot.Entries[0])
|
t.Fatalf("Metrics.TotalRequests = %d, want 1", snapshot.Metrics.TotalRequests)
|
||||||
|
}
|
||||||
|
if snapshot.Metrics.UniqueIPs != 1 {
|
||||||
|
t.Fatalf("Metrics.UniqueIPs = %d, want 1", snapshot.Metrics.UniqueIPs)
|
||||||
|
}
|
||||||
|
if snapshot.Metrics.UniqueAgents != 1 {
|
||||||
|
t.Fatalf("Metrics.UniqueAgents = %d, want 1", snapshot.Metrics.UniqueAgents)
|
||||||
|
}
|
||||||
|
if snapshot.Metrics.UniqueTools != 1 {
|
||||||
|
t.Fatalf("Metrics.UniqueTools = %d, want 1", snapshot.Metrics.UniqueTools)
|
||||||
|
}
|
||||||
|
if len(snapshot.Metrics.TopIPs) != 0 || len(snapshot.Metrics.TopAgents) != 0 || len(snapshot.Metrics.TopTools) != 0 {
|
||||||
|
t.Fatalf("Top breakdowns should be hidden in counts-only status: %+v", snapshot.Metrics)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -74,6 +86,52 @@ func TestStatusAPIHandlerReturnsJSON(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestStatusAPIHandlerRejectsStatusPath(t *testing.T) {
|
||||||
|
handler := statusAPIHandler(buildinfo.Info{Version: "v1"}, auth.NewAccessTracker(), true)
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/status", nil)
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNotFound {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNotFound)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPublicStatusHandlerReturnsConnectedClientsOnly(t *testing.T) {
|
||||||
|
tracker := auth.NewAccessTracker()
|
||||||
|
now := time.Now().UTC()
|
||||||
|
tracker.Record("recent-client", "/mcp", "127.0.0.1:1234", "tester", "list_projects", now.Add(-2*time.Minute))
|
||||||
|
tracker.Record("stale-client", "/mcp", "127.0.0.1:9999", "tester", "list_projects", now.Add(-30*time.Minute))
|
||||||
|
|
||||||
|
handler := publicStatusHandler(tracker)
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/status", nil)
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusOK)
|
||||||
|
}
|
||||||
|
|
||||||
|
var payload publicStatusResponse
|
||||||
|
if err := json.Unmarshal(rec.Body.Bytes(), &payload); err != nil {
|
||||||
|
t.Fatalf("json.Unmarshal() error = %v", err)
|
||||||
|
}
|
||||||
|
if payload.ConnectedCount != 1 {
|
||||||
|
t.Fatalf("ConnectedCount = %d, want 1", payload.ConnectedCount)
|
||||||
|
}
|
||||||
|
if len(payload.Entries) != 1 {
|
||||||
|
t.Fatalf("len(Entries) = %d, want 1", len(payload.Entries))
|
||||||
|
}
|
||||||
|
if payload.Entries[0].KeyID != "recent-client" {
|
||||||
|
t.Fatalf("Entries[0].KeyID = %q, want %q", payload.Entries[0].KeyID, "recent-client")
|
||||||
|
}
|
||||||
|
if payload.Entries[0].LastAccessedAt.Before(now.Add(-11 * time.Minute)) {
|
||||||
|
t.Fatalf("Entries[0].LastAccessedAt = %v, expected recent timestamp", payload.Entries[0].LastAccessedAt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestHomeHandlerAllowsHead(t *testing.T) {
|
func TestHomeHandlerAllowsHead(t *testing.T) {
|
||||||
handler := homeHandler(buildinfo.Info{Version: "v1"}, auth.NewAccessTracker(), false)
|
handler := homeHandler(buildinfo.Info{Version: "v1"}, auth.NewAccessTracker(), false)
|
||||||
req := httptest.NewRequest(http.MethodHead, "/", nil)
|
req := httptest.NewRequest(http.MethodHead, "/", nil)
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
package auth
|
package auth
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net"
|
||||||
"sort"
|
"sort"
|
||||||
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
@@ -16,15 +18,24 @@ type AccessSnapshot struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type AccessTracker struct {
|
type AccessTracker struct {
|
||||||
mu sync.RWMutex
|
mu sync.RWMutex
|
||||||
entries map[string]AccessSnapshot
|
entries map[string]AccessSnapshot
|
||||||
|
ipCounts map[string]int
|
||||||
|
agentCounts map[string]int
|
||||||
|
toolCounts map[string]int
|
||||||
|
totalRequests int
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewAccessTracker() *AccessTracker {
|
func NewAccessTracker() *AccessTracker {
|
||||||
return &AccessTracker{entries: make(map[string]AccessSnapshot)}
|
return &AccessTracker{
|
||||||
|
entries: make(map[string]AccessSnapshot),
|
||||||
|
ipCounts: make(map[string]int),
|
||||||
|
agentCounts: make(map[string]int),
|
||||||
|
toolCounts: make(map[string]int),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *AccessTracker) Record(keyID, path, remoteAddr, userAgent string, now time.Time) {
|
func (t *AccessTracker) Record(keyID, path, remoteAddr, userAgent, toolName string, now time.Time) {
|
||||||
if t == nil || keyID == "" {
|
if t == nil || keyID == "" {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -32,14 +43,39 @@ func (t *AccessTracker) Record(keyID, path, remoteAddr, userAgent string, now ti
|
|||||||
t.mu.Lock()
|
t.mu.Lock()
|
||||||
defer t.mu.Unlock()
|
defer t.mu.Unlock()
|
||||||
|
|
||||||
|
normalizedRemoteAddr := normalizeRemoteAddr(remoteAddr)
|
||||||
|
|
||||||
entry := t.entries[keyID]
|
entry := t.entries[keyID]
|
||||||
entry.KeyID = keyID
|
entry.KeyID = keyID
|
||||||
entry.LastPath = path
|
entry.LastPath = path
|
||||||
entry.RemoteAddr = remoteAddr
|
entry.RemoteAddr = normalizedRemoteAddr
|
||||||
entry.UserAgent = userAgent
|
entry.UserAgent = userAgent
|
||||||
entry.LastAccessedAt = now.UTC()
|
entry.LastAccessedAt = now.UTC()
|
||||||
entry.RequestCount++
|
entry.RequestCount++
|
||||||
t.entries[keyID] = entry
|
t.entries[keyID] = entry
|
||||||
|
t.totalRequests++
|
||||||
|
|
||||||
|
if normalizedRemoteAddr != "" {
|
||||||
|
t.ipCounts[normalizedRemoteAddr]++
|
||||||
|
}
|
||||||
|
if userAgent != "" {
|
||||||
|
t.agentCounts[userAgent]++
|
||||||
|
}
|
||||||
|
if tool := strings.TrimSpace(toolName); tool != "" {
|
||||||
|
t.toolCounts[tool]++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeRemoteAddr(value string) string {
|
||||||
|
trimmed := strings.TrimSpace(value)
|
||||||
|
if trimmed == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
host, _, err := net.SplitHostPort(trimmed)
|
||||||
|
if err == nil {
|
||||||
|
return host
|
||||||
|
}
|
||||||
|
return trimmed
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *AccessTracker) Snapshot() []AccessSnapshot {
|
func (t *AccessTracker) Snapshot() []AccessSnapshot {
|
||||||
@@ -79,3 +115,59 @@ func (t *AccessTracker) ConnectedCount(now time.Time, window time.Duration) int
|
|||||||
}
|
}
|
||||||
return count
|
return count
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type RequestAggregate struct {
|
||||||
|
Key string `json:"key"`
|
||||||
|
RequestCount int `json:"request_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AccessMetrics struct {
|
||||||
|
TotalRequests int `json:"total_requests"`
|
||||||
|
UniquePrincipals int `json:"unique_principals"`
|
||||||
|
UniqueIPs int `json:"unique_ips"`
|
||||||
|
UniqueAgents int `json:"unique_agents"`
|
||||||
|
UniqueTools int `json:"unique_tools"`
|
||||||
|
TopIPs []RequestAggregate `json:"top_ips"`
|
||||||
|
TopAgents []RequestAggregate `json:"top_agents"`
|
||||||
|
TopTools []RequestAggregate `json:"top_tools"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *AccessTracker) Metrics(topN int) AccessMetrics {
|
||||||
|
if t == nil {
|
||||||
|
return AccessMetrics{}
|
||||||
|
}
|
||||||
|
if topN <= 0 {
|
||||||
|
topN = 10
|
||||||
|
}
|
||||||
|
|
||||||
|
t.mu.RLock()
|
||||||
|
defer t.mu.RUnlock()
|
||||||
|
|
||||||
|
return AccessMetrics{
|
||||||
|
TotalRequests: t.totalRequests,
|
||||||
|
UniquePrincipals: len(t.entries),
|
||||||
|
UniqueIPs: len(t.ipCounts),
|
||||||
|
UniqueAgents: len(t.agentCounts),
|
||||||
|
UniqueTools: len(t.toolCounts),
|
||||||
|
TopIPs: topAggregates(t.ipCounts, topN),
|
||||||
|
TopAgents: topAggregates(t.agentCounts, topN),
|
||||||
|
TopTools: topAggregates(t.toolCounts, topN),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func topAggregates(items map[string]int, topN int) []RequestAggregate {
|
||||||
|
out := make([]RequestAggregate, 0, len(items))
|
||||||
|
for key, count := range items {
|
||||||
|
out = append(out, RequestAggregate{Key: key, RequestCount: count})
|
||||||
|
}
|
||||||
|
sort.Slice(out, func(i, j int) bool {
|
||||||
|
if out[i].RequestCount == out[j].RequestCount {
|
||||||
|
return out[i].Key < out[j].Key
|
||||||
|
}
|
||||||
|
return out[i].RequestCount > out[j].RequestCount
|
||||||
|
})
|
||||||
|
if len(out) > topN {
|
||||||
|
out = out[:topN]
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|||||||
@@ -10,9 +10,9 @@ func TestAccessTrackerRecordAndSnapshot(t *testing.T) {
|
|||||||
older := time.Date(2026, 4, 4, 10, 0, 0, 0, time.UTC)
|
older := time.Date(2026, 4, 4, 10, 0, 0, 0, time.UTC)
|
||||||
newer := older.Add(2 * time.Minute)
|
newer := older.Add(2 * time.Minute)
|
||||||
|
|
||||||
tracker.Record("client-a", "/files", "10.0.0.1:1234", "agent-a", older)
|
tracker.Record("client-a", "/files", "10.0.0.1:1234", "agent-a", "", older)
|
||||||
tracker.Record("client-b", "/mcp", "10.0.0.2:1234", "agent-b", newer)
|
tracker.Record("client-b", "/mcp", "10.0.0.2:1234", "agent-b", "list_projects", newer)
|
||||||
tracker.Record("client-a", "/files/1", "10.0.0.1:1234", "agent-a2", newer.Add(30*time.Second))
|
tracker.Record("client-a", "/files/1", "10.0.0.1:1234", "agent-a2", "", newer.Add(30*time.Second))
|
||||||
|
|
||||||
snap := tracker.Snapshot()
|
snap := tracker.Snapshot()
|
||||||
if len(snap) != 2 {
|
if len(snap) != 2 {
|
||||||
@@ -30,16 +30,67 @@ func TestAccessTrackerRecordAndSnapshot(t *testing.T) {
|
|||||||
if snap[0].UserAgent != "agent-a2" {
|
if snap[0].UserAgent != "agent-a2" {
|
||||||
t.Fatalf("snapshot[0].UserAgent = %q, want agent-a2", snap[0].UserAgent)
|
t.Fatalf("snapshot[0].UserAgent = %q, want agent-a2", snap[0].UserAgent)
|
||||||
}
|
}
|
||||||
|
if snap[0].RemoteAddr != "10.0.0.1" {
|
||||||
|
t.Fatalf("snapshot[0].RemoteAddr = %q, want 10.0.0.1", snap[0].RemoteAddr)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccessTrackerConnectedCount(t *testing.T) {
|
func TestAccessTrackerConnectedCount(t *testing.T) {
|
||||||
tracker := NewAccessTracker()
|
tracker := NewAccessTracker()
|
||||||
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
||||||
|
|
||||||
tracker.Record("recent", "/mcp", "", "", now.Add(-2*time.Minute))
|
tracker.Record("recent", "/mcp", "", "", "", now.Add(-2*time.Minute))
|
||||||
tracker.Record("stale", "/mcp", "", "", now.Add(-11*time.Minute))
|
tracker.Record("stale", "/mcp", "", "", "", now.Add(-11*time.Minute))
|
||||||
|
|
||||||
if got := tracker.ConnectedCount(now, 10*time.Minute); got != 1 {
|
if got := tracker.ConnectedCount(now, 10*time.Minute); got != 1 {
|
||||||
t.Fatalf("ConnectedCount() = %d, want 1", got)
|
t.Fatalf("ConnectedCount() = %d, want 1", got)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestAccessTrackerMetrics(t *testing.T) {
|
||||||
|
tracker := NewAccessTracker()
|
||||||
|
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
||||||
|
|
||||||
|
tracker.Record("client-a", "/mcp", "10.0.0.1:1234", "agent-a", "list_projects", now)
|
||||||
|
tracker.Record("client-a", "/mcp", "10.0.0.1:1234", "agent-a", "list_projects", now.Add(1*time.Second))
|
||||||
|
tracker.Record("client-b", "/files", "10.0.0.2:5678", "agent-b", "", now.Add(2*time.Second))
|
||||||
|
tracker.Record("client-c", "/files", "10.0.0.2:5678", "agent-b", "search_thoughts", now.Add(3*time.Second))
|
||||||
|
|
||||||
|
metrics := tracker.Metrics(5)
|
||||||
|
if metrics.TotalRequests != 4 {
|
||||||
|
t.Fatalf("TotalRequests = %d, want 4", metrics.TotalRequests)
|
||||||
|
}
|
||||||
|
if metrics.UniquePrincipals != 3 {
|
||||||
|
t.Fatalf("UniquePrincipals = %d, want 3", metrics.UniquePrincipals)
|
||||||
|
}
|
||||||
|
if metrics.UniqueIPs != 2 {
|
||||||
|
t.Fatalf("UniqueIPs = %d, want 2", metrics.UniqueIPs)
|
||||||
|
}
|
||||||
|
if metrics.UniqueAgents != 2 {
|
||||||
|
t.Fatalf("UniqueAgents = %d, want 2", metrics.UniqueAgents)
|
||||||
|
}
|
||||||
|
if metrics.UniqueTools != 2 {
|
||||||
|
t.Fatalf("UniqueTools = %d, want 2", metrics.UniqueTools)
|
||||||
|
}
|
||||||
|
if len(metrics.TopIPs) != 2 {
|
||||||
|
t.Fatalf("len(TopIPs) = %d, want 2", len(metrics.TopIPs))
|
||||||
|
}
|
||||||
|
if metrics.TopIPs[0].RequestCount != 2 || metrics.TopIPs[1].RequestCount != 2 {
|
||||||
|
t.Fatalf("TopIPs counts = %+v, want both counts to be 2", metrics.TopIPs)
|
||||||
|
}
|
||||||
|
if metrics.TopIPs[0].Key != "10.0.0.1" && metrics.TopIPs[0].Key != "10.0.0.2" {
|
||||||
|
t.Fatalf("TopIPs[0].Key = %q, want normalized IP", metrics.TopIPs[0].Key)
|
||||||
|
}
|
||||||
|
if len(metrics.TopAgents) != 2 {
|
||||||
|
t.Fatalf("len(TopAgents) = %d, want 2", len(metrics.TopAgents))
|
||||||
|
}
|
||||||
|
if metrics.TopAgents[0].RequestCount != 2 || metrics.TopAgents[1].RequestCount != 2 {
|
||||||
|
t.Fatalf("TopAgents counts = %+v, want both counts to be 2", metrics.TopAgents)
|
||||||
|
}
|
||||||
|
if len(metrics.TopTools) != 2 {
|
||||||
|
t.Fatalf("len(TopTools) = %d, want 2", len(metrics.TopTools))
|
||||||
|
}
|
||||||
|
if metrics.TopTools[0].Key != "list_projects" || metrics.TopTools[0].RequestCount != 2 {
|
||||||
|
t.Fatalf("TopTools[0] = %+v, want list_projects with count 2", metrics.TopTools[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
package auth
|
package auth
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
@@ -8,6 +10,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/observability"
|
||||||
)
|
)
|
||||||
|
|
||||||
func testLogger() *slog.Logger {
|
func testLogger() *slog.Logger {
|
||||||
@@ -157,3 +160,81 @@ func TestMiddlewareRejectsMissingOrInvalidKey(t *testing.T) {
|
|||||||
t.Fatalf("invalid key status = %d, want %d", rec.Code, http.StatusUnauthorized)
|
t.Fatalf("invalid key status = %d, want %d", rec.Code, http.StatusUnauthorized)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestMiddlewareRecordsForwardedRemoteAddr(t *testing.T) {
|
||||||
|
keyring, err := NewKeyring([]config.APIKey{{ID: "client-a", Value: "secret"}})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewKeyring() error = %v", err)
|
||||||
|
}
|
||||||
|
tracker := NewAccessTracker()
|
||||||
|
|
||||||
|
handler := Middleware(config.AuthConfig{HeaderName: "x-brain-key"}, keyring, nil, nil, tracker, testLogger())(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/mcp", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.5:2222"
|
||||||
|
req.Header.Set("x-brain-key", "secret")
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.99")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
snap := tracker.Snapshot()
|
||||||
|
if len(snap) != 1 {
|
||||||
|
t.Fatalf("len(snapshot) = %d, want 1", len(snap))
|
||||||
|
}
|
||||||
|
if snap[0].RemoteAddr != "203.0.113.99" {
|
||||||
|
t.Fatalf("snapshot remote_addr = %q, want %q", snap[0].RemoteAddr, "203.0.113.99")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMiddlewareRecordsMCPToolUsage(t *testing.T) {
|
||||||
|
keyring, err := NewKeyring([]config.APIKey{{ID: "client-a", Value: "secret"}})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewKeyring() error = %v", err)
|
||||||
|
}
|
||||||
|
tracker := NewAccessTracker()
|
||||||
|
logger := testLogger()
|
||||||
|
|
||||||
|
authenticated := Middleware(config.AuthConfig{HeaderName: "x-brain-key"}, keyring, nil, nil, tracker, logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
handler := observability.AccessLog(logger)(authenticated)
|
||||||
|
|
||||||
|
payload := map[string]any{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": map[string]any{
|
||||||
|
"name": "list_projects",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("json.Marshal() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body))
|
||||||
|
req.Header.Set("x-brain-key", "secret")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
metrics := tracker.Metrics(10)
|
||||||
|
if metrics.UniqueTools != 1 {
|
||||||
|
t.Fatalf("UniqueTools = %d, want 1", metrics.UniqueTools)
|
||||||
|
}
|
||||||
|
if len(metrics.TopTools) != 1 {
|
||||||
|
t.Fatalf("len(TopTools) = %d, want 1", len(metrics.TopTools))
|
||||||
|
}
|
||||||
|
if metrics.TopTools[0].Key != "list_projects" || metrics.TopTools[0].RequestCount != 1 {
|
||||||
|
t.Fatalf("TopTools[0] = %+v, want list_projects with count 1", metrics.TopTools[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,6 +9,8 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/observability"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
type contextKey string
|
type contextKey string
|
||||||
@@ -22,17 +24,25 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
}
|
}
|
||||||
recordAccess := func(r *http.Request, keyID string) {
|
recordAccess := func(r *http.Request, keyID string) {
|
||||||
if tracker != nil {
|
if tracker != nil {
|
||||||
tracker.Record(keyID, r.URL.Path, r.RemoteAddr, r.UserAgent(), time.Now())
|
tracker.Record(
|
||||||
|
keyID,
|
||||||
|
r.URL.Path,
|
||||||
|
requestip.FromRequest(r),
|
||||||
|
r.UserAgent(),
|
||||||
|
observability.MCPToolFromContext(r.Context()),
|
||||||
|
time.Now(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
remoteAddr := requestip.FromRequest(r)
|
||||||
// 1. Custom header → keyring only.
|
// 1. Custom header → keyring only.
|
||||||
if keyring != nil {
|
if keyring != nil {
|
||||||
if token := strings.TrimSpace(r.Header.Get(headerName)); token != "" {
|
if token := strings.TrimSpace(r.Header.Get(headerName)); token != "" {
|
||||||
keyID, ok := keyring.Lookup(token)
|
keyID, ok := keyring.Lookup(token)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -58,7 +68,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
log.Warn("bearer token rejected", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("bearer token rejected", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid token or API key", http.StatusUnauthorized)
|
http.Error(w, "invalid token or API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -71,7 +81,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
}
|
}
|
||||||
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("oauth client authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth client authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid OAuth client credentials", http.StatusUnauthorized)
|
http.Error(w, "invalid OAuth client credentials", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -85,7 +95,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
if token := strings.TrimSpace(r.URL.Query().Get(cfg.QueryParam)); token != "" {
|
if token := strings.TrimSpace(r.URL.Query().Get(cfg.QueryParam)); token != "" {
|
||||||
keyID, ok := keyring.Lookup(token)
|
keyID, ok := keyring.Lookup(token)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
|
Version int `yaml:"version"`
|
||||||
Server ServerConfig `yaml:"server"`
|
Server ServerConfig `yaml:"server"`
|
||||||
MCP MCPConfig `yaml:"mcp"`
|
MCP MCPConfig `yaml:"mcp"`
|
||||||
Auth AuthConfig `yaml:"auth"`
|
Auth AuthConfig `yaml:"auth"`
|
||||||
@@ -37,11 +38,8 @@ type MCPConfig struct {
|
|||||||
Version string `yaml:"version"`
|
Version string `yaml:"version"`
|
||||||
Transport string `yaml:"transport"`
|
Transport string `yaml:"transport"`
|
||||||
SessionTimeout time.Duration `yaml:"session_timeout"`
|
SessionTimeout time.Duration `yaml:"session_timeout"`
|
||||||
// PublicURL is the externally reachable base URL of this server (e.g. https://amcs.example.com).
|
PublicURL string `yaml:"public_url"`
|
||||||
// When set, it is used to build absolute icon URLs in the MCP server identity.
|
Instructions string `yaml:"-"`
|
||||||
PublicURL string `yaml:"public_url"`
|
|
||||||
// Instructions is set at startup from the embedded memory.md and sent to MCP clients on initialise.
|
|
||||||
Instructions string `yaml:"-"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type AuthConfig struct {
|
type AuthConfig struct {
|
||||||
@@ -77,52 +75,82 @@ type DatabaseConfig struct {
|
|||||||
MaxConnIdleTime time.Duration `yaml:"max_conn_idle_time"`
|
MaxConnIdleTime time.Duration `yaml:"max_conn_idle_time"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AIConfig (v2): named providers + per-role chains.
|
||||||
type AIConfig struct {
|
type AIConfig struct {
|
||||||
Provider string `yaml:"provider"`
|
Providers map[string]ProviderConfig `yaml:"providers"`
|
||||||
Embeddings AIEmbeddingConfig `yaml:"embeddings"`
|
Embeddings EmbeddingsRoleConfig `yaml:"embeddings"`
|
||||||
Metadata AIMetadataConfig `yaml:"metadata"`
|
Metadata MetadataRoleConfig `yaml:"metadata"`
|
||||||
LiteLLM LiteLLMConfig `yaml:"litellm"`
|
Background *BackgroundRolesConfig `yaml:"background,omitempty"`
|
||||||
Ollama OllamaConfig `yaml:"ollama"`
|
|
||||||
OpenRouter OpenRouterAIConfig `yaml:"openrouter"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type AIEmbeddingConfig struct {
|
type ProviderConfig struct {
|
||||||
Model string `yaml:"model"`
|
Type string `yaml:"type"`
|
||||||
Dimensions int `yaml:"dimensions"`
|
BaseURL string `yaml:"base_url"`
|
||||||
|
APIKey string `yaml:"api_key"`
|
||||||
|
RequestHeaders map[string]string `yaml:"request_headers,omitempty"`
|
||||||
|
AppName string `yaml:"app_name,omitempty"`
|
||||||
|
SiteURL string `yaml:"site_url,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type AIMetadataConfig struct {
|
type RoleTarget struct {
|
||||||
Model string `yaml:"model"`
|
Provider string `yaml:"provider"`
|
||||||
FallbackModels []string `yaml:"fallback_models"`
|
Model string `yaml:"model"`
|
||||||
FallbackModel string `yaml:"fallback_model"` // legacy single fallback
|
}
|
||||||
|
|
||||||
|
type RoleChain struct {
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type EmbeddingsRoleConfig struct {
|
||||||
|
Dimensions int `yaml:"dimensions"`
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type MetadataRoleConfig struct {
|
||||||
Temperature float64 `yaml:"temperature"`
|
Temperature float64 `yaml:"temperature"`
|
||||||
LogConversations bool `yaml:"log_conversations"`
|
LogConversations bool `yaml:"log_conversations"`
|
||||||
Timeout time.Duration `yaml:"timeout"`
|
Timeout time.Duration `yaml:"timeout"`
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type LiteLLMConfig struct {
|
// BackgroundRolesConfig overrides the foreground chains for background workers
|
||||||
BaseURL string `yaml:"base_url"`
|
// (backfill_embeddings, metadata_retry, reparse_metadata). Either field may be
|
||||||
APIKey string `yaml:"api_key"`
|
// nil to inherit the foreground role unchanged.
|
||||||
UseResponsesAPI bool `yaml:"use_responses_api"`
|
type BackgroundRolesConfig struct {
|
||||||
RequestHeaders map[string]string `yaml:"request_headers"`
|
Embeddings *RoleChain `yaml:"embeddings,omitempty"`
|
||||||
EmbeddingModel string `yaml:"embedding_model"`
|
Metadata *RoleChain `yaml:"metadata,omitempty"`
|
||||||
MetadataModel string `yaml:"metadata_model"`
|
|
||||||
FallbackMetadataModels []string `yaml:"fallback_metadata_models"`
|
|
||||||
FallbackMetadataModel string `yaml:"fallback_metadata_model"` // legacy single fallback
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type OllamaConfig struct {
|
// Chain returns primary followed by fallbacks (deduped, blanks dropped).
|
||||||
BaseURL string `yaml:"base_url"`
|
func (e EmbeddingsRoleConfig) Chain() []RoleTarget {
|
||||||
APIKey string `yaml:"api_key"`
|
return dedupeTargets(append([]RoleTarget{e.Primary}, e.Fallbacks...))
|
||||||
RequestHeaders map[string]string `yaml:"request_headers"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type OpenRouterAIConfig struct {
|
func (m MetadataRoleConfig) Chain() []RoleTarget {
|
||||||
BaseURL string `yaml:"base_url"`
|
return dedupeTargets(append([]RoleTarget{m.Primary}, m.Fallbacks...))
|
||||||
APIKey string `yaml:"api_key"`
|
}
|
||||||
AppName string `yaml:"app_name"`
|
|
||||||
SiteURL string `yaml:"site_url"`
|
func (c RoleChain) AsTargets() []RoleTarget {
|
||||||
ExtraHeaders map[string]string `yaml:"extra_headers"`
|
return dedupeTargets(append([]RoleTarget{c.Primary}, c.Fallbacks...))
|
||||||
|
}
|
||||||
|
|
||||||
|
func dedupeTargets(in []RoleTarget) []RoleTarget {
|
||||||
|
out := make([]RoleTarget, 0, len(in))
|
||||||
|
seen := make(map[RoleTarget]struct{}, len(in))
|
||||||
|
for _, t := range in {
|
||||||
|
if t.Provider == "" || t.Model == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := seen[t]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[t] = struct{}{}
|
||||||
|
out = append(out, t)
|
||||||
|
}
|
||||||
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
type CaptureConfig struct {
|
type CaptureConfig struct {
|
||||||
@@ -167,45 +195,3 @@ type MetadataRetryConfig struct {
|
|||||||
MaxPerRun int `yaml:"max_per_run"`
|
MaxPerRun int `yaml:"max_per_run"`
|
||||||
IncludeArchived bool `yaml:"include_archived"`
|
IncludeArchived bool `yaml:"include_archived"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c AIMetadataConfig) EffectiveFallbackModels() []string {
|
|
||||||
models := make([]string, 0, len(c.FallbackModels)+1)
|
|
||||||
for _, model := range c.FallbackModels {
|
|
||||||
if model != "" {
|
|
||||||
models = append(models, model)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c.FallbackModel != "" {
|
|
||||||
models = append(models, c.FallbackModel)
|
|
||||||
}
|
|
||||||
return dedupeNonEmpty(models)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c LiteLLMConfig) EffectiveFallbackMetadataModels() []string {
|
|
||||||
models := make([]string, 0, len(c.FallbackMetadataModels)+1)
|
|
||||||
for _, model := range c.FallbackMetadataModels {
|
|
||||||
if model != "" {
|
|
||||||
models = append(models, model)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c.FallbackMetadataModel != "" {
|
|
||||||
models = append(models, c.FallbackMetadataModel)
|
|
||||||
}
|
|
||||||
return dedupeNonEmpty(models)
|
|
||||||
}
|
|
||||||
|
|
||||||
func dedupeNonEmpty(values []string) []string {
|
|
||||||
seen := make(map[string]struct{}, len(values))
|
|
||||||
out := make([]string, 0, len(values))
|
|
||||||
for _, value := range values {
|
|
||||||
if value == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := seen[value]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[value] = struct{}{}
|
|
||||||
out = append(out, value)
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package config
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
@@ -12,6 +13,12 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func Load(explicitPath string) (*Config, string, error) {
|
func Load(explicitPath string) (*Config, string, error) {
|
||||||
|
return LoadWithLogger(explicitPath, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadWithLogger is Load with a logger surface for migration notices. Passing
|
||||||
|
// nil is fine — migration events will simply not be logged.
|
||||||
|
func LoadWithLogger(explicitPath string, log *slog.Logger) (*Config, string, error) {
|
||||||
path := ResolvePath(explicitPath)
|
path := ResolvePath(explicitPath)
|
||||||
|
|
||||||
data, err := os.ReadFile(path)
|
data, err := os.ReadFile(path)
|
||||||
@@ -19,10 +26,38 @@ func Load(explicitPath string) (*Config, string, error) {
|
|||||||
return nil, path, fmt.Errorf("read config %q: %w", path, err)
|
return nil, path, fmt.Errorf("read config %q: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cfg := defaultConfig()
|
raw := map[string]any{}
|
||||||
if err := yaml.Unmarshal(data, &cfg); err != nil {
|
if err := yaml.Unmarshal(data, &raw); err != nil {
|
||||||
return nil, path, fmt.Errorf("decode config %q: %w", path, err)
|
return nil, path, fmt.Errorf("decode config %q: %w", path, err)
|
||||||
}
|
}
|
||||||
|
if raw == nil {
|
||||||
|
raw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := Migrate(raw)
|
||||||
|
if err != nil {
|
||||||
|
return nil, path, fmt.Errorf("migrate config %q: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(applied) > 0 {
|
||||||
|
if log != nil {
|
||||||
|
for _, step := range applied {
|
||||||
|
log.Warn("config migrated in memory",
|
||||||
|
slog.String("path", path),
|
||||||
|
slog.Int("from_version", step.From),
|
||||||
|
slog.Int("to_version", step.To),
|
||||||
|
slog.String("describe", step.Describe),
|
||||||
|
slog.String("hint", "persist with amcs-migrate-config"),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, err := decodeTyped(raw)
|
||||||
|
if err != nil {
|
||||||
|
return nil, path, fmt.Errorf("decode migrated config %q: %w", path, err)
|
||||||
|
}
|
||||||
|
cfg.Version = CurrentConfigVersion
|
||||||
|
|
||||||
applyEnvOverrides(&cfg)
|
applyEnvOverrides(&cfg)
|
||||||
if err := cfg.Validate(); err != nil {
|
if err := cfg.Validate(); err != nil {
|
||||||
@@ -32,6 +67,18 @@ func Load(explicitPath string) (*Config, string, error) {
|
|||||||
return &cfg, path, nil
|
return &cfg, path, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func decodeTyped(raw map[string]any) (Config, error) {
|
||||||
|
out, err := yaml.Marshal(raw)
|
||||||
|
if err != nil {
|
||||||
|
return Config{}, fmt.Errorf("re-marshal migrated config: %w", err)
|
||||||
|
}
|
||||||
|
cfg := defaultConfig()
|
||||||
|
if err := yaml.Unmarshal(out, &cfg); err != nil {
|
||||||
|
return Config{}, err
|
||||||
|
}
|
||||||
|
return cfg, nil
|
||||||
|
}
|
||||||
|
|
||||||
func ResolvePath(explicitPath string) string {
|
func ResolvePath(explicitPath string) string {
|
||||||
if path := strings.TrimSpace(explicitPath); path != "" {
|
if path := strings.TrimSpace(explicitPath); path != "" {
|
||||||
if path != ".yaml" && path != ".yml" {
|
if path != ".yaml" && path != ".yml" {
|
||||||
@@ -49,6 +96,7 @@ func ResolvePath(explicitPath string) string {
|
|||||||
func defaultConfig() Config {
|
func defaultConfig() Config {
|
||||||
info := buildinfo.Current()
|
info := buildinfo.Current()
|
||||||
return Config{
|
return Config{
|
||||||
|
Version: CurrentConfigVersion,
|
||||||
Server: ServerConfig{
|
Server: ServerConfig{
|
||||||
Host: "0.0.0.0",
|
Host: "0.0.0.0",
|
||||||
Port: 8080,
|
Port: 8080,
|
||||||
@@ -69,20 +117,14 @@ func defaultConfig() Config {
|
|||||||
QueryParam: "key",
|
QueryParam: "key",
|
||||||
},
|
},
|
||||||
AI: AIConfig{
|
AI: AIConfig{
|
||||||
Provider: "litellm",
|
Providers: map[string]ProviderConfig{},
|
||||||
Embeddings: AIEmbeddingConfig{
|
Embeddings: EmbeddingsRoleConfig{
|
||||||
Model: "openai/text-embedding-3-small",
|
|
||||||
Dimensions: 1536,
|
Dimensions: 1536,
|
||||||
},
|
},
|
||||||
Metadata: AIMetadataConfig{
|
Metadata: MetadataRoleConfig{
|
||||||
Model: "gpt-4o-mini",
|
|
||||||
Temperature: 0.1,
|
Temperature: 0.1,
|
||||||
Timeout: 10 * time.Second,
|
Timeout: 10 * time.Second,
|
||||||
},
|
},
|
||||||
Ollama: OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
Capture: CaptureConfig{
|
Capture: CaptureConfig{
|
||||||
Source: DefaultSource,
|
Source: DefaultSource,
|
||||||
@@ -119,11 +161,12 @@ func defaultConfig() Config {
|
|||||||
func applyEnvOverrides(cfg *Config) {
|
func applyEnvOverrides(cfg *Config) {
|
||||||
overrideString(&cfg.Database.URL, "AMCS_DATABASE_URL")
|
overrideString(&cfg.Database.URL, "AMCS_DATABASE_URL")
|
||||||
overrideString(&cfg.MCP.PublicURL, "AMCS_PUBLIC_URL")
|
overrideString(&cfg.MCP.PublicURL, "AMCS_PUBLIC_URL")
|
||||||
overrideString(&cfg.AI.LiteLLM.BaseURL, "AMCS_LITELLM_BASE_URL")
|
|
||||||
overrideString(&cfg.AI.LiteLLM.APIKey, "AMCS_LITELLM_API_KEY")
|
overrideProviderField(cfg, "AMCS_LITELLM_BASE_URL", "litellm", func(p *ProviderConfig, v string) { p.BaseURL = v })
|
||||||
overrideString(&cfg.AI.Ollama.BaseURL, "AMCS_OLLAMA_BASE_URL")
|
overrideProviderField(cfg, "AMCS_LITELLM_API_KEY", "litellm", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
overrideString(&cfg.AI.Ollama.APIKey, "AMCS_OLLAMA_API_KEY")
|
overrideProviderField(cfg, "AMCS_OLLAMA_BASE_URL", "ollama", func(p *ProviderConfig, v string) { p.BaseURL = v })
|
||||||
overrideString(&cfg.AI.OpenRouter.APIKey, "AMCS_OPENROUTER_API_KEY")
|
overrideProviderField(cfg, "AMCS_OLLAMA_API_KEY", "ollama", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
|
overrideProviderField(cfg, "AMCS_OPENROUTER_API_KEY", "openrouter", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
|
|
||||||
if value, ok := os.LookupEnv("AMCS_SERVER_PORT"); ok {
|
if value, ok := os.LookupEnv("AMCS_SERVER_PORT"); ok {
|
||||||
if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil {
|
if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil {
|
||||||
@@ -132,6 +175,24 @@ func applyEnvOverrides(cfg *Config) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// overrideProviderField applies an env var to every configured provider of the
|
||||||
|
// given type. This preserves the v1 behaviour where e.g. AMCS_LITELLM_API_KEY
|
||||||
|
// rewrote the single litellm block — in v2 it rewrites every litellm provider.
|
||||||
|
func overrideProviderField(cfg *Config, envKey, providerType string, apply func(*ProviderConfig, string)) {
|
||||||
|
value, ok := os.LookupEnv(envKey)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
value = strings.TrimSpace(value)
|
||||||
|
for name, p := range cfg.AI.Providers {
|
||||||
|
if p.Type != providerType {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
apply(&p, value)
|
||||||
|
cfg.AI.Providers[name] = p
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func overrideString(target *string, envKey string) {
|
func overrideString(target *string, envKey string) {
|
||||||
if value, ok := os.LookupEnv(envKey); ok {
|
if value, ok := os.LookupEnv(envKey); ok {
|
||||||
*target = strings.TrimSpace(value)
|
*target = strings.TrimSpace(value)
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package config
|
|||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
@@ -31,9 +32,8 @@ func TestResolvePathIgnoresBareYAMLExtension(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadAppliesEnvOverrides(t *testing.T) {
|
const v2ConfigYAML = `
|
||||||
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
version: 2
|
||||||
if err := os.WriteFile(configPath, []byte(`
|
|
||||||
server:
|
server:
|
||||||
port: 8080
|
port: 8080
|
||||||
mcp:
|
mcp:
|
||||||
@@ -46,18 +46,30 @@ auth:
|
|||||||
database:
|
database:
|
||||||
url: "postgres://from-file"
|
url: "postgres://from-file"
|
||||||
ai:
|
ai:
|
||||||
provider: "litellm"
|
providers:
|
||||||
|
default:
|
||||||
|
type: "litellm"
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "file-key"
|
||||||
embeddings:
|
embeddings:
|
||||||
dimensions: 1536
|
dimensions: 1536
|
||||||
litellm:
|
primary:
|
||||||
base_url: "http://localhost:4000/v1"
|
provider: "default"
|
||||||
api_key: "file-key"
|
model: "text-embed"
|
||||||
|
metadata:
|
||||||
|
primary:
|
||||||
|
provider: "default"
|
||||||
|
model: "gpt-4"
|
||||||
search:
|
search:
|
||||||
default_limit: 10
|
default_limit: 10
|
||||||
max_limit: 50
|
max_limit: 50
|
||||||
logging:
|
logging:
|
||||||
level: "info"
|
level: "info"
|
||||||
`), 0o600); err != nil {
|
`
|
||||||
|
|
||||||
|
func TestLoadAppliesEnvOverrides(t *testing.T) {
|
||||||
|
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
||||||
|
if err := os.WriteFile(configPath, []byte(v2ConfigYAML), 0o600); err != nil {
|
||||||
t.Fatalf("write config: %v", err)
|
t.Fatalf("write config: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -76,8 +88,8 @@ logging:
|
|||||||
if cfg.Database.URL != "postgres://from-env" {
|
if cfg.Database.URL != "postgres://from-env" {
|
||||||
t.Fatalf("database url = %q, want env override", cfg.Database.URL)
|
t.Fatalf("database url = %q, want env override", cfg.Database.URL)
|
||||||
}
|
}
|
||||||
if cfg.AI.LiteLLM.APIKey != "env-key" {
|
if cfg.AI.Providers["default"].APIKey != "env-key" {
|
||||||
t.Fatalf("litellm api key = %q, want env override", cfg.AI.LiteLLM.APIKey)
|
t.Fatalf("litellm api key = %q, want env override", cfg.AI.Providers["default"].APIKey)
|
||||||
}
|
}
|
||||||
if cfg.Server.Port != 9090 {
|
if cfg.Server.Port != 9090 {
|
||||||
t.Fatalf("server port = %d, want 9090", cfg.Server.Port)
|
t.Fatalf("server port = %d, want 9090", cfg.Server.Port)
|
||||||
@@ -90,10 +102,12 @@ logging:
|
|||||||
func TestLoadAppliesOllamaEnvOverrides(t *testing.T) {
|
func TestLoadAppliesOllamaEnvOverrides(t *testing.T) {
|
||||||
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
||||||
if err := os.WriteFile(configPath, []byte(`
|
if err := os.WriteFile(configPath, []byte(`
|
||||||
|
version: 2
|
||||||
server:
|
server:
|
||||||
port: 8080
|
port: 8080
|
||||||
mcp:
|
mcp:
|
||||||
path: "/mcp"
|
path: "/mcp"
|
||||||
|
session_timeout: "10m"
|
||||||
auth:
|
auth:
|
||||||
keys:
|
keys:
|
||||||
- id: "test"
|
- id: "test"
|
||||||
@@ -101,15 +115,20 @@ auth:
|
|||||||
database:
|
database:
|
||||||
url: "postgres://from-file"
|
url: "postgres://from-file"
|
||||||
ai:
|
ai:
|
||||||
provider: "ollama"
|
providers:
|
||||||
|
local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "nomic-embed-text"
|
|
||||||
dimensions: 768
|
dimensions: 768
|
||||||
|
primary:
|
||||||
|
provider: "local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
metadata:
|
metadata:
|
||||||
model: "llama3.2"
|
primary:
|
||||||
ollama:
|
provider: "local"
|
||||||
base_url: "http://localhost:11434/v1"
|
model: "llama3.2"
|
||||||
api_key: "ollama"
|
|
||||||
search:
|
search:
|
||||||
default_limit: 10
|
default_limit: 10
|
||||||
max_limit: 50
|
max_limit: 50
|
||||||
@@ -127,10 +146,85 @@ logging:
|
|||||||
t.Fatalf("Load() error = %v", err)
|
t.Fatalf("Load() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.AI.Ollama.BaseURL != "https://ollama.example.com/v1" {
|
p := cfg.AI.Providers["local"]
|
||||||
t.Fatalf("ollama base url = %q, want env override", cfg.AI.Ollama.BaseURL)
|
if p.BaseURL != "https://ollama.example.com/v1" {
|
||||||
|
t.Fatalf("ollama base url = %q, want env override", p.BaseURL)
|
||||||
}
|
}
|
||||||
if cfg.AI.Ollama.APIKey != "remote-key" {
|
if p.APIKey != "remote-key" {
|
||||||
t.Fatalf("ollama api key = %q, want env override", cfg.AI.Ollama.APIKey)
|
t.Fatalf("ollama api key = %q, want env override", p.APIKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadMigratesV1Config(t *testing.T) {
|
||||||
|
configPath := filepath.Join(t.TempDir(), "v1.yaml")
|
||||||
|
v1 := `
|
||||||
|
server:
|
||||||
|
port: 8080
|
||||||
|
mcp:
|
||||||
|
path: "/mcp"
|
||||||
|
session_timeout: "10m"
|
||||||
|
auth:
|
||||||
|
keys:
|
||||||
|
- id: "test"
|
||||||
|
value: "secret"
|
||||||
|
database:
|
||||||
|
url: "postgres://from-file"
|
||||||
|
ai:
|
||||||
|
provider: "litellm"
|
||||||
|
embeddings:
|
||||||
|
model: "text-embed"
|
||||||
|
dimensions: 1536
|
||||||
|
metadata:
|
||||||
|
model: "gpt-4"
|
||||||
|
temperature: 0.2
|
||||||
|
fallback_models: ["gpt-3.5"]
|
||||||
|
litellm:
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "file-key"
|
||||||
|
search:
|
||||||
|
default_limit: 10
|
||||||
|
max_limit: 50
|
||||||
|
logging:
|
||||||
|
level: "info"
|
||||||
|
`
|
||||||
|
if err := os.WriteFile(configPath, []byte(v1), 0o600); err != nil {
|
||||||
|
t.Fatalf("write config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, _, err := Load(configPath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Load() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.Version != CurrentConfigVersion {
|
||||||
|
t.Fatalf("version = %d, want %d", cfg.Version, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
if p, ok := cfg.AI.Providers["default"]; !ok || p.Type != "litellm" || p.APIKey != "file-key" {
|
||||||
|
t.Fatalf("providers[default] = %+v, want litellm/file-key", p)
|
||||||
|
}
|
||||||
|
if cfg.AI.Embeddings.Primary.Model != "text-embed" || cfg.AI.Embeddings.Primary.Provider != "default" {
|
||||||
|
t.Fatalf("embeddings.primary = %+v, want default/text-embed", cfg.AI.Embeddings.Primary)
|
||||||
|
}
|
||||||
|
if cfg.AI.Metadata.Primary.Model != "gpt-4" || cfg.AI.Metadata.Primary.Provider != "default" {
|
||||||
|
t.Fatalf("metadata.primary = %+v, want default/gpt-4", cfg.AI.Metadata.Primary)
|
||||||
|
}
|
||||||
|
if len(cfg.AI.Metadata.Fallbacks) != 1 || cfg.AI.Metadata.Fallbacks[0].Model != "gpt-3.5" {
|
||||||
|
t.Fatalf("metadata.fallbacks = %+v, want [default/gpt-3.5]", cfg.AI.Metadata.Fallbacks)
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := filepath.Glob(configPath + ".bak.*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("glob backups: %v", err)
|
||||||
|
}
|
||||||
|
if len(entries) != 0 {
|
||||||
|
t.Fatalf("backup files = %d, want 0 (load should not rewrite config)", len(entries))
|
||||||
|
}
|
||||||
|
|
||||||
|
originalOnDisk, err := os.ReadFile(configPath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("read original config: %v", err)
|
||||||
|
}
|
||||||
|
if !strings.Contains(string(originalOnDisk), "provider: \"litellm\"") {
|
||||||
|
t.Fatalf("expected source config to remain unchanged on disk")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
341
internal/config/migrate.go
Normal file
341
internal/config/migrate.go
Normal file
@@ -0,0 +1,341 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CurrentConfigVersion is the schema version this binary expects. Files at a
|
||||||
|
// lower version are migrated automatically when loaded.
|
||||||
|
const CurrentConfigVersion = 2
|
||||||
|
|
||||||
|
// ConfigMigration upgrades a raw YAML map by one version.
|
||||||
|
type ConfigMigration struct {
|
||||||
|
From, To int
|
||||||
|
Describe string
|
||||||
|
Apply func(map[string]any) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrations is the ordered ladder of upgrades. Add new entries at the end.
|
||||||
|
var migrations = []ConfigMigration{
|
||||||
|
{From: 1, To: 2, Describe: "named providers + role chains", Apply: migrateV1toV2},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate brings raw up to CurrentConfigVersion in place. Returns the list of
|
||||||
|
// migrations that were applied (may be empty if already current).
|
||||||
|
func Migrate(raw map[string]any) ([]ConfigMigration, error) {
|
||||||
|
if raw == nil {
|
||||||
|
return nil, fmt.Errorf("migrate: raw config is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
version := readVersion(raw)
|
||||||
|
if version > CurrentConfigVersion {
|
||||||
|
return nil, fmt.Errorf("migrate: config version %d is newer than supported version %d", version, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
applied := make([]ConfigMigration, 0)
|
||||||
|
for {
|
||||||
|
if version >= CurrentConfigVersion {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
step, ok := findMigration(version)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("migrate: no migration registered from version %d", version)
|
||||||
|
}
|
||||||
|
if err := step.Apply(raw); err != nil {
|
||||||
|
return nil, fmt.Errorf("migrate v%d->v%d: %w", step.From, step.To, err)
|
||||||
|
}
|
||||||
|
raw["version"] = step.To
|
||||||
|
version = step.To
|
||||||
|
applied = append(applied, step)
|
||||||
|
}
|
||||||
|
return applied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findMigration(from int) (ConfigMigration, bool) {
|
||||||
|
for _, m := range migrations {
|
||||||
|
if m.From == from {
|
||||||
|
return m, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ConfigMigration{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// readVersion returns the version from raw. Files without a version field are
|
||||||
|
// treated as version 1 (the original schema).
|
||||||
|
func readVersion(raw map[string]any) int {
|
||||||
|
v, ok := raw["version"]
|
||||||
|
if !ok {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
switch n := v.(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateV1toV2 lifts the single-provider config into the named-providers +
|
||||||
|
// role-chains layout. The pre-v2 config implicitly used one provider for both
|
||||||
|
// embeddings and metadata; we materialise that as a provider named "default".
|
||||||
|
func migrateV1toV2(raw map[string]any) error {
|
||||||
|
aiRaw := mapValue(raw, "ai")
|
||||||
|
if aiRaw == nil {
|
||||||
|
aiRaw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
providerType := stringValue(aiRaw, "provider")
|
||||||
|
if providerType == "" {
|
||||||
|
providerType = "litellm"
|
||||||
|
}
|
||||||
|
|
||||||
|
providers, embeddingModel, metadataModel, fallbackModels := buildV1Provider(aiRaw, providerType)
|
||||||
|
|
||||||
|
embeddingsOld := mapValue(aiRaw, "embeddings")
|
||||||
|
dimensions := intValue(embeddingsOld, "dimensions")
|
||||||
|
if dimensions <= 0 {
|
||||||
|
dimensions = 1536
|
||||||
|
}
|
||||||
|
if embeddingModel == "" {
|
||||||
|
embeddingModel = stringValue(embeddingsOld, "model")
|
||||||
|
}
|
||||||
|
|
||||||
|
metadataOld := mapValue(aiRaw, "metadata")
|
||||||
|
if metadataModel == "" {
|
||||||
|
metadataModel = stringValue(metadataOld, "model")
|
||||||
|
}
|
||||||
|
temperature := floatValue(metadataOld, "temperature")
|
||||||
|
logConversations := boolValue(metadataOld, "log_conversations")
|
||||||
|
timeoutStr := stringValue(metadataOld, "timeout")
|
||||||
|
|
||||||
|
if list := stringListValue(metadataOld, "fallback_models"); len(list) > 0 {
|
||||||
|
fallbackModels = append(fallbackModels, list...)
|
||||||
|
}
|
||||||
|
if v := stringValue(metadataOld, "fallback_model"); v != "" {
|
||||||
|
fallbackModels = append(fallbackModels, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
embeddings := map[string]any{
|
||||||
|
"dimensions": dimensions,
|
||||||
|
"primary": map[string]any{"provider": "default", "model": embeddingModel},
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata := map[string]any{
|
||||||
|
"temperature": temperature,
|
||||||
|
"log_conversations": logConversations,
|
||||||
|
"primary": map[string]any{"provider": "default", "model": metadataModel},
|
||||||
|
}
|
||||||
|
if timeoutStr != "" {
|
||||||
|
metadata["timeout"] = timeoutStr
|
||||||
|
}
|
||||||
|
if fallbacks := chainTargets("default", fallbackModels); len(fallbacks) > 0 {
|
||||||
|
metadata["fallbacks"] = fallbacks
|
||||||
|
}
|
||||||
|
|
||||||
|
raw["ai"] = map[string]any{
|
||||||
|
"providers": providers,
|
||||||
|
"embeddings": embeddings,
|
||||||
|
"metadata": metadata,
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildV1Provider(aiRaw map[string]any, providerType string) (map[string]any, string, string, []string) {
|
||||||
|
providers := map[string]any{}
|
||||||
|
defaultEntry := map[string]any{"type": providerType}
|
||||||
|
embedModel := ""
|
||||||
|
metaModel := ""
|
||||||
|
var fallbacks []string
|
||||||
|
|
||||||
|
switch providerType {
|
||||||
|
case "litellm":
|
||||||
|
block := mapValue(aiRaw, "litellm")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key")
|
||||||
|
copyHeaders(defaultEntry, block, "request_headers")
|
||||||
|
embedModel = stringValue(block, "embedding_model")
|
||||||
|
metaModel = stringValue(block, "metadata_model")
|
||||||
|
if list := stringListValue(block, "fallback_metadata_models"); len(list) > 0 {
|
||||||
|
fallbacks = append(fallbacks, list...)
|
||||||
|
}
|
||||||
|
if v := stringValue(block, "fallback_metadata_model"); v != "" {
|
||||||
|
fallbacks = append(fallbacks, v)
|
||||||
|
}
|
||||||
|
case "ollama":
|
||||||
|
block := mapValue(aiRaw, "ollama")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key")
|
||||||
|
copyHeaders(defaultEntry, block, "request_headers")
|
||||||
|
case "openrouter":
|
||||||
|
block := mapValue(aiRaw, "openrouter")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key", "app_name", "site_url")
|
||||||
|
copyHeaders(defaultEntry, block, "extra_headers")
|
||||||
|
// rename: extra_headers → request_headers
|
||||||
|
if hdr, ok := defaultEntry["extra_headers"]; ok {
|
||||||
|
defaultEntry["request_headers"] = hdr
|
||||||
|
delete(defaultEntry, "extra_headers")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
providers["default"] = defaultEntry
|
||||||
|
return providers, embedModel, metaModel, fallbacks
|
||||||
|
}
|
||||||
|
|
||||||
|
func chainTargets(provider string, models []string) []any {
|
||||||
|
out := make([]any, 0, len(models))
|
||||||
|
seen := map[string]struct{}{}
|
||||||
|
for _, m := range models {
|
||||||
|
if m == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
key := provider + "|" + m
|
||||||
|
if _, ok := seen[key]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[key] = struct{}{}
|
||||||
|
out = append(out, map[string]any{"provider": provider, "model": m})
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapValue(raw map[string]any, key string) map[string]any {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch m := v.(type) {
|
||||||
|
case map[string]any:
|
||||||
|
return m
|
||||||
|
case map[any]any:
|
||||||
|
return convertAnyMap(m)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func convertAnyMap(in map[any]any) map[string]any {
|
||||||
|
out := make(map[string]any, len(in))
|
||||||
|
keys := make([]string, 0, len(in))
|
||||||
|
for k, v := range in {
|
||||||
|
ks, ok := k.(string)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
keys = append(keys, ks)
|
||||||
|
out[ks] = v
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func stringValue(raw map[string]any, key string) string {
|
||||||
|
if raw == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
if s, ok := v.(string); ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func intValue(raw map[string]any, key string) int {
|
||||||
|
if raw == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
switch n := raw[key].(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func floatValue(raw map[string]any, key string) float64 {
|
||||||
|
if raw == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
switch n := raw[key].(type) {
|
||||||
|
case float64:
|
||||||
|
return n
|
||||||
|
case int:
|
||||||
|
return float64(n)
|
||||||
|
case int64:
|
||||||
|
return float64(n)
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func boolValue(raw map[string]any, key string) bool {
|
||||||
|
if raw == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if b, ok := raw[key].(bool); ok {
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func stringListValue(raw map[string]any, key string) []string {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
list, ok := v.([]any)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := make([]string, 0, len(list))
|
||||||
|
for _, item := range list {
|
||||||
|
if s, ok := item.(string); ok && s != "" {
|
||||||
|
out = append(out, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyKeys(dst, src map[string]any, keys ...string) {
|
||||||
|
if src == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for _, k := range keys {
|
||||||
|
if v, ok := src[k]; ok {
|
||||||
|
dst[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyHeaders(dst, src map[string]any, key string) {
|
||||||
|
if src == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
v, ok := src[key]
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch headers := v.(type) {
|
||||||
|
case map[string]any:
|
||||||
|
if len(headers) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
dst[key] = headers
|
||||||
|
case map[any]any:
|
||||||
|
if len(headers) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
dst[key] = convertAnyMap(headers)
|
||||||
|
}
|
||||||
|
}
|
||||||
77
internal/config/migrate_test.go
Normal file
77
internal/config/migrate_test.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestMigrateV1ToV2Litellm(t *testing.T) {
|
||||||
|
raw := map[string]any{
|
||||||
|
"ai": map[string]any{
|
||||||
|
"provider": "litellm",
|
||||||
|
"embeddings": map[string]any{
|
||||||
|
"model": "text-embedding-3-small",
|
||||||
|
"dimensions": 1536,
|
||||||
|
},
|
||||||
|
"metadata": map[string]any{
|
||||||
|
"model": "gpt-4o-mini",
|
||||||
|
"temperature": 0.2,
|
||||||
|
"fallback_models": []any{"gpt-4.1-mini"},
|
||||||
|
},
|
||||||
|
"litellm": map[string]any{
|
||||||
|
"base_url": "http://localhost:4000/v1",
|
||||||
|
"api_key": "secret",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := Migrate(raw)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Migrate() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(applied) != 1 || applied[0].From != 1 || applied[0].To != 2 {
|
||||||
|
t.Fatalf("applied = %+v, want [v1->v2]", applied)
|
||||||
|
}
|
||||||
|
if got := readVersion(raw); got != CurrentConfigVersion {
|
||||||
|
t.Fatalf("version = %d, want %d", got, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
ai := mapValue(raw, "ai")
|
||||||
|
providers := mapValue(ai, "providers")
|
||||||
|
def := mapValue(providers, "default")
|
||||||
|
if got := stringValue(def, "type"); got != "litellm" {
|
||||||
|
t.Fatalf("providers.default.type = %q, want litellm", got)
|
||||||
|
}
|
||||||
|
if got := stringValue(def, "base_url"); got != "http://localhost:4000/v1" {
|
||||||
|
t.Fatalf("providers.default.base_url = %q", got)
|
||||||
|
}
|
||||||
|
|
||||||
|
emb := mapValue(ai, "embeddings")
|
||||||
|
embPrimary := mapValue(emb, "primary")
|
||||||
|
if stringValue(embPrimary, "provider") != "default" || stringValue(embPrimary, "model") != "text-embedding-3-small" {
|
||||||
|
t.Fatalf("embeddings.primary = %+v, want default/text-embedding-3-small", embPrimary)
|
||||||
|
}
|
||||||
|
|
||||||
|
meta := mapValue(ai, "metadata")
|
||||||
|
metaPrimary := mapValue(meta, "primary")
|
||||||
|
if stringValue(metaPrimary, "provider") != "default" || stringValue(metaPrimary, "model") != "gpt-4o-mini" {
|
||||||
|
t.Fatalf("metadata.primary = %+v, want default/gpt-4o-mini", metaPrimary)
|
||||||
|
}
|
||||||
|
fallbacks, ok := meta["fallbacks"].([]any)
|
||||||
|
if !ok || len(fallbacks) != 1 {
|
||||||
|
t.Fatalf("metadata.fallbacks = %#v, want len=1", meta["fallbacks"])
|
||||||
|
}
|
||||||
|
firstFallback, ok := fallbacks[0].(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
t.Fatalf("metadata.fallbacks[0] type = %T, want map[string]any", fallbacks[0])
|
||||||
|
}
|
||||||
|
if stringValue(firstFallback, "provider") != "default" || stringValue(firstFallback, "model") != "gpt-4.1-mini" {
|
||||||
|
t.Fatalf("metadata fallback = %+v, want default/gpt-4.1-mini", firstFallback)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMigrateRejectsNewerVersion(t *testing.T) {
|
||||||
|
raw := map[string]any{"version": CurrentConfigVersion + 1}
|
||||||
|
|
||||||
|
_, err := Migrate(raw)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Migrate() error = nil, want error for newer config version")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -45,38 +45,8 @@ func (c Config) Validate() error {
|
|||||||
return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero")
|
return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero")
|
||||||
}
|
}
|
||||||
|
|
||||||
switch c.AI.Provider {
|
if err := c.AI.validate(); err != nil {
|
||||||
case "litellm", "ollama", "openrouter":
|
return err
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid config: unsupported ai.provider %q", c.AI.Provider)
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.AI.Embeddings.Dimensions <= 0 {
|
|
||||||
return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero")
|
|
||||||
}
|
|
||||||
|
|
||||||
switch c.AI.Provider {
|
|
||||||
case "litellm":
|
|
||||||
if strings.TrimSpace(c.AI.LiteLLM.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.litellm.base_url is required when ai.provider=litellm")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.LiteLLM.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.litellm.api_key is required when ai.provider=litellm")
|
|
||||||
}
|
|
||||||
case "ollama":
|
|
||||||
if strings.TrimSpace(c.AI.Ollama.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.ollama.base_url is required when ai.provider=ollama")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.Ollama.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.ollama.api_key is required when ai.provider=ollama")
|
|
||||||
}
|
|
||||||
case "openrouter":
|
|
||||||
if strings.TrimSpace(c.AI.OpenRouter.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.openrouter.base_url is required when ai.provider=openrouter")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.OpenRouter.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.openrouter.api_key is required when ai.provider=openrouter")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if c.Server.Port <= 0 {
|
if c.Server.Port <= 0 {
|
||||||
@@ -108,3 +78,61 @@ func (c Config) Validate() error {
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a AIConfig) validate() error {
|
||||||
|
if len(a.Providers) == 0 {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers must contain at least one entry")
|
||||||
|
}
|
||||||
|
for name, p := range a.Providers {
|
||||||
|
if strings.TrimSpace(name) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers contains an entry with an empty name")
|
||||||
|
}
|
||||||
|
switch p.Type {
|
||||||
|
case "litellm", "ollama", "openrouter":
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.type %q is not supported", name, p.Type)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(p.BaseURL) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.base_url is required", name)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(p.APIKey) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.api_key is required", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.Embeddings.Dimensions <= 0 {
|
||||||
|
return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := a.validateChain("ai.embeddings", a.Embeddings.Chain()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := a.validateChain("ai.metadata", a.Metadata.Chain()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if a.Background != nil {
|
||||||
|
if a.Background.Embeddings != nil {
|
||||||
|
if err := a.validateChain("ai.background.embeddings", a.Background.Embeddings.AsTargets()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if a.Background.Metadata != nil {
|
||||||
|
if err := a.validateChain("ai.background.metadata", a.Background.Metadata.AsTargets()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a AIConfig) validateChain(prefix string, chain []RoleTarget) error {
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return fmt.Errorf("invalid config: %s.primary must reference a configured provider and model", prefix)
|
||||||
|
}
|
||||||
|
for i, target := range chain {
|
||||||
|
if _, ok := a.Providers[target.Provider]; !ok {
|
||||||
|
return fmt.Errorf("invalid config: %s[%d] references unknown provider %q", prefix, i, target.Provider)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -7,28 +7,23 @@ import (
|
|||||||
|
|
||||||
func validConfig() Config {
|
func validConfig() Config {
|
||||||
return Config{
|
return Config{
|
||||||
Server: ServerConfig{Port: 8080},
|
Version: CurrentConfigVersion,
|
||||||
MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute},
|
Server: ServerConfig{Port: 8080},
|
||||||
|
MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute},
|
||||||
Auth: AuthConfig{
|
Auth: AuthConfig{
|
||||||
Keys: []APIKey{{ID: "test", Value: "secret"}},
|
Keys: []APIKey{{ID: "test", Value: "secret"}},
|
||||||
},
|
},
|
||||||
Database: DatabaseConfig{URL: "postgres://example"},
|
Database: DatabaseConfig{URL: "postgres://example"},
|
||||||
AI: AIConfig{
|
AI: AIConfig{
|
||||||
Provider: "litellm",
|
Providers: map[string]ProviderConfig{
|
||||||
Embeddings: AIEmbeddingConfig{
|
"default": {Type: "litellm", BaseURL: "http://localhost:4000/v1", APIKey: "key"},
|
||||||
|
},
|
||||||
|
Embeddings: EmbeddingsRoleConfig{
|
||||||
Dimensions: 1536,
|
Dimensions: 1536,
|
||||||
|
Primary: RoleTarget{Provider: "default", Model: "text-embed"},
|
||||||
},
|
},
|
||||||
LiteLLM: LiteLLMConfig{
|
Metadata: MetadataRoleConfig{
|
||||||
BaseURL: "http://localhost:4000/v1",
|
Primary: RoleTarget{Provider: "default", Model: "gpt-4"},
|
||||||
APIKey: "key",
|
|
||||||
},
|
|
||||||
Ollama: OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
OpenRouter: OpenRouterAIConfig{
|
|
||||||
BaseURL: "https://openrouter.ai/api/v1",
|
|
||||||
APIKey: "key",
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Search: SearchConfig{DefaultLimit: 10, MaxLimit: 50},
|
Search: SearchConfig{DefaultLimit: 10, MaxLimit: 50},
|
||||||
@@ -36,29 +31,44 @@ func validConfig() Config {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateAcceptsSupportedProviders(t *testing.T) {
|
func TestValidateAcceptsSupportedProviderTypes(t *testing.T) {
|
||||||
cfg := validConfig()
|
for _, providerType := range []string{"litellm", "ollama", "openrouter"} {
|
||||||
if err := cfg.Validate(); err != nil {
|
cfg := validConfig()
|
||||||
t.Fatalf("Validate litellm error = %v", err)
|
p := cfg.AI.Providers["default"]
|
||||||
}
|
p.Type = providerType
|
||||||
|
cfg.AI.Providers["default"] = p
|
||||||
cfg.AI.Provider = "ollama"
|
if err := cfg.Validate(); err != nil {
|
||||||
if err := cfg.Validate(); err != nil {
|
t.Fatalf("Validate %s error = %v", providerType, err)
|
||||||
t.Fatalf("Validate ollama error = %v", err)
|
}
|
||||||
}
|
|
||||||
|
|
||||||
cfg.AI.Provider = "openrouter"
|
|
||||||
if err := cfg.Validate(); err != nil {
|
|
||||||
t.Fatalf("Validate openrouter error = %v", err)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateRejectsInvalidProvider(t *testing.T) {
|
func TestValidateRejectsInvalidProviderType(t *testing.T) {
|
||||||
cfg := validConfig()
|
cfg := validConfig()
|
||||||
cfg.AI.Provider = "unknown"
|
p := cfg.AI.Providers["default"]
|
||||||
|
p.Type = "unknown"
|
||||||
|
cfg.AI.Providers["default"] = p
|
||||||
|
|
||||||
if err := cfg.Validate(); err == nil {
|
if err := cfg.Validate(); err == nil {
|
||||||
t.Fatal("Validate() error = nil, want error for unsupported provider")
|
t.Fatal("Validate() error = nil, want error for unsupported provider type")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateRejectsChainWithUnknownProvider(t *testing.T) {
|
||||||
|
cfg := validConfig()
|
||||||
|
cfg.AI.Metadata.Primary = RoleTarget{Provider: "does-not-exist", Model: "x"}
|
||||||
|
|
||||||
|
if err := cfg.Validate(); err == nil {
|
||||||
|
t.Fatal("Validate() error = nil, want error for chain referencing unknown provider")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateRejectsEmptyProviders(t *testing.T) {
|
||||||
|
cfg := validConfig()
|
||||||
|
cfg.AI.Providers = map[string]ProviderConfig{}
|
||||||
|
|
||||||
|
if err := cfg.Validate(); err == nil {
|
||||||
|
t.Fatal("Validate() error = nil, want error for empty providers")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
67
internal/generatedmodels/sql_public_agent_guardrails.go
Normal file
67
internal/generatedmodels/sql_public_agent_guardrails.go
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicAgentGuardrails struct {
|
||||||
|
bun.BaseModel `bun:"table:public.agent_guardrails,alias:agent_guardrails"`
|
||||||
|
ID resolvespec_common.SqlUUID `bun:"id,type:uuid,pk,default:gen_random_uuid()," json:"id"`
|
||||||
|
Content resolvespec_common.SqlString `bun:"content,type:text,notnull," json:"content"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Description resolvespec_common.SqlString `bun:"description,type:text,default:'',notnull," json:"description"`
|
||||||
|
Name resolvespec_common.SqlString `bun:"name,type:text,notnull," json:"name"`
|
||||||
|
Severity resolvespec_common.SqlString `bun:"severity,type:text,default:'medium',notnull," json:"severity"`
|
||||||
|
Tags resolvespec_common.SqlString `bun:"tags,type:text,nullzero," json:"tags"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelGuardrailIDPublicPlanGuardrails []*ModelPublicPlanGuardrails `bun:"rel:has-many,join:id=guardrail_id" json:"relguardrailidpublicplanguardrails,omitempty"` // Has many ModelPublicPlanGuardrails
|
||||||
|
RelGuardrailIDPublicProjectGuardrails []*ModelPublicProjectGuardrails `bun:"rel:has-many,join:id=guardrail_id" json:"relguardrailidpublicprojectguardrails,omitempty"` // Has many ModelPublicProjectGuardrails
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicAgentGuardrails
|
||||||
|
func (m ModelPublicAgentGuardrails) TableName() string {
|
||||||
|
return "public.agent_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicAgentGuardrails
|
||||||
|
func (m ModelPublicAgentGuardrails) TableNameOnly() string {
|
||||||
|
return "agent_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicAgentGuardrails
|
||||||
|
func (m ModelPublicAgentGuardrails) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicAgentGuardrails) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicAgentGuardrails) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicAgentGuardrails) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicAgentGuardrails) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicAgentGuardrails) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicAgentGuardrails) GetPrefix() string {
|
||||||
|
return "AGG"
|
||||||
|
}
|
||||||
67
internal/generatedmodels/sql_public_agent_skills.go
Normal file
67
internal/generatedmodels/sql_public_agent_skills.go
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicAgentSkills struct {
|
||||||
|
bun.BaseModel `bun:"table:public.agent_skills,alias:agent_skills"`
|
||||||
|
ID resolvespec_common.SqlUUID `bun:"id,type:uuid,pk,default:gen_random_uuid()," json:"id"`
|
||||||
|
Content resolvespec_common.SqlString `bun:"content,type:text,notnull," json:"content"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Description resolvespec_common.SqlString `bun:"description,type:text,default:'',notnull," json:"description"`
|
||||||
|
Name resolvespec_common.SqlString `bun:"name,type:text,notnull," json:"name"`
|
||||||
|
Tags resolvespec_common.SqlString `bun:"tags,type:text,nullzero," json:"tags"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelRelatedSkillIDPublicLearnings []*ModelPublicLearnings `bun:"rel:has-many,join:id=related_skill_id" json:"relrelatedskillidpubliclearnings,omitempty"` // Has many ModelPublicLearnings
|
||||||
|
RelSkillIDPublicPlanSkills []*ModelPublicPlanSkills `bun:"rel:has-many,join:id=skill_id" json:"relskillidpublicplanskills,omitempty"` // Has many ModelPublicPlanSkills
|
||||||
|
RelSkillIDPublicProjectSkills []*ModelPublicProjectSkills `bun:"rel:has-many,join:id=skill_id" json:"relskillidpublicprojectskills,omitempty"` // Has many ModelPublicProjectSkills
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicAgentSkills
|
||||||
|
func (m ModelPublicAgentSkills) TableName() string {
|
||||||
|
return "public.agent_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicAgentSkills
|
||||||
|
func (m ModelPublicAgentSkills) TableNameOnly() string {
|
||||||
|
return "agent_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicAgentSkills
|
||||||
|
func (m ModelPublicAgentSkills) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicAgentSkills) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicAgentSkills) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicAgentSkills) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicAgentSkills) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicAgentSkills) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicAgentSkills) GetPrefix() string {
|
||||||
|
return "ASG"
|
||||||
|
}
|
||||||
69
internal/generatedmodels/sql_public_chat_histories.go
Normal file
69
internal/generatedmodels/sql_public_chat_histories.go
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicChatHistories struct {
|
||||||
|
bun.BaseModel `bun:"table:public.chat_histories,alias:chat_histories"`
|
||||||
|
ID resolvespec_common.SqlUUID `bun:"id,type:uuid,pk,default:gen_random_uuid()," json:"id"`
|
||||||
|
AgentID resolvespec_common.SqlString `bun:"agent_id,type:text,nullzero," json:"agent_id"`
|
||||||
|
Channel resolvespec_common.SqlString `bun:"channel,type:text,nullzero," json:"channel"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Messages resolvespec_common.SqlJSONB `bun:"messages,type:jsonb,default:'[',notnull," json:"messages"`
|
||||||
|
Metadata resolvespec_common.SqlJSONB `bun:"metadata,type:jsonb,default:'{}',notnull," json:"metadata"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,nullzero," json:"project_id"`
|
||||||
|
SessionID resolvespec_common.SqlString `bun:"session_id,type:text,notnull," json:"session_id"`
|
||||||
|
Summary resolvespec_common.SqlString `bun:"summary,type:text,nullzero," json:"summary"`
|
||||||
|
Title resolvespec_common.SqlString `bun:"title,type:text,nullzero," json:"title"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicChatHistories
|
||||||
|
func (m ModelPublicChatHistories) TableName() string {
|
||||||
|
return "public.chat_histories"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicChatHistories
|
||||||
|
func (m ModelPublicChatHistories) TableNameOnly() string {
|
||||||
|
return "chat_histories"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicChatHistories
|
||||||
|
func (m ModelPublicChatHistories) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicChatHistories) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicChatHistories) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicChatHistories) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicChatHistories) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicChatHistories) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicChatHistories) GetPrefix() string {
|
||||||
|
return "CHH"
|
||||||
|
}
|
||||||
66
internal/generatedmodels/sql_public_embeddings.go
Normal file
66
internal/generatedmodels/sql_public_embeddings.go
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicEmbeddings struct {
|
||||||
|
bun.BaseModel `bun:"table:public.embeddings,alias:embeddings"`
|
||||||
|
ID resolvespec_common.SqlInt64 `bun:"id,type:bigserial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),nullzero," json:"created_at"`
|
||||||
|
Dim resolvespec_common.SqlInt32 `bun:"dim,type:int,notnull," json:"dim"`
|
||||||
|
Embedding resolvespec_common.SqlString `bun:"embedding,type:vector,notnull," json:"embedding"`
|
||||||
|
GUID resolvespec_common.SqlUUID `bun:"guid,type:uuid,default:gen_random_uuid(),notnull," json:"guid"`
|
||||||
|
Model resolvespec_common.SqlString `bun:"model,type:text,notnull,unique:uidx_embeddings_thought_id_model," json:"model"`
|
||||||
|
ThoughtID resolvespec_common.SqlUUID `bun:"thought_id,type:uuid,notnull,unique:uidx_embeddings_thought_id_model," json:"thought_id"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),nullzero," json:"updated_at"`
|
||||||
|
RelThoughtID *ModelPublicThoughts `bun:"rel:has-one,join:thought_id=guid" json:"relthoughtid,omitempty"` // Has one ModelPublicThoughts
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicEmbeddings
|
||||||
|
func (m ModelPublicEmbeddings) TableName() string {
|
||||||
|
return "public.embeddings"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicEmbeddings
|
||||||
|
func (m ModelPublicEmbeddings) TableNameOnly() string {
|
||||||
|
return "embeddings"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicEmbeddings
|
||||||
|
func (m ModelPublicEmbeddings) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicEmbeddings) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicEmbeddings) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicEmbeddings) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicEmbeddings) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicEmbeddings) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicEmbeddings) GetPrefix() string {
|
||||||
|
return "EMB"
|
||||||
|
}
|
||||||
83
internal/generatedmodels/sql_public_learnings.go
Normal file
83
internal/generatedmodels/sql_public_learnings.go
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicLearnings struct {
|
||||||
|
bun.BaseModel `bun:"table:public.learnings,alias:learnings"`
|
||||||
|
ID resolvespec_common.SqlUUID `bun:"id,type:uuid,pk,default:gen_random_uuid()," json:"id"`
|
||||||
|
ActionRequired bool `bun:"action_required,type:boolean,default:false,notnull," json:"action_required"`
|
||||||
|
Area resolvespec_common.SqlString `bun:"area,type:text,default:'other',notnull," json:"area"`
|
||||||
|
Category resolvespec_common.SqlString `bun:"category,type:text,default:'insight',notnull," json:"category"`
|
||||||
|
Confidence resolvespec_common.SqlString `bun:"confidence,type:text,default:'hypothesis',notnull," json:"confidence"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Details resolvespec_common.SqlString `bun:"details,type:text,default:'',notnull," json:"details"`
|
||||||
|
DuplicateOfLearningID resolvespec_common.SqlUUID `bun:"duplicate_of_learning_id,type:uuid,nullzero," json:"duplicate_of_learning_id"`
|
||||||
|
Priority resolvespec_common.SqlString `bun:"priority,type:text,default:'medium',notnull," json:"priority"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,nullzero," json:"project_id"`
|
||||||
|
RelatedSkillID resolvespec_common.SqlUUID `bun:"related_skill_id,type:uuid,nullzero," json:"related_skill_id"`
|
||||||
|
RelatedThoughtID resolvespec_common.SqlUUID `bun:"related_thought_id,type:uuid,nullzero," json:"related_thought_id"`
|
||||||
|
ReviewedAt resolvespec_common.SqlTimeStamp `bun:"reviewed_at,type:timestamptz,nullzero," json:"reviewed_at"`
|
||||||
|
ReviewedBy resolvespec_common.SqlString `bun:"reviewed_by,type:text,nullzero," json:"reviewed_by"`
|
||||||
|
SourceRef resolvespec_common.SqlString `bun:"source_ref,type:text,nullzero," json:"source_ref"`
|
||||||
|
SourceType resolvespec_common.SqlString `bun:"source_type,type:text,nullzero," json:"source_type"`
|
||||||
|
Status resolvespec_common.SqlString `bun:"status,type:text,default:'pending',notnull," json:"status"`
|
||||||
|
Summary resolvespec_common.SqlString `bun:"summary,type:text,notnull," json:"summary"`
|
||||||
|
SupersedesLearningID resolvespec_common.SqlUUID `bun:"supersedes_learning_id,type:uuid,nullzero," json:"supersedes_learning_id"`
|
||||||
|
Tags resolvespec_common.SqlString `bun:"tags,type:text,nullzero," json:"tags"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelDuplicateOfLearningID *ModelPublicLearnings `bun:"rel:has-one,join:duplicate_of_learning_id=id" json:"relduplicateoflearningid,omitempty"` // Has one ModelPublicLearnings
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
RelRelatedSkillID *ModelPublicAgentSkills `bun:"rel:has-one,join:related_skill_id=id" json:"relrelatedskillid,omitempty"` // Has one ModelPublicAgentSkills
|
||||||
|
RelRelatedThoughtID *ModelPublicThoughts `bun:"rel:has-one,join:related_thought_id=guid" json:"relrelatedthoughtid,omitempty"` // Has one ModelPublicThoughts
|
||||||
|
RelSupersedesLearningID *ModelPublicLearnings `bun:"rel:has-one,join:supersedes_learning_id=id" json:"relsupersedeslearningid,omitempty"` // Has one ModelPublicLearnings
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicLearnings
|
||||||
|
func (m ModelPublicLearnings) TableName() string {
|
||||||
|
return "public.learnings"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicLearnings
|
||||||
|
func (m ModelPublicLearnings) TableNameOnly() string {
|
||||||
|
return "learnings"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicLearnings
|
||||||
|
func (m ModelPublicLearnings) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicLearnings) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicLearnings) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicLearnings) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicLearnings) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicLearnings) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicLearnings) GetPrefix() string {
|
||||||
|
return "LEA"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_plan_dependencies.go
Normal file
63
internal/generatedmodels/sql_public_plan_dependencies.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicPlanDependencies struct {
|
||||||
|
bun.BaseModel `bun:"table:public.plan_dependencies,alias:plan_dependencies"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
DependsOnPlanID resolvespec_common.SqlUUID `bun:"depends_on_plan_id,type:uuid,notnull,unique:uidx_plan_dependencies_plan_id_depends_on_plan_id," json:"depends_on_plan_id"`
|
||||||
|
PlanID resolvespec_common.SqlUUID `bun:"plan_id,type:uuid,notnull,unique:uidx_plan_dependencies_plan_id_depends_on_plan_id," json:"plan_id"`
|
||||||
|
RelDependsOnPlanID *ModelPublicPlans `bun:"rel:has-one,join:depends_on_plan_id=id" json:"reldependsonplanid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
RelPlanID *ModelPublicPlans `bun:"rel:has-one,join:plan_id=id" json:"relplanid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicPlanDependencies
|
||||||
|
func (m ModelPublicPlanDependencies) TableName() string {
|
||||||
|
return "public.plan_dependencies"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicPlanDependencies
|
||||||
|
func (m ModelPublicPlanDependencies) TableNameOnly() string {
|
||||||
|
return "plan_dependencies"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicPlanDependencies
|
||||||
|
func (m ModelPublicPlanDependencies) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicPlanDependencies) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicPlanDependencies) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicPlanDependencies) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicPlanDependencies) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicPlanDependencies) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicPlanDependencies) GetPrefix() string {
|
||||||
|
return "PDL"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_plan_guardrails.go
Normal file
63
internal/generatedmodels/sql_public_plan_guardrails.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicPlanGuardrails struct {
|
||||||
|
bun.BaseModel `bun:"table:public.plan_guardrails,alias:plan_guardrails"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
GuardrailID resolvespec_common.SqlUUID `bun:"guardrail_id,type:uuid,notnull,unique:uidx_plan_guardrails_plan_id_guardrail_id," json:"guardrail_id"`
|
||||||
|
PlanID resolvespec_common.SqlUUID `bun:"plan_id,type:uuid,notnull,unique:uidx_plan_guardrails_plan_id_guardrail_id," json:"plan_id"`
|
||||||
|
RelGuardrailID *ModelPublicAgentGuardrails `bun:"rel:has-one,join:guardrail_id=id" json:"relguardrailid,omitempty"` // Has one ModelPublicAgentGuardrails
|
||||||
|
RelPlanID *ModelPublicPlans `bun:"rel:has-one,join:plan_id=id" json:"relplanid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicPlanGuardrails
|
||||||
|
func (m ModelPublicPlanGuardrails) TableName() string {
|
||||||
|
return "public.plan_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicPlanGuardrails
|
||||||
|
func (m ModelPublicPlanGuardrails) TableNameOnly() string {
|
||||||
|
return "plan_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicPlanGuardrails
|
||||||
|
func (m ModelPublicPlanGuardrails) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicPlanGuardrails) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicPlanGuardrails) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicPlanGuardrails) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicPlanGuardrails) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicPlanGuardrails) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicPlanGuardrails) GetPrefix() string {
|
||||||
|
return "PGL"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_plan_related_plans.go
Normal file
63
internal/generatedmodels/sql_public_plan_related_plans.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicPlanRelatedPlans struct {
|
||||||
|
bun.BaseModel `bun:"table:public.plan_related_plans,alias:plan_related_plans"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
PlanAID resolvespec_common.SqlUUID `bun:"plan_a_id,type:uuid,notnull,unique:uidx_plan_related_plans_plan_a_id_plan_b_id," json:"plan_a_id"`
|
||||||
|
PlanBID resolvespec_common.SqlUUID `bun:"plan_b_id,type:uuid,notnull,unique:uidx_plan_related_plans_plan_a_id_plan_b_id," json:"plan_b_id"`
|
||||||
|
RelPlanAID *ModelPublicPlans `bun:"rel:has-one,join:plan_a_id=id" json:"relplanaid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
RelPlanBID *ModelPublicPlans `bun:"rel:has-one,join:plan_b_id=id" json:"relplanbid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicPlanRelatedPlans
|
||||||
|
func (m ModelPublicPlanRelatedPlans) TableName() string {
|
||||||
|
return "public.plan_related_plans"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicPlanRelatedPlans
|
||||||
|
func (m ModelPublicPlanRelatedPlans) TableNameOnly() string {
|
||||||
|
return "plan_related_plans"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicPlanRelatedPlans
|
||||||
|
func (m ModelPublicPlanRelatedPlans) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicPlanRelatedPlans) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicPlanRelatedPlans) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicPlanRelatedPlans) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicPlanRelatedPlans) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicPlanRelatedPlans) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicPlanRelatedPlans) GetPrefix() string {
|
||||||
|
return "PRP"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_plan_skills.go
Normal file
63
internal/generatedmodels/sql_public_plan_skills.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicPlanSkills struct {
|
||||||
|
bun.BaseModel `bun:"table:public.plan_skills,alias:plan_skills"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
PlanID resolvespec_common.SqlUUID `bun:"plan_id,type:uuid,notnull,unique:uidx_plan_skills_plan_id_skill_id," json:"plan_id"`
|
||||||
|
SkillID resolvespec_common.SqlUUID `bun:"skill_id,type:uuid,notnull,unique:uidx_plan_skills_plan_id_skill_id," json:"skill_id"`
|
||||||
|
RelPlanID *ModelPublicPlans `bun:"rel:has-one,join:plan_id=id" json:"relplanid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
RelSkillID *ModelPublicAgentSkills `bun:"rel:has-one,join:skill_id=id" json:"relskillid,omitempty"` // Has one ModelPublicAgentSkills
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicPlanSkills
|
||||||
|
func (m ModelPublicPlanSkills) TableName() string {
|
||||||
|
return "public.plan_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicPlanSkills
|
||||||
|
func (m ModelPublicPlanSkills) TableNameOnly() string {
|
||||||
|
return "plan_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicPlanSkills
|
||||||
|
func (m ModelPublicPlanSkills) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicPlanSkills) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicPlanSkills) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicPlanSkills) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicPlanSkills) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicPlanSkills) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicPlanSkills) GetPrefix() string {
|
||||||
|
return "PSL"
|
||||||
|
}
|
||||||
80
internal/generatedmodels/sql_public_plans.go
Normal file
80
internal/generatedmodels/sql_public_plans.go
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicPlans struct {
|
||||||
|
bun.BaseModel `bun:"table:public.plans,alias:plans"`
|
||||||
|
ID resolvespec_common.SqlUUID `bun:"id,type:uuid,pk,default:gen_random_uuid()," json:"id"`
|
||||||
|
CompletedAt resolvespec_common.SqlTimeStamp `bun:"completed_at,type:timestamptz,nullzero," json:"completed_at"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Description resolvespec_common.SqlString `bun:"description,type:text,default:'',notnull," json:"description"`
|
||||||
|
DueDate resolvespec_common.SqlTimeStamp `bun:"due_date,type:timestamptz,nullzero," json:"due_date"`
|
||||||
|
LastReviewedAt resolvespec_common.SqlTimeStamp `bun:"last_reviewed_at,type:timestamptz,nullzero," json:"last_reviewed_at"`
|
||||||
|
Owner resolvespec_common.SqlString `bun:"owner,type:text,nullzero," json:"owner"`
|
||||||
|
Priority resolvespec_common.SqlString `bun:"priority,type:text,default:'medium',notnull," json:"priority"` // low, medium, high, critical
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,nullzero," json:"project_id"`
|
||||||
|
ReviewedBy resolvespec_common.SqlString `bun:"reviewed_by,type:text,nullzero," json:"reviewed_by"`
|
||||||
|
Status resolvespec_common.SqlString `bun:"status,type:text,default:'draft',notnull," json:"status"` // draft, active, blocked, completed, cancelled, superseded
|
||||||
|
SupersedesPlanID resolvespec_common.SqlUUID `bun:"supersedes_plan_id,type:uuid,nullzero," json:"supersedes_plan_id"`
|
||||||
|
Tags resolvespec_common.SqlString `bun:"tags,type:text,nullzero," json:"tags"`
|
||||||
|
Title resolvespec_common.SqlString `bun:"title,type:text,notnull," json:"title"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
RelSupersedesPlanID *ModelPublicPlans `bun:"rel:has-one,join:supersedes_plan_id=id" json:"relsupersedesplanid,omitempty"` // Has one ModelPublicPlans
|
||||||
|
RelDependsOnPlanIDPublicPlanDependencies []*ModelPublicPlanDependencies `bun:"rel:has-many,join:id=depends_on_plan_id" json:"reldependsonplanidpublicplandependencies,omitempty"` // Has many ModelPublicPlanDependencies
|
||||||
|
RelPlanIDPublicPlanDependencies []*ModelPublicPlanDependencies `bun:"rel:has-many,join:id=plan_id" json:"relplanidpublicplandependencies,omitempty"` // Has many ModelPublicPlanDependencies
|
||||||
|
RelPlanAIDPublicPlanRelatedPlans []*ModelPublicPlanRelatedPlans `bun:"rel:has-many,join:id=plan_a_id" json:"relplanaidpublicplanrelatedplans,omitempty"` // Has many ModelPublicPlanRelatedPlans
|
||||||
|
RelPlanBIDPublicPlanRelatedPlans []*ModelPublicPlanRelatedPlans `bun:"rel:has-many,join:id=plan_b_id" json:"relplanbidpublicplanrelatedplans,omitempty"` // Has many ModelPublicPlanRelatedPlans
|
||||||
|
RelPlanIDPublicPlanSkills []*ModelPublicPlanSkills `bun:"rel:has-many,join:id=plan_id" json:"relplanidpublicplanskills,omitempty"` // Has many ModelPublicPlanSkills
|
||||||
|
RelPlanIDPublicPlanGuardrails []*ModelPublicPlanGuardrails `bun:"rel:has-many,join:id=plan_id" json:"relplanidpublicplanguardrails,omitempty"` // Has many ModelPublicPlanGuardrails
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicPlans
|
||||||
|
func (m ModelPublicPlans) TableName() string {
|
||||||
|
return "public.plans"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicPlans
|
||||||
|
func (m ModelPublicPlans) TableNameOnly() string {
|
||||||
|
return "plans"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicPlans
|
||||||
|
func (m ModelPublicPlans) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicPlans) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicPlans) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicPlans) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicPlans) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicPlans) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicPlans) GetPrefix() string {
|
||||||
|
return "PLA"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_project_guardrails.go
Normal file
63
internal/generatedmodels/sql_public_project_guardrails.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicProjectGuardrails struct {
|
||||||
|
bun.BaseModel `bun:"table:public.project_guardrails,alias:project_guardrails"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
GuardrailID resolvespec_common.SqlUUID `bun:"guardrail_id,type:uuid,notnull," json:"guardrail_id"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,notnull," json:"project_id"`
|
||||||
|
RelGuardrailID *ModelPublicAgentGuardrails `bun:"rel:has-one,join:guardrail_id=id" json:"relguardrailid,omitempty"` // Has one ModelPublicAgentGuardrails
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicProjectGuardrails
|
||||||
|
func (m ModelPublicProjectGuardrails) TableName() string {
|
||||||
|
return "public.project_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicProjectGuardrails
|
||||||
|
func (m ModelPublicProjectGuardrails) TableNameOnly() string {
|
||||||
|
return "project_guardrails"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicProjectGuardrails
|
||||||
|
func (m ModelPublicProjectGuardrails) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicProjectGuardrails) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicProjectGuardrails) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicProjectGuardrails) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicProjectGuardrails) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicProjectGuardrails) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicProjectGuardrails) GetPrefix() string {
|
||||||
|
return "PGR"
|
||||||
|
}
|
||||||
63
internal/generatedmodels/sql_public_project_skills.go
Normal file
63
internal/generatedmodels/sql_public_project_skills.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicProjectSkills struct {
|
||||||
|
bun.BaseModel `bun:"table:public.project_skills,alias:project_skills"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,notnull," json:"project_id"`
|
||||||
|
SkillID resolvespec_common.SqlUUID `bun:"skill_id,type:uuid,notnull," json:"skill_id"`
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
RelSkillID *ModelPublicAgentSkills `bun:"rel:has-one,join:skill_id=id" json:"relskillid,omitempty"` // Has one ModelPublicAgentSkills
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicProjectSkills
|
||||||
|
func (m ModelPublicProjectSkills) TableName() string {
|
||||||
|
return "public.project_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicProjectSkills
|
||||||
|
func (m ModelPublicProjectSkills) TableNameOnly() string {
|
||||||
|
return "project_skills"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicProjectSkills
|
||||||
|
func (m ModelPublicProjectSkills) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicProjectSkills) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicProjectSkills) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicProjectSkills) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicProjectSkills) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicProjectSkills) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicProjectSkills) GetPrefix() string {
|
||||||
|
return "PSR"
|
||||||
|
}
|
||||||
71
internal/generatedmodels/sql_public_projects.go
Normal file
71
internal/generatedmodels/sql_public_projects.go
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicProjects struct {
|
||||||
|
bun.BaseModel `bun:"table:public.projects,alias:projects"`
|
||||||
|
ID resolvespec_common.SqlInt64 `bun:"id,type:bigserial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),nullzero," json:"created_at"`
|
||||||
|
Description resolvespec_common.SqlString `bun:"description,type:text,nullzero," json:"description"`
|
||||||
|
GUID resolvespec_common.SqlUUID `bun:"guid,type:uuid,default:gen_random_uuid(),notnull," json:"guid"`
|
||||||
|
LastActiveAt resolvespec_common.SqlTimeStamp `bun:"last_active_at,type:timestamptz,default:now(),nullzero," json:"last_active_at"`
|
||||||
|
Name resolvespec_common.SqlString `bun:"name,type:text,notnull," json:"name"`
|
||||||
|
ThoughtCount resolvespec_common.SqlInt64 `bun:"thought_count,scanonly" json:"thought_count"`
|
||||||
|
RelProjectIDPublicThoughts []*ModelPublicThoughts `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicthoughts,omitempty"` // Has many ModelPublicThoughts
|
||||||
|
RelProjectIDPublicStoredFiles []*ModelPublicStoredFiles `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicstoredfiles,omitempty"` // Has many ModelPublicStoredFiles
|
||||||
|
RelProjectIDPublicChatHistories []*ModelPublicChatHistories `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicchathistories,omitempty"` // Has many ModelPublicChatHistories
|
||||||
|
RelProjectIDPublicLearnings []*ModelPublicLearnings `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpubliclearnings,omitempty"` // Has many ModelPublicLearnings
|
||||||
|
RelProjectIDPublicPlans []*ModelPublicPlans `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicplans,omitempty"` // Has many ModelPublicPlans
|
||||||
|
RelProjectIDPublicProjectSkills []*ModelPublicProjectSkills `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicprojectskills,omitempty"` // Has many ModelPublicProjectSkills
|
||||||
|
RelProjectIDPublicProjectGuardrails []*ModelPublicProjectGuardrails `bun:"rel:has-many,join:guid=project_id" json:"relprojectidpublicprojectguardrails,omitempty"` // Has many ModelPublicProjectGuardrails
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicProjects
|
||||||
|
func (m ModelPublicProjects) TableName() string {
|
||||||
|
return "public.projects"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicProjects
|
||||||
|
func (m ModelPublicProjects) TableNameOnly() string {
|
||||||
|
return "projects"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicProjects
|
||||||
|
func (m ModelPublicProjects) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicProjects) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicProjects) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicProjects) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicProjects) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicProjects) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicProjects) GetPrefix() string {
|
||||||
|
return "PRO"
|
||||||
|
}
|
||||||
72
internal/generatedmodels/sql_public_stored_files.go
Normal file
72
internal/generatedmodels/sql_public_stored_files.go
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicStoredFiles struct {
|
||||||
|
bun.BaseModel `bun:"table:public.stored_files,alias:stored_files"`
|
||||||
|
ID resolvespec_common.SqlInt64 `bun:"id,type:bigserial,pk,autoincrement," json:"id"`
|
||||||
|
Content []byte `bun:"content,type:bytea,notnull," json:"content"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Encoding resolvespec_common.SqlString `bun:"encoding,type:text,default:'base64',notnull," json:"encoding"`
|
||||||
|
GUID resolvespec_common.SqlUUID `bun:"guid,type:uuid,default:gen_random_uuid(),notnull," json:"guid"`
|
||||||
|
Kind resolvespec_common.SqlString `bun:"kind,type:text,default:'file',notnull," json:"kind"`
|
||||||
|
MediaType resolvespec_common.SqlString `bun:"media_type,type:text,notnull," json:"media_type"`
|
||||||
|
Name resolvespec_common.SqlString `bun:"name,type:text,notnull," json:"name"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,nullzero," json:"project_id"`
|
||||||
|
Sha256 resolvespec_common.SqlString `bun:"sha256,type:text,notnull," json:"sha256"`
|
||||||
|
SizeBytes int64 `bun:"size_bytes,type:bigint,notnull," json:"size_bytes"`
|
||||||
|
ThoughtID resolvespec_common.SqlUUID `bun:"thought_id,type:uuid,nullzero," json:"thought_id"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
RelThoughtID *ModelPublicThoughts `bun:"rel:has-one,join:thought_id=guid" json:"relthoughtid,omitempty"` // Has one ModelPublicThoughts
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicStoredFiles
|
||||||
|
func (m ModelPublicStoredFiles) TableName() string {
|
||||||
|
return "public.stored_files"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicStoredFiles
|
||||||
|
func (m ModelPublicStoredFiles) TableNameOnly() string {
|
||||||
|
return "stored_files"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicStoredFiles
|
||||||
|
func (m ModelPublicStoredFiles) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicStoredFiles) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicStoredFiles) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicStoredFiles) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicStoredFiles) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicStoredFiles) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicStoredFiles) GetPrefix() string {
|
||||||
|
return "SFT"
|
||||||
|
}
|
||||||
64
internal/generatedmodels/sql_public_thought_links.go
Normal file
64
internal/generatedmodels/sql_public_thought_links.go
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicThoughtLinks struct {
|
||||||
|
bun.BaseModel `bun:"table:public.thought_links,alias:thought_links"`
|
||||||
|
ID resolvespec_common.SqlInt32 `bun:"id,type:serial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),nullzero," json:"created_at"`
|
||||||
|
FromID int64 `bun:"from_id,type:bigint,notnull," json:"from_id"`
|
||||||
|
Relation resolvespec_common.SqlString `bun:"relation,type:text,notnull," json:"relation"`
|
||||||
|
ToID int64 `bun:"to_id,type:bigint,notnull," json:"to_id"`
|
||||||
|
RelFromID *ModelPublicThoughts `bun:"rel:has-one,join:from_id=id" json:"relfromid,omitempty"` // Has one ModelPublicThoughts
|
||||||
|
RelToID *ModelPublicThoughts `bun:"rel:has-one,join:to_id=id" json:"reltoid,omitempty"` // Has one ModelPublicThoughts
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicThoughtLinks
|
||||||
|
func (m ModelPublicThoughtLinks) TableName() string {
|
||||||
|
return "public.thought_links"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicThoughtLinks
|
||||||
|
func (m ModelPublicThoughtLinks) TableNameOnly() string {
|
||||||
|
return "thought_links"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicThoughtLinks
|
||||||
|
func (m ModelPublicThoughtLinks) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicThoughtLinks) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicThoughtLinks) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicThoughtLinks) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicThoughtLinks) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicThoughtLinks) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicThoughtLinks) GetPrefix() string {
|
||||||
|
return "TLH"
|
||||||
|
}
|
||||||
71
internal/generatedmodels/sql_public_thoughts.go
Normal file
71
internal/generatedmodels/sql_public_thoughts.go
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicThoughts struct {
|
||||||
|
bun.BaseModel `bun:"table:public.thoughts,alias:thoughts"`
|
||||||
|
ID resolvespec_common.SqlInt64 `bun:"id,type:bigserial,pk,autoincrement," json:"id"`
|
||||||
|
ArchivedAt resolvespec_common.SqlTimeStamp `bun:"archived_at,type:timestamptz,nullzero," json:"archived_at"`
|
||||||
|
Content resolvespec_common.SqlString `bun:"content,type:text,notnull," json:"content"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),nullzero," json:"created_at"`
|
||||||
|
GUID resolvespec_common.SqlUUID `bun:"guid,type:uuid,default:gen_random_uuid(),notnull," json:"guid"`
|
||||||
|
Metadata resolvespec_common.SqlJSONB `bun:"metadata,type:jsonb,default:'{}::jsonb',nullzero," json:"metadata"`
|
||||||
|
ProjectID resolvespec_common.SqlUUID `bun:"project_id,type:uuid,nullzero," json:"project_id"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),nullzero," json:"updated_at"`
|
||||||
|
RelProjectID *ModelPublicProjects `bun:"rel:has-one,join:project_id=guid" json:"relprojectid,omitempty"` // Has one ModelPublicProjects
|
||||||
|
RelFromIDPublicThoughtLinks []*ModelPublicThoughtLinks `bun:"rel:has-many,join:id=from_id" json:"relfromidpublicthoughtlinks,omitempty"` // Has many ModelPublicThoughtLinks
|
||||||
|
RelToIDPublicThoughtLinks []*ModelPublicThoughtLinks `bun:"rel:has-many,join:id=to_id" json:"reltoidpublicthoughtlinks,omitempty"` // Has many ModelPublicThoughtLinks
|
||||||
|
RelThoughtIDPublicEmbeddings []*ModelPublicEmbeddings `bun:"rel:has-many,join:guid=thought_id" json:"relthoughtidpublicembeddings,omitempty"` // Has many ModelPublicEmbeddings
|
||||||
|
RelThoughtIDPublicStoredFiles []*ModelPublicStoredFiles `bun:"rel:has-many,join:guid=thought_id" json:"relthoughtidpublicstoredfiles,omitempty"` // Has many ModelPublicStoredFiles
|
||||||
|
RelRelatedThoughtIDPublicLearnings []*ModelPublicLearnings `bun:"rel:has-many,join:guid=related_thought_id" json:"relrelatedthoughtidpubliclearnings,omitempty"` // Has many ModelPublicLearnings
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicThoughts
|
||||||
|
func (m ModelPublicThoughts) TableName() string {
|
||||||
|
return "public.thoughts"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicThoughts
|
||||||
|
func (m ModelPublicThoughts) TableNameOnly() string {
|
||||||
|
return "thoughts"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicThoughts
|
||||||
|
func (m ModelPublicThoughts) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicThoughts) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicThoughts) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicThoughts) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicThoughts) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicThoughts) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicThoughts) GetPrefix() string {
|
||||||
|
return "THO"
|
||||||
|
}
|
||||||
62
internal/generatedmodels/sql_public_tool_annotations.go
Normal file
62
internal/generatedmodels/sql_public_tool_annotations.go
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
// Code generated by relspecgo. DO NOT EDIT.
|
||||||
|
package generatedmodels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
resolvespec_common "github.com/bitechdev/ResolveSpec/pkg/spectypes"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ModelPublicToolAnnotations struct {
|
||||||
|
bun.BaseModel `bun:"table:public.tool_annotations,alias:tool_annotations"`
|
||||||
|
ID resolvespec_common.SqlInt64 `bun:"id,type:bigserial,pk,autoincrement," json:"id"`
|
||||||
|
CreatedAt resolvespec_common.SqlTimeStamp `bun:"created_at,type:timestamptz,default:now(),notnull," json:"created_at"`
|
||||||
|
Notes resolvespec_common.SqlString `bun:"notes,type:text,default:'',notnull," json:"notes"`
|
||||||
|
ToolName resolvespec_common.SqlString `bun:"tool_name,type:text,notnull," json:"tool_name"`
|
||||||
|
UpdatedAt resolvespec_common.SqlTimeStamp `bun:"updated_at,type:timestamptz,default:now(),notnull," json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableName returns the table name for ModelPublicToolAnnotations
|
||||||
|
func (m ModelPublicToolAnnotations) TableName() string {
|
||||||
|
return "public.tool_annotations"
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNameOnly returns the table name without schema for ModelPublicToolAnnotations
|
||||||
|
func (m ModelPublicToolAnnotations) TableNameOnly() string {
|
||||||
|
return "tool_annotations"
|
||||||
|
}
|
||||||
|
|
||||||
|
// SchemaName returns the schema name for ModelPublicToolAnnotations
|
||||||
|
func (m ModelPublicToolAnnotations) SchemaName() string {
|
||||||
|
return "public"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetID returns the primary key value
|
||||||
|
func (m ModelPublicToolAnnotations) GetID() int64 {
|
||||||
|
return m.ID.Int64()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDStr returns the primary key as a string
|
||||||
|
func (m ModelPublicToolAnnotations) GetIDStr() string {
|
||||||
|
return fmt.Sprintf("%v", m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetID sets the primary key value
|
||||||
|
func (m ModelPublicToolAnnotations) SetID(newid int64) {
|
||||||
|
m.UpdateID(newid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateID updates the primary key value
|
||||||
|
func (m *ModelPublicToolAnnotations) UpdateID(newid int64) {
|
||||||
|
m.ID.FromString(fmt.Sprintf("%d", newid))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIDName returns the name of the primary key column
|
||||||
|
func (m ModelPublicToolAnnotations) GetIDName() string {
|
||||||
|
return "id"
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPrefix returns the table prefix
|
||||||
|
func (m ModelPublicToolAnnotations) GetPrefix() string {
|
||||||
|
return "TAO"
|
||||||
|
}
|
||||||
@@ -35,11 +35,13 @@ type ToolSet struct {
|
|||||||
Files *tools.FilesTool
|
Files *tools.FilesTool
|
||||||
Backfill *tools.BackfillTool
|
Backfill *tools.BackfillTool
|
||||||
Reparse *tools.ReparseMetadataTool
|
Reparse *tools.ReparseMetadataTool
|
||||||
RetryMetadata *tools.RetryMetadataTool
|
RetryMetadata *tools.RetryEnrichmentTool
|
||||||
Maintenance *tools.MaintenanceTool
|
//Maintenance *tools.MaintenanceTool
|
||||||
Skills *tools.SkillsTool
|
Skills *tools.SkillsTool
|
||||||
ChatHistory *tools.ChatHistoryTool
|
ChatHistory *tools.ChatHistoryTool
|
||||||
Describe *tools.DescribeTool
|
Describe *tools.DescribeTool
|
||||||
|
Learnings *tools.LearningsTool
|
||||||
|
Plans *tools.PlansTool
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handlers groups the HTTP handlers produced for an MCP server instance.
|
// Handlers groups the HTTP handlers produced for an MCP server instance.
|
||||||
@@ -83,6 +85,8 @@ func NewHandlers(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onS
|
|||||||
registerSystemTools,
|
registerSystemTools,
|
||||||
registerThoughtTools,
|
registerThoughtTools,
|
||||||
registerProjectTools,
|
registerProjectTools,
|
||||||
|
registerLearningTools,
|
||||||
|
registerPlanTools,
|
||||||
registerFileTools,
|
registerFileTools,
|
||||||
registerMaintenanceTools,
|
registerMaintenanceTools,
|
||||||
registerSkillTools,
|
registerSkillTools,
|
||||||
@@ -249,6 +253,122 @@ func registerProjectTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func registerLearningTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_learning",
|
||||||
|
Description: "Create a curated learning record distinct from raw thoughts.",
|
||||||
|
}, toolSet.Learnings.Add); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "get_learning",
|
||||||
|
Description: "Retrieve a structured learning by id.",
|
||||||
|
}, toolSet.Learnings.Get); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "list_learnings",
|
||||||
|
Description: "List structured learnings with optional project, status, priority, tag, and text filters.",
|
||||||
|
}, toolSet.Learnings.List); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func registerPlanTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "create_plan",
|
||||||
|
Description: "Create a structured plan linked to a project.",
|
||||||
|
}, toolSet.Plans.Create); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "get_plan",
|
||||||
|
Description: "Retrieve a plan with its dependencies, related plans, skills, and guardrails.",
|
||||||
|
}, toolSet.Plans.Get); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "update_plan",
|
||||||
|
Description: "Update plan fields; only provided fields are changed.",
|
||||||
|
}, toolSet.Plans.Update); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "delete_plan",
|
||||||
|
Description: "Hard-delete a plan by id.",
|
||||||
|
}, toolSet.Plans.Delete); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "list_plans",
|
||||||
|
Description: "List plans with optional project, status, priority, owner, tag, and text filters.",
|
||||||
|
}, toolSet.Plans.List); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_plan_dependency",
|
||||||
|
Description: "Mark plan_id as depending on depends_on_plan_id (must complete first).",
|
||||||
|
}, toolSet.Plans.AddDependency); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "remove_plan_dependency",
|
||||||
|
Description: "Remove a dependency between two plans.",
|
||||||
|
}, toolSet.Plans.RemoveDependency); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_related_plan",
|
||||||
|
Description: "Link two plans as thematically related (bidirectional).",
|
||||||
|
}, toolSet.Plans.AddRelated); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "remove_related_plan",
|
||||||
|
Description: "Unlink two related plans.",
|
||||||
|
}, toolSet.Plans.RemoveRelated); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_plan_skill",
|
||||||
|
Description: "Link an agent skill to a plan.",
|
||||||
|
}, toolSet.Plans.AddSkill); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "remove_plan_skill",
|
||||||
|
Description: "Unlink an agent skill from a plan.",
|
||||||
|
}, toolSet.Plans.RemoveSkill); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "list_plan_skills",
|
||||||
|
Description: "List skills linked to a plan.",
|
||||||
|
}, toolSet.Plans.ListSkills); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_plan_guardrail",
|
||||||
|
Description: "Link an agent guardrail to a plan.",
|
||||||
|
}, toolSet.Plans.AddGuardrail); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "remove_plan_guardrail",
|
||||||
|
Description: "Unlink an agent guardrail from a plan.",
|
||||||
|
}, toolSet.Plans.RemoveGuardrail); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "list_plan_guardrails",
|
||||||
|
Description: "List guardrails linked to a plan.",
|
||||||
|
}, toolSet.Plans.ListGuardrails); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
server.AddResourceTemplate(&mcp.ResourceTemplate{
|
server.AddResourceTemplate(&mcp.ResourceTemplate{
|
||||||
Name: "stored_file",
|
Name: "stored_file",
|
||||||
@@ -302,30 +422,30 @@ func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
|||||||
}, toolSet.RetryMetadata.Handle); err != nil {
|
}, toolSet.RetryMetadata.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
// if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "add_maintenance_task",
|
// Name: "add_maintenance_task",
|
||||||
Description: "Create a recurring or one-time home maintenance task.",
|
// Description: "Create a recurring or one-time home maintenance task.",
|
||||||
}, toolSet.Maintenance.AddTask); err != nil {
|
// }, toolSet.Maintenance.AddTask); err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
// if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "log_maintenance",
|
// Name: "log_maintenance",
|
||||||
Description: "Log completed maintenance; updates next due date.",
|
// Description: "Log completed maintenance; updates next due date.",
|
||||||
}, toolSet.Maintenance.LogWork); err != nil {
|
// }, toolSet.Maintenance.LogWork); err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
// if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "get_upcoming_maintenance",
|
// Name: "get_upcoming_maintenance",
|
||||||
Description: "List maintenance tasks due within the next N days.",
|
// Description: "List maintenance tasks due within the next N days.",
|
||||||
}, toolSet.Maintenance.GetUpcoming); err != nil {
|
// }, toolSet.Maintenance.GetUpcoming); err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
// if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "search_maintenance_history",
|
// Name: "search_maintenance_history",
|
||||||
Description: "Search the maintenance log by task name, category, or date range.",
|
// Description: "Search the maintenance log by task name, category, or date range.",
|
||||||
}, toolSet.Maintenance.SearchHistory); err != nil {
|
// }, toolSet.Maintenance.SearchHistory); err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -436,7 +556,7 @@ func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
|||||||
func registerDescribeTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerDescribeTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "describe_tools",
|
Name: "describe_tools",
|
||||||
Description: "Call first each session. All tools with categories and usage notes. Categories: system, thoughts, projects, files, admin, maintenance, skills, chat, meta.",
|
Description: "Call first each session. All tools with categories and usage notes. Categories: system, thoughts, projects, files, admin, maintenance, skills, plans, chat, meta.",
|
||||||
}, toolSet.Describe.Describe); err != nil {
|
}, toolSet.Describe.Describe); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -477,6 +597,28 @@ func BuildToolCatalog() []tools.ToolEntry {
|
|||||||
{Name: "get_active_project", Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.", Category: "projects"},
|
{Name: "get_active_project", Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.", Category: "projects"},
|
||||||
{Name: "get_project_context", Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.", Category: "projects"},
|
{Name: "get_project_context", Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.", Category: "projects"},
|
||||||
|
|
||||||
|
// learnings
|
||||||
|
{Name: "add_learning", Description: "Create a curated learning record distinct from raw thoughts.", Category: "projects"},
|
||||||
|
{Name: "get_learning", Description: "Retrieve a structured learning by id.", Category: "projects"},
|
||||||
|
{Name: "list_learnings", Description: "List structured learnings with optional project, category, area, status, priority, tag, and text filters.", Category: "projects"},
|
||||||
|
|
||||||
|
// plans
|
||||||
|
{Name: "create_plan", Description: "Create a structured plan with status, priority, owner, due date, and optional project link.", Category: "plans"},
|
||||||
|
{Name: "get_plan", Description: "Retrieve a full plan including dependencies (depends_on/blocks), related plans, linked skills, and guardrails.", Category: "plans"},
|
||||||
|
{Name: "update_plan", Description: "Partially update a plan; only provided fields are changed. Use mark_reviewed to stamp last_reviewed_at.", Category: "plans"},
|
||||||
|
{Name: "delete_plan", Description: "Hard-delete a plan by id.", Category: "plans"},
|
||||||
|
{Name: "list_plans", Description: "List plans with optional filters: project, status, priority, owner, tag, and full-text query.", Category: "plans"},
|
||||||
|
{Name: "add_plan_dependency", Description: "Declare that plan_id cannot proceed until depends_on_plan_id is complete.", Category: "plans"},
|
||||||
|
{Name: "remove_plan_dependency", Description: "Remove a directional dependency between two plans.", Category: "plans"},
|
||||||
|
{Name: "add_related_plan", Description: "Link two plans as thematically related (bidirectional, order-independent).", Category: "plans"},
|
||||||
|
{Name: "remove_related_plan", Description: "Unlink two related plans.", Category: "plans"},
|
||||||
|
{Name: "add_plan_skill", Description: "Link an agent skill to a plan so it is loaded with the plan's context.", Category: "plans"},
|
||||||
|
{Name: "remove_plan_skill", Description: "Unlink an agent skill from a plan.", Category: "plans"},
|
||||||
|
{Name: "list_plan_skills", Description: "List all skills linked to a plan.", Category: "plans"},
|
||||||
|
{Name: "add_plan_guardrail", Description: "Link an agent guardrail to a plan so it applies during plan execution.", Category: "plans"},
|
||||||
|
{Name: "remove_plan_guardrail", Description: "Unlink an agent guardrail from a plan.", Category: "plans"},
|
||||||
|
{Name: "list_plan_guardrails", Description: "List all guardrails linked to a plan.", Category: "plans"},
|
||||||
|
|
||||||
// files
|
// files
|
||||||
{Name: "upload_file", Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.", Category: "files"},
|
{Name: "upload_file", Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.", Category: "files"},
|
||||||
{Name: "save_file", Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.", Category: "files"},
|
{Name: "save_file", Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.", Category: "files"},
|
||||||
@@ -515,7 +657,7 @@ func BuildToolCatalog() []tools.ToolEntry {
|
|||||||
{Name: "delete_chat_history", Description: "Permanently delete a saved chat history by id.", Category: "chat"},
|
{Name: "delete_chat_history", Description: "Permanently delete a saved chat history by id.", Category: "chat"},
|
||||||
|
|
||||||
// meta
|
// meta
|
||||||
{Name: "describe_tools", Description: "Call this first in every session. Returns all available MCP tools with names, descriptions, categories, and your accumulated usage notes. Filter by category to narrow results. Available categories: system, thoughts, projects, files, admin, household, maintenance, calendar, meals, crm, skills, chat, meta.", Category: "meta"},
|
{Name: "describe_tools", Description: "Call this first in every session. Returns all available MCP tools with names, descriptions, categories, and your accumulated usage notes. Filter by category to narrow results. Available categories: system, thoughts, projects, files, admin, household, maintenance, calendar, meals, crm, skills, plans, chat, meta.", Category: "meta"},
|
||||||
{Name: "annotate_tool", Description: "Persist usage notes, gotchas, or workflow patterns for a specific tool. Notes survive across sessions and are returned by describe_tools. Call this whenever you discover something non-obvious about a tool's behaviour. Pass an empty string to clear notes.", Category: "meta"},
|
{Name: "annotate_tool", Description: "Persist usage notes, gotchas, or workflow patterns for a specific tool. Notes survive across sessions and are returned by describe_tools. Call this whenever you discover something non-obvious about a tool's behaviour. Pass an empty string to clear notes.", Category: "meta"},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
|||||||
|
|
||||||
want := []string{
|
want := []string{
|
||||||
"add_guardrail",
|
"add_guardrail",
|
||||||
|
"add_learning",
|
||||||
"add_maintenance_task",
|
"add_maintenance_task",
|
||||||
"add_project_guardrail",
|
"add_project_guardrail",
|
||||||
"add_project_skill",
|
"add_project_skill",
|
||||||
@@ -43,6 +44,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
|||||||
"describe_tools",
|
"describe_tools",
|
||||||
"get_active_project",
|
"get_active_project",
|
||||||
"get_chat_history",
|
"get_chat_history",
|
||||||
|
"get_learning",
|
||||||
"get_project_context",
|
"get_project_context",
|
||||||
"get_thought",
|
"get_thought",
|
||||||
"get_upcoming_maintenance",
|
"get_upcoming_maintenance",
|
||||||
@@ -51,6 +53,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
|||||||
"list_chat_histories",
|
"list_chat_histories",
|
||||||
"list_files",
|
"list_files",
|
||||||
"list_guardrails",
|
"list_guardrails",
|
||||||
|
"list_learnings",
|
||||||
"list_project_guardrails",
|
"list_project_guardrails",
|
||||||
"list_project_skills",
|
"list_project_skills",
|
||||||
"list_projects",
|
"list_projects",
|
||||||
|
|||||||
@@ -105,6 +105,86 @@ func TestStreamableHTTPReturnsStructuredToolErrors(t *testing.T) {
|
|||||||
t.Fatalf("build_date = %#v, want %q", got["build_date"], "2026-03-31T00:00:00Z")
|
t.Fatalf("build_date = %#v, want %q", got["build_date"], "2026-03-31T00:00:00Z")
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("add_learning_requires_summary", func(t *testing.T) {
|
||||||
|
_, err := cs.CallTool(context.Background(), &mcp.CallToolParams{
|
||||||
|
Name: "add_learning",
|
||||||
|
Arguments: map[string]any{},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("CallTool(add_learning) error = nil, want error")
|
||||||
|
}
|
||||||
|
|
||||||
|
rpcErr, data := requireWireError(t, err)
|
||||||
|
if rpcErr.Code != jsonrpc.CodeInvalidParams {
|
||||||
|
t.Fatalf("add_learning code = %d, want %d", rpcErr.Code, jsonrpc.CodeInvalidParams)
|
||||||
|
}
|
||||||
|
if data.Type != mcperrors.TypeInvalidArguments {
|
||||||
|
t.Fatalf("add_learning data.type = %q, want %q", data.Type, mcperrors.TypeInvalidArguments)
|
||||||
|
}
|
||||||
|
if data.Field != "summary" {
|
||||||
|
t.Fatalf("add_learning data.field = %q, want %q", data.Field, "summary")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("get_learning_requires_id", func(t *testing.T) {
|
||||||
|
_, err := cs.CallTool(context.Background(), &mcp.CallToolParams{
|
||||||
|
Name: "get_learning",
|
||||||
|
Arguments: map[string]any{},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("CallTool(get_learning) error = nil, want error")
|
||||||
|
}
|
||||||
|
|
||||||
|
rpcErr, data := requireWireError(t, err)
|
||||||
|
if rpcErr.Code != jsonrpc.CodeInvalidParams {
|
||||||
|
t.Fatalf("get_learning code = %d, want %d", rpcErr.Code, jsonrpc.CodeInvalidParams)
|
||||||
|
}
|
||||||
|
if data.Type != mcperrors.TypeInvalidArguments {
|
||||||
|
t.Fatalf("get_learning data.type = %q, want %q", data.Type, mcperrors.TypeInvalidArguments)
|
||||||
|
}
|
||||||
|
if data.Field != "id" {
|
||||||
|
t.Fatalf("get_learning data.field = %q, want %q", data.Field, "id")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("add_learning_unconfigured_returns_structured_error", func(t *testing.T) {
|
||||||
|
_, err := cs.CallTool(context.Background(), &mcp.CallToolParams{
|
||||||
|
Name: "add_learning",
|
||||||
|
Arguments: map[string]any{
|
||||||
|
"summary": "Learning with configured check",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("CallTool(add_learning) error = nil, want error")
|
||||||
|
}
|
||||||
|
|
||||||
|
rpcErr, data := requireWireError(t, err)
|
||||||
|
if rpcErr.Code != jsonrpc.CodeInvalidParams {
|
||||||
|
t.Fatalf("add_learning code = %d, want %d", rpcErr.Code, jsonrpc.CodeInvalidParams)
|
||||||
|
}
|
||||||
|
if data.Type != mcperrors.TypeInvalidInput {
|
||||||
|
t.Fatalf("add_learning data.type = %q, want %q", data.Type, mcperrors.TypeInvalidInput)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("list_learnings_unconfigured_returns_structured_error", func(t *testing.T) {
|
||||||
|
_, err := cs.CallTool(context.Background(), &mcp.CallToolParams{
|
||||||
|
Name: "list_learnings",
|
||||||
|
Arguments: map[string]any{},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("CallTool(list_learnings) error = nil, want error")
|
||||||
|
}
|
||||||
|
|
||||||
|
rpcErr, data := requireWireError(t, err)
|
||||||
|
if rpcErr.Code != jsonrpc.CodeInvalidParams {
|
||||||
|
t.Fatalf("list_learnings code = %d, want %d", rpcErr.Code, jsonrpc.CodeInvalidParams)
|
||||||
|
}
|
||||||
|
if data.Type != mcperrors.TypeInvalidInput {
|
||||||
|
t.Fatalf("list_learnings data.type = %q, want %q", data.Type, mcperrors.TypeInvalidInput)
|
||||||
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func streamableTestToolSet() ToolSet {
|
func streamableTestToolSet() ToolSet {
|
||||||
@@ -126,8 +206,8 @@ func streamableTestToolSet() ToolSet {
|
|||||||
Files: new(tools.FilesTool),
|
Files: new(tools.FilesTool),
|
||||||
Backfill: new(tools.BackfillTool),
|
Backfill: new(tools.BackfillTool),
|
||||||
Reparse: new(tools.ReparseMetadataTool),
|
Reparse: new(tools.ReparseMetadataTool),
|
||||||
RetryMetadata: new(tools.RetryMetadataTool),
|
RetryMetadata: new(tools.RetryEnrichmentTool),
|
||||||
Maintenance: new(tools.MaintenanceTool),
|
//Maintenance: new(tools.MaintenanceTool),
|
||||||
Skills: new(tools.SkillsTool),
|
Skills: new(tools.SkillsTool),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,19 +1,25 @@
|
|||||||
package observability
|
package observability
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"runtime/debug"
|
"runtime/debug"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
type contextKey string
|
type contextKey string
|
||||||
|
|
||||||
const requestIDContextKey contextKey = "request_id"
|
const requestIDContextKey contextKey = "request_id"
|
||||||
|
const mcpToolContextKey contextKey = "mcp_tool"
|
||||||
|
|
||||||
func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler {
|
func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler {
|
||||||
for i := len(middlewares) - 1; i >= 0; i-- {
|
for i := len(middlewares) - 1; i >= 0; i-- {
|
||||||
@@ -57,18 +63,27 @@ func Recover(log *slog.Logger) func(http.Handler) http.Handler {
|
|||||||
func AccessLog(log *slog.Logger) func(http.Handler) http.Handler {
|
func AccessLog(log *slog.Logger) func(http.Handler) http.Handler {
|
||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if tool := mcpToolFromRequest(r); tool != "" {
|
||||||
|
r = r.WithContext(context.WithValue(r.Context(), mcpToolContextKey, tool))
|
||||||
|
}
|
||||||
|
|
||||||
recorder := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
|
recorder := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
|
||||||
started := time.Now()
|
started := time.Now()
|
||||||
next.ServeHTTP(recorder, r)
|
next.ServeHTTP(recorder, r)
|
||||||
|
|
||||||
log.Info("http request",
|
attrs := []any{
|
||||||
slog.String("request_id", RequestIDFromContext(r.Context())),
|
slog.String("request_id", RequestIDFromContext(r.Context())),
|
||||||
slog.String("method", r.Method),
|
slog.String("method", r.Method),
|
||||||
slog.String("path", r.URL.Path),
|
slog.String("path", r.URL.Path),
|
||||||
slog.Int("status", recorder.status),
|
slog.Int("status", recorder.status),
|
||||||
slog.Duration("duration", time.Since(started)),
|
slog.Duration("duration", time.Since(started)),
|
||||||
slog.String("remote_addr", stripPort(r.RemoteAddr)),
|
slog.String("remote_addr", requestip.FromRequest(r)),
|
||||||
)
|
slog.String("mcp_session_id", mcpSessionIDFromRequest(r)),
|
||||||
|
}
|
||||||
|
if tool, _ := r.Context().Value(mcpToolContextKey).(string); strings.TrimSpace(tool) != "" {
|
||||||
|
attrs = append(attrs, slog.String("tool", tool), slog.String("tool_call", tool))
|
||||||
|
}
|
||||||
|
log.Info("http request", attrs...)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -91,6 +106,11 @@ func RequestIDFromContext(ctx context.Context) string {
|
|||||||
return value
|
return value
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func MCPToolFromContext(ctx context.Context) string {
|
||||||
|
value, _ := ctx.Value(mcpToolContextKey).(string)
|
||||||
|
return strings.TrimSpace(value)
|
||||||
|
}
|
||||||
|
|
||||||
type statusRecorder struct {
|
type statusRecorder struct {
|
||||||
http.ResponseWriter
|
http.ResponseWriter
|
||||||
status int
|
status int
|
||||||
@@ -101,10 +121,67 @@ func (s *statusRecorder) WriteHeader(statusCode int) {
|
|||||||
s.ResponseWriter.WriteHeader(statusCode)
|
s.ResponseWriter.WriteHeader(statusCode)
|
||||||
}
|
}
|
||||||
|
|
||||||
func stripPort(remote string) string {
|
func mcpToolFromRequest(r *http.Request) string {
|
||||||
host, _, err := net.SplitHostPort(remote)
|
if r == nil || r.Method != http.MethodPost || !strings.HasPrefix(r.URL.Path, "/mcp") || r.Body == nil {
|
||||||
if err != nil {
|
return ""
|
||||||
return remote
|
|
||||||
}
|
}
|
||||||
return host
|
|
||||||
|
raw, err := io.ReadAll(r.Body)
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
r.Body = io.NopCloser(bytes.NewReader(raw))
|
||||||
|
if len(raw) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support both single and batch JSON-RPC payloads.
|
||||||
|
if strings.HasPrefix(strings.TrimSpace(string(raw)), "[") {
|
||||||
|
var batch []rpcEnvelope
|
||||||
|
if err := json.Unmarshal(raw, &batch); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
for _, msg := range batch {
|
||||||
|
if tool := msg.toolName(); tool != "" {
|
||||||
|
return tool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var msg rpcEnvelope
|
||||||
|
if err := json.Unmarshal(raw, &msg); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return msg.toolName()
|
||||||
|
}
|
||||||
|
|
||||||
|
func mcpSessionIDFromRequest(r *http.Request) string {
|
||||||
|
if r == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
if v := strings.TrimSpace(r.Header.Get("MCP-Session-Id")); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
// Some clients/proxies may propagate the session in query params.
|
||||||
|
for _, key := range []string{"session_id", "sessionId", "mcp_session_id"} {
|
||||||
|
if v := strings.TrimSpace(r.URL.Query().Get(key)); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
type rpcEnvelope struct {
|
||||||
|
Method string `json:"method"`
|
||||||
|
Params struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
} `json:"params"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m rpcEnvelope) toolName() string {
|
||||||
|
if m.Method != "tools/call" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return strings.TrimSpace(m.Params.Name)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,13 @@
|
|||||||
package observability
|
package observability
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
@@ -57,3 +60,99 @@ func TestRecoverHandlesPanic(t *testing.T) {
|
|||||||
t.Fatalf("status = %d, want %d", rec.Code, http.StatusInternalServerError)
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusInternalServerError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestAccessLogUsesForwardedClientIP(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/mcp", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:1234"
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.7")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "remote_addr=203.0.113.7") {
|
||||||
|
t.Fatalf("log output = %q, want remote_addr=203.0.113.7", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPToolName(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
payload := map[string]any{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": map[string]any{
|
||||||
|
"name": "list_projects",
|
||||||
|
"arguments": map[string]any{},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("json.Marshal() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body))
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "tool=list_projects") {
|
||||||
|
t.Fatalf("log output = %q, want tool=list_projects", buf.String())
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "tool_call=list_projects") {
|
||||||
|
t.Fatalf("log output = %q, want tool_call=list_projects", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPSessionIDHeader(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/sse", nil)
|
||||||
|
req.Header.Set("MCP-Session-Id", "sess-123")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "mcp_session_id=sess-123") {
|
||||||
|
t.Fatalf("log output = %q, want mcp_session_id=sess-123", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPSessionIDQueryParam(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/sse?session_id=sess-q-1", nil)
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "mcp_session_id=sess-q-1") {
|
||||||
|
t.Fatalf("log output = %q, want mcp_session_id=sess-q-1", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
72
internal/requestip/requestip.go
Normal file
72
internal/requestip/requestip.go
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
package requestip
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FromRequest returns the best-effort client IP/host for a request, preferring
|
||||||
|
// proxy headers before falling back to RemoteAddr.
|
||||||
|
//
|
||||||
|
// Header precedence:
|
||||||
|
// 1) X-Real-IP
|
||||||
|
// 2) X-Forwarded-For (first value)
|
||||||
|
// 3) Forwarded (for=...)
|
||||||
|
// 4) RemoteAddr (host part)
|
||||||
|
func FromRequest(r *http.Request) string {
|
||||||
|
if r == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if v := firstAddressToken(r.Header.Get("X-Real-IP")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
if v := firstAddressToken(r.Header.Get("X-Forwarded-For")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
if v := forwardedForValue(r.Header.Get("Forwarded")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
return stripPort(strings.TrimSpace(r.RemoteAddr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func firstAddressToken(v string) string {
|
||||||
|
if v == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
part := strings.TrimSpace(strings.Split(v, ",")[0])
|
||||||
|
part = strings.Trim(part, `"`)
|
||||||
|
return strings.TrimSpace(part)
|
||||||
|
}
|
||||||
|
|
||||||
|
func forwardedForValue(v string) string {
|
||||||
|
for _, part := range strings.Split(v, ",") {
|
||||||
|
for _, kv := range strings.Split(part, ";") {
|
||||||
|
k, raw, ok := strings.Cut(strings.TrimSpace(kv), "=")
|
||||||
|
if !ok || !strings.EqualFold(strings.TrimSpace(k), "for") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
candidate := strings.Trim(strings.TrimSpace(raw), `"`)
|
||||||
|
if candidate == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return candidate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func stripPort(addr string) string {
|
||||||
|
addr = strings.TrimSpace(addr)
|
||||||
|
if addr == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
// RFC 7239 quoted values may wrap IPv6 with brackets.
|
||||||
|
addr = strings.Trim(addr, "[]")
|
||||||
|
host, _, err := net.SplitHostPort(addr)
|
||||||
|
if err == nil {
|
||||||
|
return host
|
||||||
|
}
|
||||||
|
return addr
|
||||||
|
}
|
||||||
37
internal/requestip/requestip_test.go
Normal file
37
internal/requestip/requestip_test.go
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
package requestip
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestFromRequestPrefersXRealIP(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:5555"
|
||||||
|
req.Header.Set("X-Forwarded-For", "198.51.100.1")
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.10")
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "203.0.113.10" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "203.0.113.10")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFromRequestUsesXForwardedForFirstValue(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:5555"
|
||||||
|
req.Header.Set("X-Forwarded-For", "198.51.100.7, 10.1.1.2")
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "198.51.100.7" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "198.51.100.7")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFromRequestFallsBackToRemoteAddr(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "192.0.2.5:1234"
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "192.0.2.5" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "192.0.2.5")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,210 +1,206 @@
|
|||||||
package store
|
package store
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
|
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// "git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
// )
|
||||||
|
|
||||||
func (db *DB) AddFamilyMember(ctx context.Context, m ext.FamilyMember) (ext.FamilyMember, error) {
|
// func (db *DB) AddFamilyMember(ctx context.Context, m ext.FamilyMember) (ext.FamilyMember, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into family_members (name, relationship, birth_date, notes)
|
// insert into family_members (name, relationship, birth_date, notes)
|
||||||
values ($1, $2, $3, $4)
|
// values ($1, $2, $3, $4)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, m.Name, nullStr(m.Relationship), m.BirthDate, nullStr(m.Notes))
|
// `, m.Name, nullStr(m.Relationship), m.BirthDate, nullStr(m.Notes))
|
||||||
|
|
||||||
created := m
|
// created := m
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicFamilyMembers
|
||||||
return ext.FamilyMember{}, fmt.Errorf("insert family member: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return ext.FamilyMember{}, fmt.Errorf("insert family member: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) ListFamilyMembers(ctx context.Context) ([]ext.FamilyMember, error) {
|
// func (db *DB) ListFamilyMembers(ctx context.Context) ([]ext.FamilyMember, error) {
|
||||||
rows, err := db.pool.Query(ctx, `select id, name, relationship, birth_date, notes, created_at from family_members order by name`)
|
// rows, err := db.pool.Query(ctx, `select id, name, relationship, birth_date, notes, created_at from family_members order by name`)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("list family members: %w", err)
|
// return nil, fmt.Errorf("list family members: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var members []ext.FamilyMember
|
// var members []ext.FamilyMember
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var m ext.FamilyMember
|
// var model generatedmodels.ModelPublicFamilyMembers
|
||||||
var relationship, notes *string
|
// if err := rows.Scan(&model.ID, &model.Name, &model.Relationship, &model.BirthDate, &model.Notes, &model.CreatedAt); err != nil {
|
||||||
if err := rows.Scan(&m.ID, &m.Name, &relationship, &m.BirthDate, ¬es, &m.CreatedAt); err != nil {
|
// return nil, fmt.Errorf("scan family member: %w", err)
|
||||||
return nil, fmt.Errorf("scan family member: %w", err)
|
// }
|
||||||
}
|
// members = append(members, familyMemberFromModel(model))
|
||||||
m.Relationship = strVal(relationship)
|
// }
|
||||||
m.Notes = strVal(notes)
|
// return members, rows.Err()
|
||||||
members = append(members, m)
|
// }
|
||||||
}
|
|
||||||
return members, rows.Err()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (db *DB) AddActivity(ctx context.Context, a ext.Activity) (ext.Activity, error) {
|
// func (db *DB) AddActivity(ctx context.Context, a ext.Activity) (ext.Activity, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into activities (family_member_id, title, activity_type, day_of_week, start_time, end_time, start_date, end_date, location, notes)
|
// insert into activities (family_member_id, title, activity_type, day_of_week, start_time, end_time, start_date, end_date, location, notes)
|
||||||
values ($1, $2, $3, $4, $5::time, $6::time, $7, $8, $9, $10)
|
// values ($1, $2, $3, $4, $5::time, $6::time, $7, $8, $9, $10)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, a.FamilyMemberID, a.Title, nullStr(a.ActivityType), nullStr(a.DayOfWeek),
|
// `, a.FamilyMemberID, a.Title, nullStr(a.ActivityType), nullStr(a.DayOfWeek),
|
||||||
nullStr(a.StartTime), nullStr(a.EndTime), a.StartDate, a.EndDate,
|
// nullStr(a.StartTime), nullStr(a.EndTime), a.StartDate, a.EndDate,
|
||||||
nullStr(a.Location), nullStr(a.Notes))
|
// nullStr(a.Location), nullStr(a.Notes))
|
||||||
|
|
||||||
created := a
|
// created := a
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicActivities
|
||||||
return ext.Activity{}, fmt.Errorf("insert activity: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return ext.Activity{}, fmt.Errorf("insert activity: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetWeekSchedule(ctx context.Context, weekStart time.Time) ([]ext.Activity, error) {
|
// func (db *DB) GetWeekSchedule(ctx context.Context, weekStart time.Time) ([]ext.Activity, error) {
|
||||||
weekEnd := weekStart.AddDate(0, 0, 7)
|
// weekEnd := weekStart.AddDate(0, 0, 7)
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select a.id, a.family_member_id, fm.name, a.title, a.activity_type,
|
// select a.id, a.family_member_id, fm.name, a.title, a.activity_type,
|
||||||
a.day_of_week, a.start_time::text, a.end_time::text,
|
// a.day_of_week, a.start_time::text, a.end_time::text,
|
||||||
a.start_date, a.end_date, a.location, a.notes, a.created_at
|
// a.start_date, a.end_date, a.location, a.notes, a.created_at
|
||||||
from activities a
|
// from activities a
|
||||||
left join family_members fm on fm.id = a.family_member_id
|
// left join family_members fm on fm.id = a.family_member_id
|
||||||
where (a.start_date >= $1 and a.start_date < $2)
|
// where (a.start_date >= $1 and a.start_date < $2)
|
||||||
or (a.day_of_week is not null and (a.end_date is null or a.end_date >= $1))
|
// or (a.day_of_week is not null and (a.end_date is null or a.end_date >= $1))
|
||||||
order by a.start_date, a.start_time
|
// order by a.start_date, a.start_time
|
||||||
`, weekStart, weekEnd)
|
// `, weekStart, weekEnd)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("get week schedule: %w", err)
|
// return nil, fmt.Errorf("get week schedule: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
return scanActivities(rows)
|
// return scanActivities(rows)
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) SearchActivities(ctx context.Context, query, activityType string, memberID *uuid.UUID) ([]ext.Activity, error) {
|
// func (db *DB) SearchActivities(ctx context.Context, query, activityType string, memberID *uuid.UUID) ([]ext.Activity, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
conditions := []string{}
|
// conditions := []string{}
|
||||||
|
|
||||||
if q := strings.TrimSpace(query); q != "" {
|
// if q := strings.TrimSpace(query); q != "" {
|
||||||
args = append(args, "%"+q+"%")
|
// args = append(args, "%"+q+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("(a.title ILIKE $%d OR a.notes ILIKE $%d)", len(args), len(args)))
|
// conditions = append(conditions, fmt.Sprintf("(a.title ILIKE $%d OR a.notes ILIKE $%d)", len(args), len(args)))
|
||||||
}
|
// }
|
||||||
if t := strings.TrimSpace(activityType); t != "" {
|
// if t := strings.TrimSpace(activityType); t != "" {
|
||||||
args = append(args, t)
|
// args = append(args, t)
|
||||||
conditions = append(conditions, fmt.Sprintf("a.activity_type = $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("a.activity_type = $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if memberID != nil {
|
// if memberID != nil {
|
||||||
args = append(args, *memberID)
|
// args = append(args, *memberID)
|
||||||
conditions = append(conditions, fmt.Sprintf("a.family_member_id = $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("a.family_member_id = $%d", len(args)))
|
||||||
}
|
// }
|
||||||
|
|
||||||
q := `
|
// q := `
|
||||||
select a.id, a.family_member_id, fm.name, a.title, a.activity_type,
|
// select a.id, a.family_member_id, fm.name, a.title, a.activity_type,
|
||||||
a.day_of_week, a.start_time::text, a.end_time::text,
|
// a.day_of_week, a.start_time::text, a.end_time::text,
|
||||||
a.start_date, a.end_date, a.location, a.notes, a.created_at
|
// a.start_date, a.end_date, a.location, a.notes, a.created_at
|
||||||
from activities a
|
// from activities a
|
||||||
left join family_members fm on fm.id = a.family_member_id
|
// left join family_members fm on fm.id = a.family_member_id
|
||||||
`
|
// `
|
||||||
if len(conditions) > 0 {
|
// if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
// q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
// }
|
||||||
q += " order by a.start_date, a.start_time"
|
// q += " order by a.start_date, a.start_time"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("search activities: %w", err)
|
// return nil, fmt.Errorf("search activities: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
return scanActivities(rows)
|
// return scanActivities(rows)
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) AddImportantDate(ctx context.Context, d ext.ImportantDate) (ext.ImportantDate, error) {
|
// func (db *DB) AddImportantDate(ctx context.Context, d ext.ImportantDate) (ext.ImportantDate, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into important_dates (family_member_id, title, date_value, recurring_yearly, reminder_days_before, notes)
|
// insert into important_dates (family_member_id, title, date_value, recurring_yearly, reminder_days_before, notes)
|
||||||
values ($1, $2, $3, $4, $5, $6)
|
// values ($1, $2, $3, $4, $5, $6)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, d.FamilyMemberID, d.Title, d.DateValue, d.RecurringYearly, d.ReminderDaysBefore, nullStr(d.Notes))
|
// `, d.FamilyMemberID, d.Title, d.DateValue, d.RecurringYearly, d.ReminderDaysBefore, nullStr(d.Notes))
|
||||||
|
|
||||||
created := d
|
// created := d
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicImportantDates
|
||||||
return ext.ImportantDate{}, fmt.Errorf("insert important date: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return ext.ImportantDate{}, fmt.Errorf("insert important date: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetUpcomingDates(ctx context.Context, daysAhead int) ([]ext.ImportantDate, error) {
|
// func (db *DB) GetUpcomingDates(ctx context.Context, daysAhead int) ([]ext.ImportantDate, error) {
|
||||||
if daysAhead <= 0 {
|
// if daysAhead <= 0 {
|
||||||
daysAhead = 30
|
// daysAhead = 30
|
||||||
}
|
// }
|
||||||
now := time.Now()
|
// now := time.Now()
|
||||||
cutoff := now.AddDate(0, 0, daysAhead)
|
// cutoff := now.AddDate(0, 0, daysAhead)
|
||||||
|
|
||||||
// For yearly recurring events, check if this year's occurrence falls in range
|
// // For yearly recurring events, check if this year's occurrence falls in range
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select d.id, d.family_member_id, fm.name, d.title, d.date_value,
|
// select d.id, d.family_member_id, fm.name, d.title, d.date_value,
|
||||||
d.recurring_yearly, d.reminder_days_before, d.notes, d.created_at
|
// d.recurring_yearly, d.reminder_days_before, d.notes, d.created_at
|
||||||
from important_dates d
|
// from important_dates d
|
||||||
left join family_members fm on fm.id = d.family_member_id
|
// left join family_members fm on fm.id = d.family_member_id
|
||||||
where (
|
// where (
|
||||||
(d.recurring_yearly = false and d.date_value between $1 and $2)
|
// (d.recurring_yearly = false and d.date_value between $1 and $2)
|
||||||
or
|
// or
|
||||||
(d.recurring_yearly = true and
|
// (d.recurring_yearly = true and
|
||||||
make_date(extract(year from now())::int, extract(month from d.date_value)::int, extract(day from d.date_value)::int)
|
// make_date(extract(year from now())::int, extract(month from d.date_value)::int, extract(day from d.date_value)::int)
|
||||||
between $1 and $2)
|
// between $1 and $2)
|
||||||
)
|
// )
|
||||||
order by d.date_value
|
// order by d.date_value
|
||||||
`, now, cutoff)
|
// `, now, cutoff)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("get upcoming dates: %w", err)
|
// return nil, fmt.Errorf("get upcoming dates: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var dates []ext.ImportantDate
|
// var dates []ext.ImportantDate
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var d ext.ImportantDate
|
// var model generatedmodels.ModelPublicImportantDates
|
||||||
var memberID *uuid.UUID
|
// var memberName *string
|
||||||
var memberName, notes *string
|
// if err := rows.Scan(&model.ID, &model.FamilyMemberID, &memberName, &model.Title, &model.DateValue,
|
||||||
if err := rows.Scan(&d.ID, &memberID, &memberName, &d.Title, &d.DateValue,
|
// &model.RecurringYearly, &model.ReminderDaysBefore, &model.Notes, &model.CreatedAt); err != nil {
|
||||||
&d.RecurringYearly, &d.ReminderDaysBefore, ¬es, &d.CreatedAt); err != nil {
|
// return nil, fmt.Errorf("scan important date: %w", err)
|
||||||
return nil, fmt.Errorf("scan important date: %w", err)
|
// }
|
||||||
}
|
// dates = append(dates, importantDateFromModel(model, strVal(memberName)))
|
||||||
d.FamilyMemberID = memberID
|
// }
|
||||||
d.MemberName = strVal(memberName)
|
// return dates, rows.Err()
|
||||||
d.Notes = strVal(notes)
|
// }
|
||||||
dates = append(dates, d)
|
|
||||||
}
|
|
||||||
return dates, rows.Err()
|
|
||||||
}
|
|
||||||
|
|
||||||
func scanActivities(rows interface {
|
// func scanActivities(rows interface {
|
||||||
Next() bool
|
// Next() bool
|
||||||
Scan(...any) error
|
// Scan(...any) error
|
||||||
Err() error
|
// Err() error
|
||||||
Close()
|
// Close()
|
||||||
}) ([]ext.Activity, error) {
|
// }) ([]ext.Activity, error) {
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
var activities []ext.Activity
|
// var activities []ext.Activity
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var a ext.Activity
|
// var model generatedmodels.ModelPublicActivities
|
||||||
var memberName, activityType, dayOfWeek, startTime, endTime, location, notes *string
|
// var memberName *string
|
||||||
if err := rows.Scan(
|
// if err := rows.Scan(
|
||||||
&a.ID, &a.FamilyMemberID, &memberName, &a.Title, &activityType,
|
// &model.ID, &model.FamilyMemberID, &memberName, &model.Title, &model.ActivityType,
|
||||||
&dayOfWeek, &startTime, &endTime,
|
// &model.DayOfWeek, &model.StartTime, &model.EndTime,
|
||||||
&a.StartDate, &a.EndDate, &location, ¬es, &a.CreatedAt,
|
// &model.StartDate, &model.EndDate, &model.Location, &model.Notes, &model.CreatedAt,
|
||||||
); err != nil {
|
// ); err != nil {
|
||||||
return nil, fmt.Errorf("scan activity: %w", err)
|
// return nil, fmt.Errorf("scan activity: %w", err)
|
||||||
}
|
// }
|
||||||
a.MemberName = strVal(memberName)
|
// activities = append(activities, activityFromModel(model, strVal(memberName)))
|
||||||
a.ActivityType = strVal(activityType)
|
// }
|
||||||
a.DayOfWeek = strVal(dayOfWeek)
|
// return activities, rows.Err()
|
||||||
a.StartTime = strVal(startTime)
|
// }
|
||||||
a.EndTime = strVal(endTime)
|
|
||||||
a.Location = strVal(location)
|
|
||||||
a.Notes = strVal(notes)
|
|
||||||
activities = append(activities, a)
|
|
||||||
}
|
|
||||||
return activities, rows.Err()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -34,9 +35,13 @@ func (db *DB) SaveChatHistory(ctx context.Context, h ext.ChatHistory) (ext.ChatH
|
|||||||
)
|
)
|
||||||
|
|
||||||
created := h
|
created := h
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
var model generatedmodels.ModelPublicChatHistories
|
||||||
|
if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.ChatHistory{}, fmt.Errorf("insert chat history: %w", err)
|
return ext.ChatHistory{}, fmt.Errorf("insert chat history: %w", err)
|
||||||
}
|
}
|
||||||
|
created.ID = model.ID.UUID()
|
||||||
|
created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
return created, nil
|
return created, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -157,26 +162,33 @@ type rowScanner interface {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func scanChatHistory(row rowScanner) (ext.ChatHistory, error) {
|
func scanChatHistory(row rowScanner) (ext.ChatHistory, error) {
|
||||||
var h ext.ChatHistory
|
var model generatedmodels.ModelPublicChatHistories
|
||||||
var title, channel, agentID, summary *string
|
|
||||||
var messagesJSON, metaJSON []byte
|
|
||||||
|
|
||||||
if err := row.Scan(
|
if err := row.Scan(
|
||||||
&h.ID, &h.SessionID, &title, &channel, &agentID, &h.ProjectID,
|
&model.ID, &model.SessionID, &model.Title, &model.Channel, &model.AgentID, &model.ProjectID,
|
||||||
&messagesJSON, &summary, &metaJSON, &h.CreatedAt, &h.UpdatedAt,
|
&model.Messages, &model.Summary, &model.Metadata, &model.CreatedAt, &model.UpdatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return ext.ChatHistory{}, err
|
return ext.ChatHistory{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
h.Title = strVal(title)
|
h := ext.ChatHistory{
|
||||||
h.Channel = strVal(channel)
|
ID: model.ID.UUID(),
|
||||||
h.AgentID = strVal(agentID)
|
SessionID: model.SessionID.String(),
|
||||||
h.Summary = strVal(summary)
|
Title: model.Title.String(),
|
||||||
|
Channel: model.Channel.String(),
|
||||||
|
AgentID: model.AgentID.String(),
|
||||||
|
Summary: model.Summary.String(),
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
|
if model.ProjectID.Valid {
|
||||||
|
id := model.ProjectID.UUID()
|
||||||
|
h.ProjectID = &id
|
||||||
|
}
|
||||||
|
|
||||||
if err := json.Unmarshal(messagesJSON, &h.Messages); err != nil {
|
if err := json.Unmarshal(model.Messages, &h.Messages); err != nil {
|
||||||
return ext.ChatHistory{}, fmt.Errorf("unmarshal messages: %w", err)
|
return ext.ChatHistory{}, fmt.Errorf("unmarshal messages: %w", err)
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(metaJSON, &h.Metadata); err != nil {
|
if err := json.Unmarshal(model.Metadata, &h.Metadata); err != nil {
|
||||||
return ext.ChatHistory{}, fmt.Errorf("unmarshal metadata: %w", err)
|
return ext.ChatHistory{}, fmt.Errorf("unmarshal metadata: %w", err)
|
||||||
}
|
}
|
||||||
if h.Messages == nil {
|
if h.Messages == nil {
|
||||||
|
|||||||
@@ -1,247 +1,235 @@
|
|||||||
package store
|
package store
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
|
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// "git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
// )
|
||||||
|
|
||||||
func (db *DB) AddProfessionalContact(ctx context.Context, c ext.ProfessionalContact) (ext.ProfessionalContact, error) {
|
// func (db *DB) AddProfessionalContact(ctx context.Context, c ext.ProfessionalContact) (ext.ProfessionalContact, error) {
|
||||||
if c.Tags == nil {
|
// if c.Tags == nil {
|
||||||
c.Tags = []string{}
|
// c.Tags = []string{}
|
||||||
}
|
// }
|
||||||
|
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into professional_contacts (name, company, title, email, phone, linkedin_url, how_we_met, tags, notes, follow_up_date)
|
// insert into professional_contacts (name, company, title, email, phone, linkedin_url, how_we_met, tags, notes, follow_up_date)
|
||||||
values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
// values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
returning id, created_at, updated_at
|
// returning id, created_at, updated_at
|
||||||
`, c.Name, nullStr(c.Company), nullStr(c.Title), nullStr(c.Email), nullStr(c.Phone),
|
// `, c.Name, nullStr(c.Company), nullStr(c.Title), nullStr(c.Email), nullStr(c.Phone),
|
||||||
nullStr(c.LinkedInURL), nullStr(c.HowWeMet), c.Tags, nullStr(c.Notes), c.FollowUpDate)
|
// nullStr(c.LinkedInURL), nullStr(c.HowWeMet), c.Tags, nullStr(c.Notes), c.FollowUpDate)
|
||||||
|
|
||||||
created := c
|
// created := c
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
// var model generatedmodels.ModelPublicProfessionalContacts
|
||||||
return ext.ProfessionalContact{}, fmt.Errorf("insert contact: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
}
|
// return ext.ProfessionalContact{}, fmt.Errorf("insert contact: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) SearchContacts(ctx context.Context, query string, tags []string) ([]ext.ProfessionalContact, error) {
|
// func (db *DB) SearchContacts(ctx context.Context, query string, tags []string) ([]ext.ProfessionalContact, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
conditions := []string{}
|
// conditions := []string{}
|
||||||
|
|
||||||
if q := strings.TrimSpace(query); q != "" {
|
// if q := strings.TrimSpace(query); q != "" {
|
||||||
args = append(args, "%"+q+"%")
|
// args = append(args, "%"+q+"%")
|
||||||
idx := len(args)
|
// idx := len(args)
|
||||||
conditions = append(conditions, fmt.Sprintf(
|
// conditions = append(conditions, fmt.Sprintf(
|
||||||
"(name ILIKE $%[1]d OR company ILIKE $%[1]d OR title ILIKE $%[1]d OR notes ILIKE $%[1]d)", idx))
|
// "(name ILIKE $%[1]d OR company ILIKE $%[1]d OR title ILIKE $%[1]d OR notes ILIKE $%[1]d)", idx))
|
||||||
}
|
// }
|
||||||
if len(tags) > 0 {
|
// if len(tags) > 0 {
|
||||||
args = append(args, tags)
|
// args = append(args, tags)
|
||||||
conditions = append(conditions, fmt.Sprintf("tags @> $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("tags @> $%d", len(args)))
|
||||||
}
|
// }
|
||||||
|
|
||||||
q := `select id, name, company, title, email, phone, linkedin_url, how_we_met, tags, notes, last_contacted, follow_up_date, created_at, updated_at from professional_contacts`
|
// q := `select id, name, company, title, email, phone, linkedin_url, how_we_met, tags::text[], notes, last_contacted, follow_up_date, created_at, updated_at from professional_contacts`
|
||||||
if len(conditions) > 0 {
|
// if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
// q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
// }
|
||||||
q += " order by name"
|
// q += " order by name"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("search contacts: %w", err)
|
// return nil, fmt.Errorf("search contacts: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
return scanContacts(rows)
|
// return scanContacts(rows)
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) GetContact(ctx context.Context, id uuid.UUID) (ext.ProfessionalContact, error) {
|
// func (db *DB) GetContact(ctx context.Context, id uuid.UUID) (ext.ProfessionalContact, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
select id, name, company, title, email, phone, linkedin_url, how_we_met, tags, notes, last_contacted, follow_up_date, created_at, updated_at
|
// select id, name, company, title, email, phone, linkedin_url, how_we_met, tags::text[], notes, last_contacted, follow_up_date, created_at, updated_at
|
||||||
from professional_contacts where id = $1
|
// from professional_contacts where id = $1
|
||||||
`, id)
|
// `, id)
|
||||||
|
|
||||||
var c ext.ProfessionalContact
|
// var model generatedmodels.ModelPublicProfessionalContacts
|
||||||
var company, title, email, phone, linkedInURL, howWeMet, notes *string
|
// var tags []string
|
||||||
if err := row.Scan(&c.ID, &c.Name, &company, &title, &email, &phone,
|
// if err := row.Scan(&model.ID, &model.Name, &model.Company, &model.Title, &model.Email, &model.Phone,
|
||||||
&linkedInURL, &howWeMet, &c.Tags, ¬es, &c.LastContacted, &c.FollowUpDate,
|
// &model.LinkedinURL, &model.HowWeMet, &tags, &model.Notes, &model.LastContacted, &model.FollowUpDate,
|
||||||
&c.CreatedAt, &c.UpdatedAt); err != nil {
|
// &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.ProfessionalContact{}, fmt.Errorf("get contact: %w", err)
|
// return ext.ProfessionalContact{}, fmt.Errorf("get contact: %w", err)
|
||||||
}
|
// }
|
||||||
c.Company = strVal(company)
|
// c := professionalContactFromModel(model, tags)
|
||||||
c.Title = strVal(title)
|
// return c, nil
|
||||||
c.Email = strVal(email)
|
// }
|
||||||
c.Phone = strVal(phone)
|
|
||||||
c.LinkedInURL = strVal(linkedInURL)
|
|
||||||
c.HowWeMet = strVal(howWeMet)
|
|
||||||
c.Notes = strVal(notes)
|
|
||||||
if c.Tags == nil {
|
|
||||||
c.Tags = []string{}
|
|
||||||
}
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (db *DB) LogInteraction(ctx context.Context, interaction ext.ContactInteraction) (ext.ContactInteraction, error) {
|
// func (db *DB) LogInteraction(ctx context.Context, interaction ext.ContactInteraction) (ext.ContactInteraction, error) {
|
||||||
occurredAt := interaction.OccurredAt
|
// occurredAt := interaction.OccurredAt
|
||||||
if occurredAt.IsZero() {
|
// if occurredAt.IsZero() {
|
||||||
occurredAt = time.Now()
|
// occurredAt = time.Now()
|
||||||
}
|
// }
|
||||||
|
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into contact_interactions (contact_id, interaction_type, occurred_at, summary, follow_up_needed, follow_up_notes)
|
// insert into contact_interactions (contact_id, interaction_type, occurred_at, summary, follow_up_needed, follow_up_notes)
|
||||||
values ($1, $2, $3, $4, $5, $6)
|
// values ($1, $2, $3, $4, $5, $6)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, interaction.ContactID, interaction.InteractionType, occurredAt, interaction.Summary,
|
// `, interaction.ContactID, interaction.InteractionType, occurredAt, interaction.Summary,
|
||||||
interaction.FollowUpNeeded, nullStr(interaction.FollowUpNotes))
|
// interaction.FollowUpNeeded, nullStr(interaction.FollowUpNotes))
|
||||||
|
|
||||||
created := interaction
|
// created := interaction
|
||||||
created.OccurredAt = occurredAt
|
// created.OccurredAt = occurredAt
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicContactInteractions
|
||||||
return ext.ContactInteraction{}, fmt.Errorf("insert interaction: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return ext.ContactInteraction{}, fmt.Errorf("insert interaction: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetContactHistory(ctx context.Context, contactID uuid.UUID) (ext.ContactHistory, error) {
|
// func (db *DB) GetContactHistory(ctx context.Context, contactID uuid.UUID) (ext.ContactHistory, error) {
|
||||||
contact, err := db.GetContact(ctx, contactID)
|
// contact, err := db.GetContact(ctx, contactID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.ContactHistory{}, err
|
// return ext.ContactHistory{}, err
|
||||||
}
|
// }
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select id, contact_id, interaction_type, occurred_at, summary, follow_up_needed, follow_up_notes, created_at
|
// select id, contact_id, interaction_type, occurred_at, summary, follow_up_needed, follow_up_notes, created_at
|
||||||
from contact_interactions where contact_id = $1 order by occurred_at desc
|
// from contact_interactions where contact_id = $1 order by occurred_at desc
|
||||||
`, contactID)
|
// `, contactID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.ContactHistory{}, fmt.Errorf("get interactions: %w", err)
|
// return ext.ContactHistory{}, fmt.Errorf("get interactions: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var interactions []ext.ContactInteraction
|
// var interactions []ext.ContactInteraction
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var i ext.ContactInteraction
|
// var model generatedmodels.ModelPublicContactInteractions
|
||||||
var followUpNotes *string
|
// if err := rows.Scan(&model.ID, &model.ContactID, &model.InteractionType, &model.OccurredAt, &model.Summary,
|
||||||
if err := rows.Scan(&i.ID, &i.ContactID, &i.InteractionType, &i.OccurredAt, &i.Summary,
|
// &model.FollowUpNeeded, &model.FollowUpNotes, &model.CreatedAt); err != nil {
|
||||||
&i.FollowUpNeeded, &followUpNotes, &i.CreatedAt); err != nil {
|
// return ext.ContactHistory{}, fmt.Errorf("scan interaction: %w", err)
|
||||||
return ext.ContactHistory{}, fmt.Errorf("scan interaction: %w", err)
|
// }
|
||||||
}
|
// interactions = append(interactions, contactInteractionFromModel(model))
|
||||||
i.FollowUpNotes = strVal(followUpNotes)
|
// }
|
||||||
interactions = append(interactions, i)
|
// if err := rows.Err(); err != nil {
|
||||||
}
|
// return ext.ContactHistory{}, err
|
||||||
if err := rows.Err(); err != nil {
|
// }
|
||||||
return ext.ContactHistory{}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
oppRows, err := db.pool.Query(ctx, `
|
// oppRows, err := db.pool.Query(ctx, `
|
||||||
select id, contact_id, title, description, stage, value, expected_close_date, notes, created_at, updated_at
|
// select id, contact_id, title, description, stage, value, expected_close_date, notes, created_at, updated_at
|
||||||
from opportunities where contact_id = $1 order by created_at desc
|
// from opportunities where contact_id = $1 order by created_at desc
|
||||||
`, contactID)
|
// `, contactID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.ContactHistory{}, fmt.Errorf("get opportunities: %w", err)
|
// return ext.ContactHistory{}, fmt.Errorf("get opportunities: %w", err)
|
||||||
}
|
// }
|
||||||
defer oppRows.Close()
|
// defer oppRows.Close()
|
||||||
|
|
||||||
var opportunities []ext.Opportunity
|
// var opportunities []ext.Opportunity
|
||||||
for oppRows.Next() {
|
// for oppRows.Next() {
|
||||||
var o ext.Opportunity
|
// var model generatedmodels.ModelPublicOpportunities
|
||||||
var description, notes *string
|
// if err := oppRows.Scan(&model.ID, &model.ContactID, &model.Title, &model.Description, &model.Stage, &model.Value,
|
||||||
if err := oppRows.Scan(&o.ID, &o.ContactID, &o.Title, &description, &o.Stage, &o.Value,
|
// &model.ExpectedCloseDate, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
&o.ExpectedCloseDate, ¬es, &o.CreatedAt, &o.UpdatedAt); err != nil {
|
// return ext.ContactHistory{}, fmt.Errorf("scan opportunity: %w", err)
|
||||||
return ext.ContactHistory{}, fmt.Errorf("scan opportunity: %w", err)
|
// }
|
||||||
}
|
// opportunities = append(opportunities, opportunityFromModel(model))
|
||||||
o.Description = strVal(description)
|
// }
|
||||||
o.Notes = strVal(notes)
|
// if err := oppRows.Err(); err != nil {
|
||||||
opportunities = append(opportunities, o)
|
// return ext.ContactHistory{}, err
|
||||||
}
|
// }
|
||||||
if err := oppRows.Err(); err != nil {
|
|
||||||
return ext.ContactHistory{}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return ext.ContactHistory{
|
// return ext.ContactHistory{
|
||||||
Contact: contact,
|
// Contact: contact,
|
||||||
Interactions: interactions,
|
// Interactions: interactions,
|
||||||
Opportunities: opportunities,
|
// Opportunities: opportunities,
|
||||||
}, nil
|
// }, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) CreateOpportunity(ctx context.Context, o ext.Opportunity) (ext.Opportunity, error) {
|
// func (db *DB) CreateOpportunity(ctx context.Context, o ext.Opportunity) (ext.Opportunity, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into opportunities (contact_id, title, description, stage, value, expected_close_date, notes)
|
// insert into opportunities (contact_id, title, description, stage, value, expected_close_date, notes)
|
||||||
values ($1, $2, $3, $4, $5, $6, $7)
|
// values ($1, $2, $3, $4, $5, $6, $7)
|
||||||
returning id, created_at, updated_at
|
// returning id, created_at, updated_at
|
||||||
`, o.ContactID, o.Title, nullStr(o.Description), o.Stage, o.Value, o.ExpectedCloseDate, nullStr(o.Notes))
|
// `, o.ContactID, o.Title, nullStr(o.Description), o.Stage, o.Value, o.ExpectedCloseDate, nullStr(o.Notes))
|
||||||
|
|
||||||
created := o
|
// created := o
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
// var model generatedmodels.ModelPublicOpportunities
|
||||||
return ext.Opportunity{}, fmt.Errorf("insert opportunity: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
}
|
// return ext.Opportunity{}, fmt.Errorf("insert opportunity: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetFollowUpsDue(ctx context.Context, daysAhead int) ([]ext.ProfessionalContact, error) {
|
// func (db *DB) GetFollowUpsDue(ctx context.Context, daysAhead int) ([]ext.ProfessionalContact, error) {
|
||||||
if daysAhead <= 0 {
|
// if daysAhead <= 0 {
|
||||||
daysAhead = 7
|
// daysAhead = 7
|
||||||
}
|
// }
|
||||||
cutoff := time.Now().AddDate(0, 0, daysAhead)
|
// cutoff := time.Now().AddDate(0, 0, daysAhead)
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select id, name, company, title, email, phone, linkedin_url, how_we_met, tags, notes, last_contacted, follow_up_date, created_at, updated_at
|
// select id, name, company, title, email, phone, linkedin_url, how_we_met, tags::text[], notes, last_contacted, follow_up_date, created_at, updated_at
|
||||||
from professional_contacts
|
// from professional_contacts
|
||||||
where follow_up_date <= $1
|
// where follow_up_date <= $1
|
||||||
order by follow_up_date asc
|
// order by follow_up_date asc
|
||||||
`, cutoff)
|
// `, cutoff)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("get follow-ups: %w", err)
|
// return nil, fmt.Errorf("get follow-ups: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
return scanContacts(rows)
|
// return scanContacts(rows)
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) AppendThoughtToContactNotes(ctx context.Context, contactID uuid.UUID, thoughtContent string) error {
|
// func (db *DB) AppendThoughtToContactNotes(ctx context.Context, contactID uuid.UUID, thoughtContent string) error {
|
||||||
_, err := db.pool.Exec(ctx, `
|
// _, err := db.pool.Exec(ctx, `
|
||||||
update professional_contacts
|
// update professional_contacts
|
||||||
set notes = coalesce(notes, '') || $2
|
// set notes = coalesce(notes, '') || $2
|
||||||
where id = $1
|
// where id = $1
|
||||||
`, contactID, thoughtContent)
|
// `, contactID, thoughtContent)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return fmt.Errorf("append thought to contact: %w", err)
|
// return fmt.Errorf("append thought to contact: %w", err)
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
func scanContacts(rows interface {
|
// func scanContacts(rows interface {
|
||||||
Next() bool
|
// Next() bool
|
||||||
Scan(...any) error
|
// Scan(...any) error
|
||||||
Err() error
|
// Err() error
|
||||||
Close()
|
// Close()
|
||||||
}) ([]ext.ProfessionalContact, error) {
|
// }) ([]ext.ProfessionalContact, error) {
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
var contacts []ext.ProfessionalContact
|
// var contacts []ext.ProfessionalContact
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var c ext.ProfessionalContact
|
// var model generatedmodels.ModelPublicProfessionalContacts
|
||||||
var company, title, email, phone, linkedInURL, howWeMet, notes *string
|
// var tags []string
|
||||||
if err := rows.Scan(&c.ID, &c.Name, &company, &title, &email, &phone,
|
// if err := rows.Scan(&model.ID, &model.Name, &model.Company, &model.Title, &model.Email, &model.Phone,
|
||||||
&linkedInURL, &howWeMet, &c.Tags, ¬es, &c.LastContacted, &c.FollowUpDate,
|
// &model.LinkedinURL, &model.HowWeMet, &tags, &model.Notes, &model.LastContacted, &model.FollowUpDate,
|
||||||
&c.CreatedAt, &c.UpdatedAt); err != nil {
|
// &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan contact: %w", err)
|
// return nil, fmt.Errorf("scan contact: %w", err)
|
||||||
}
|
// }
|
||||||
c.Company = strVal(company)
|
// contacts = append(contacts, professionalContactFromModel(model, tags))
|
||||||
c.Title = strVal(title)
|
// }
|
||||||
c.Email = strVal(email)
|
// return contacts, rows.Err()
|
||||||
c.Phone = strVal(phone)
|
// }
|
||||||
c.LinkedInURL = strVal(linkedInURL)
|
|
||||||
c.HowWeMet = strVal(howWeMet)
|
|
||||||
c.Notes = strVal(notes)
|
|
||||||
if c.Tags == nil {
|
|
||||||
c.Tags = []string{}
|
|
||||||
}
|
|
||||||
contacts = append(contacts, c)
|
|
||||||
}
|
|
||||||
return contacts, rows.Err()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -2,18 +2,23 @@ package store
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"database/sql"
|
||||||
"fmt"
|
"fmt"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
"github.com/jackc/pgx/v5/pgxpool"
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
pgxvec "github.com/pgvector/pgvector-go/pgx"
|
pgxvec "github.com/pgvector/pgvector-go/pgx"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
"github.com/uptrace/bun/dialect/pgdialect"
|
||||||
|
"github.com/uptrace/bun/driver/pgdriver"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
)
|
)
|
||||||
|
|
||||||
type DB struct {
|
type DB struct {
|
||||||
pool *pgxpool.Pool
|
pool *pgxpool.Pool
|
||||||
|
bun *bun.DB
|
||||||
}
|
}
|
||||||
|
|
||||||
func New(ctx context.Context, cfg config.DatabaseConfig) (*DB, error) {
|
func New(ctx context.Context, cfg config.DatabaseConfig) (*DB, error) {
|
||||||
@@ -35,8 +40,20 @@ func New(ctx context.Context, cfg config.DatabaseConfig) (*DB, error) {
|
|||||||
return nil, fmt.Errorf("create database pool: %w", err)
|
return nil, fmt.Errorf("create database pool: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
db := &DB{pool: pool}
|
bunSQLDB := sql.OpenDB(pgdriver.NewConnector(pgdriver.WithDSN(cfg.URL)))
|
||||||
|
bunSQLDB.SetMaxOpenConns(int(cfg.MaxConns))
|
||||||
|
bunSQLDB.SetMaxIdleConns(int(cfg.MinConns))
|
||||||
|
bunSQLDB.SetConnMaxLifetime(cfg.MaxConnLifetime)
|
||||||
|
bunSQLDB.SetConnMaxIdleTime(cfg.MaxConnIdleTime)
|
||||||
|
|
||||||
|
db := &DB{
|
||||||
|
pool: pool,
|
||||||
|
bun: bun.NewDB(bunSQLDB, pgdialect.New()),
|
||||||
|
}
|
||||||
if err := db.Ping(ctx); err != nil {
|
if err := db.Ping(ctx); err != nil {
|
||||||
|
if db.bun != nil {
|
||||||
|
_ = db.bun.Close()
|
||||||
|
}
|
||||||
pool.Close()
|
pool.Close()
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -45,11 +62,16 @@ func New(ctx context.Context, cfg config.DatabaseConfig) (*DB, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) Close() {
|
func (db *DB) Close() {
|
||||||
if db == nil || db.pool == nil {
|
if db == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
db.pool.Close()
|
if db.bun != nil {
|
||||||
|
_ = db.bun.Close()
|
||||||
|
}
|
||||||
|
if db.pool != nil {
|
||||||
|
db.pool.Close()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) Ping(ctx context.Context) error {
|
func (db *DB) Ping(ctx context.Context) error {
|
||||||
@@ -102,3 +124,10 @@ func (db *DB) VerifyRequirements(ctx context.Context) error {
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (db *DB) Bun() *bun.DB {
|
||||||
|
if db == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return db.bun
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -19,24 +20,24 @@ func (db *DB) InsertStoredFile(ctx context.Context, file thoughttypes.StoredFile
|
|||||||
returning guid, thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, created_at, updated_at
|
returning guid, thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, created_at, updated_at
|
||||||
`, file.ThoughtID, file.ProjectID, file.Name, file.MediaType, file.Kind, file.Encoding, file.SizeBytes, file.SHA256, file.Content)
|
`, file.ThoughtID, file.ProjectID, file.Name, file.MediaType, file.Kind, file.Encoding, file.SizeBytes, file.SHA256, file.Content)
|
||||||
|
|
||||||
var created thoughttypes.StoredFile
|
var model generatedmodels.ModelPublicStoredFiles
|
||||||
if err := row.Scan(
|
if err := row.Scan(
|
||||||
&created.ID,
|
&model.GUID,
|
||||||
&created.ThoughtID,
|
&model.ThoughtID,
|
||||||
&created.ProjectID,
|
&model.ProjectID,
|
||||||
&created.Name,
|
&model.Name,
|
||||||
&created.MediaType,
|
&model.MediaType,
|
||||||
&created.Kind,
|
&model.Kind,
|
||||||
&created.Encoding,
|
&model.Encoding,
|
||||||
&created.SizeBytes,
|
&model.SizeBytes,
|
||||||
&created.SHA256,
|
&model.Sha256,
|
||||||
&created.CreatedAt,
|
&model.CreatedAt,
|
||||||
&created.UpdatedAt,
|
&model.UpdatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return thoughttypes.StoredFile{}, fmt.Errorf("insert stored file: %w", err)
|
return thoughttypes.StoredFile{}, fmt.Errorf("insert stored file: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return created, nil
|
return storedFileFromModel(model), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) GetStoredFile(ctx context.Context, id uuid.UUID) (thoughttypes.StoredFile, error) {
|
func (db *DB) GetStoredFile(ctx context.Context, id uuid.UUID) (thoughttypes.StoredFile, error) {
|
||||||
@@ -46,20 +47,20 @@ func (db *DB) GetStoredFile(ctx context.Context, id uuid.UUID) (thoughttypes.Sto
|
|||||||
where guid = $1
|
where guid = $1
|
||||||
`, id)
|
`, id)
|
||||||
|
|
||||||
var file thoughttypes.StoredFile
|
var model generatedmodels.ModelPublicStoredFiles
|
||||||
if err := row.Scan(
|
if err := row.Scan(
|
||||||
&file.ID,
|
&model.GUID,
|
||||||
&file.ThoughtID,
|
&model.ThoughtID,
|
||||||
&file.ProjectID,
|
&model.ProjectID,
|
||||||
&file.Name,
|
&model.Name,
|
||||||
&file.MediaType,
|
&model.MediaType,
|
||||||
&file.Kind,
|
&model.Kind,
|
||||||
&file.Encoding,
|
&model.Encoding,
|
||||||
&file.SizeBytes,
|
&model.SizeBytes,
|
||||||
&file.SHA256,
|
&model.Sha256,
|
||||||
&file.Content,
|
&model.Content,
|
||||||
&file.CreatedAt,
|
&model.CreatedAt,
|
||||||
&file.UpdatedAt,
|
&model.UpdatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
if err == pgx.ErrNoRows {
|
if err == pgx.ErrNoRows {
|
||||||
return thoughttypes.StoredFile{}, err
|
return thoughttypes.StoredFile{}, err
|
||||||
@@ -67,7 +68,7 @@ func (db *DB) GetStoredFile(ctx context.Context, id uuid.UUID) (thoughttypes.Sto
|
|||||||
return thoughttypes.StoredFile{}, fmt.Errorf("get stored file: %w", err)
|
return thoughttypes.StoredFile{}, fmt.Errorf("get stored file: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return file, nil
|
return storedFileFromModel(model), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) ListStoredFiles(ctx context.Context, filter thoughttypes.StoredFileFilter) ([]thoughttypes.StoredFile, error) {
|
func (db *DB) ListStoredFiles(ctx context.Context, filter thoughttypes.StoredFileFilter) ([]thoughttypes.StoredFile, error) {
|
||||||
@@ -106,23 +107,23 @@ func (db *DB) ListStoredFiles(ctx context.Context, filter thoughttypes.StoredFil
|
|||||||
|
|
||||||
files := make([]thoughttypes.StoredFile, 0, filter.Limit)
|
files := make([]thoughttypes.StoredFile, 0, filter.Limit)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var file thoughttypes.StoredFile
|
var model generatedmodels.ModelPublicStoredFiles
|
||||||
if err := rows.Scan(
|
if err := rows.Scan(
|
||||||
&file.ID,
|
&model.GUID,
|
||||||
&file.ThoughtID,
|
&model.ThoughtID,
|
||||||
&file.ProjectID,
|
&model.ProjectID,
|
||||||
&file.Name,
|
&model.Name,
|
||||||
&file.MediaType,
|
&model.MediaType,
|
||||||
&file.Kind,
|
&model.Kind,
|
||||||
&file.Encoding,
|
&model.Encoding,
|
||||||
&file.SizeBytes,
|
&model.SizeBytes,
|
||||||
&file.SHA256,
|
&model.Sha256,
|
||||||
&file.CreatedAt,
|
&model.CreatedAt,
|
||||||
&file.UpdatedAt,
|
&model.UpdatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return nil, fmt.Errorf("scan stored file: %w", err)
|
return nil, fmt.Errorf("scan stored file: %w", err)
|
||||||
}
|
}
|
||||||
files = append(files, file)
|
files = append(files, storedFileFromModel(model))
|
||||||
}
|
}
|
||||||
if err := rows.Err(); err != nil {
|
if err := rows.Err(); err != nil {
|
||||||
return nil, fmt.Errorf("iterate stored files: %w", err)
|
return nil, fmt.Errorf("iterate stored files: %w", err)
|
||||||
|
|||||||
@@ -1,150 +1,133 @@
|
|||||||
package store
|
package store
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"encoding/json"
|
// "encoding/json"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
|
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// "git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
// )
|
||||||
|
|
||||||
func (db *DB) AddHouseholdItem(ctx context.Context, item ext.HouseholdItem) (ext.HouseholdItem, error) {
|
// func (db *DB) AddHouseholdItem(ctx context.Context, item ext.HouseholdItem) (ext.HouseholdItem, error) {
|
||||||
details, err := json.Marshal(item.Details)
|
// details, err := json.Marshal(item.Details)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.HouseholdItem{}, fmt.Errorf("marshal details: %w", err)
|
// return ext.HouseholdItem{}, fmt.Errorf("marshal details: %w", err)
|
||||||
}
|
// }
|
||||||
|
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into household_items (name, category, location, details, notes)
|
// insert into household_items (name, category, location, details, notes)
|
||||||
values ($1, $2, $3, $4::jsonb, $5)
|
// values ($1, $2, $3, $4::jsonb, $5)
|
||||||
returning id, created_at, updated_at
|
// returning id, created_at, updated_at
|
||||||
`, item.Name, nullStr(item.Category), nullStr(item.Location), details, nullStr(item.Notes))
|
// `, item.Name, nullStr(item.Category), nullStr(item.Location), details, nullStr(item.Notes))
|
||||||
|
|
||||||
created := item
|
// created := item
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
// var model generatedmodels.ModelPublicHouseholdItems
|
||||||
return ext.HouseholdItem{}, fmt.Errorf("insert household item: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
}
|
// return ext.HouseholdItem{}, fmt.Errorf("insert household item: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) SearchHouseholdItems(ctx context.Context, query, category, location string) ([]ext.HouseholdItem, error) {
|
// func (db *DB) SearchHouseholdItems(ctx context.Context, query, category, location string) ([]ext.HouseholdItem, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
conditions := []string{}
|
// conditions := []string{}
|
||||||
|
|
||||||
if q := strings.TrimSpace(query); q != "" {
|
// if q := strings.TrimSpace(query); q != "" {
|
||||||
args = append(args, "%"+q+"%")
|
// args = append(args, "%"+q+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("(name ILIKE $%d OR notes ILIKE $%d)", len(args), len(args)))
|
// conditions = append(conditions, fmt.Sprintf("(name ILIKE $%d OR notes ILIKE $%d)", len(args), len(args)))
|
||||||
}
|
// }
|
||||||
if c := strings.TrimSpace(category); c != "" {
|
// if c := strings.TrimSpace(category); c != "" {
|
||||||
args = append(args, c)
|
// args = append(args, c)
|
||||||
conditions = append(conditions, fmt.Sprintf("category = $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("category = $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if l := strings.TrimSpace(location); l != "" {
|
// if l := strings.TrimSpace(location); l != "" {
|
||||||
args = append(args, "%"+l+"%")
|
// args = append(args, "%"+l+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("location ILIKE $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("location ILIKE $%d", len(args)))
|
||||||
}
|
// }
|
||||||
|
|
||||||
q := `select id, name, category, location, details, notes, created_at, updated_at from household_items`
|
// q := `select id, name, category, location, details, notes, created_at, updated_at from household_items`
|
||||||
if len(conditions) > 0 {
|
// if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
// q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
// }
|
||||||
q += " order by name"
|
// q += " order by name"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("search household items: %w", err)
|
// return nil, fmt.Errorf("search household items: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var items []ext.HouseholdItem
|
// var items []ext.HouseholdItem
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var item ext.HouseholdItem
|
// var model generatedmodels.ModelPublicHouseholdItems
|
||||||
var detailsBytes []byte
|
// if err := rows.Scan(&model.ID, &model.Name, &model.Category, &model.Location, &model.Details, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
var category, location, notes *string
|
// return nil, fmt.Errorf("scan household item: %w", err)
|
||||||
if err := rows.Scan(&item.ID, &item.Name, &category, &location, &detailsBytes, ¬es, &item.CreatedAt, &item.UpdatedAt); err != nil {
|
// }
|
||||||
return nil, fmt.Errorf("scan household item: %w", err)
|
// items = append(items, householdItemFromModel(model))
|
||||||
}
|
// }
|
||||||
item.Category = strVal(category)
|
// return items, rows.Err()
|
||||||
item.Location = strVal(location)
|
// }
|
||||||
item.Notes = strVal(notes)
|
|
||||||
if err := json.Unmarshal(detailsBytes, &item.Details); err != nil {
|
|
||||||
item.Details = map[string]any{}
|
|
||||||
}
|
|
||||||
items = append(items, item)
|
|
||||||
}
|
|
||||||
return items, rows.Err()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (db *DB) GetHouseholdItem(ctx context.Context, id uuid.UUID) (ext.HouseholdItem, error) {
|
// func (db *DB) GetHouseholdItem(ctx context.Context, id uuid.UUID) (ext.HouseholdItem, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
select id, name, category, location, details, notes, created_at, updated_at
|
// select id, name, category, location, details, notes, created_at, updated_at
|
||||||
from household_items where id = $1
|
// from household_items where id = $1
|
||||||
`, id)
|
// `, id)
|
||||||
|
|
||||||
var item ext.HouseholdItem
|
// var model generatedmodels.ModelPublicHouseholdItems
|
||||||
var detailsBytes []byte
|
// if err := row.Scan(&model.ID, &model.Name, &model.Category, &model.Location, &model.Details, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
var category, location, notes *string
|
// return ext.HouseholdItem{}, fmt.Errorf("get household item: %w", err)
|
||||||
if err := row.Scan(&item.ID, &item.Name, &category, &location, &detailsBytes, ¬es, &item.CreatedAt, &item.UpdatedAt); err != nil {
|
// }
|
||||||
return ext.HouseholdItem{}, fmt.Errorf("get household item: %w", err)
|
// return householdItemFromModel(model), nil
|
||||||
}
|
// }
|
||||||
item.Category = strVal(category)
|
|
||||||
item.Location = strVal(location)
|
|
||||||
item.Notes = strVal(notes)
|
|
||||||
if err := json.Unmarshal(detailsBytes, &item.Details); err != nil {
|
|
||||||
item.Details = map[string]any{}
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (db *DB) AddVendor(ctx context.Context, v ext.HouseholdVendor) (ext.HouseholdVendor, error) {
|
// func (db *DB) AddVendor(ctx context.Context, v ext.HouseholdVendor) (ext.HouseholdVendor, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into household_vendors (name, service_type, phone, email, website, notes, rating, last_used)
|
// insert into household_vendors (name, service_type, phone, email, website, notes, rating, last_used)
|
||||||
values ($1, $2, $3, $4, $5, $6, $7, $8)
|
// values ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, v.Name, nullStr(v.ServiceType), nullStr(v.Phone), nullStr(v.Email),
|
// `, v.Name, nullStr(v.ServiceType), nullStr(v.Phone), nullStr(v.Email),
|
||||||
nullStr(v.Website), nullStr(v.Notes), v.Rating, v.LastUsed)
|
// nullStr(v.Website), nullStr(v.Notes), v.Rating, v.LastUsed)
|
||||||
|
|
||||||
created := v
|
// created := v
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicHouseholdVendors
|
||||||
return ext.HouseholdVendor{}, fmt.Errorf("insert vendor: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return ext.HouseholdVendor{}, fmt.Errorf("insert vendor: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) ListVendors(ctx context.Context, serviceType string) ([]ext.HouseholdVendor, error) {
|
// func (db *DB) ListVendors(ctx context.Context, serviceType string) ([]ext.HouseholdVendor, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
q := `select id, name, service_type, phone, email, website, notes, rating, last_used, created_at from household_vendors`
|
// q := `select id, name, service_type, phone, email, website, notes, rating, last_used, created_at from household_vendors`
|
||||||
if st := strings.TrimSpace(serviceType); st != "" {
|
// if st := strings.TrimSpace(serviceType); st != "" {
|
||||||
args = append(args, st)
|
// args = append(args, st)
|
||||||
q += " where service_type = $1"
|
// q += " where service_type = $1"
|
||||||
}
|
// }
|
||||||
q += " order by name"
|
// q += " order by name"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("list vendors: %w", err)
|
// return nil, fmt.Errorf("list vendors: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var vendors []ext.HouseholdVendor
|
// var vendors []ext.HouseholdVendor
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var v ext.HouseholdVendor
|
// var model generatedmodels.ModelPublicHouseholdVendors
|
||||||
var serviceType, phone, email, website, notes *string
|
// if err := rows.Scan(&model.ID, &model.Name, &model.ServiceType, &model.Phone, &model.Email, &model.Website, &model.Notes, &model.Rating, &model.LastUsed, &model.CreatedAt); err != nil {
|
||||||
var lastUsed *time.Time
|
// return nil, fmt.Errorf("scan vendor: %w", err)
|
||||||
if err := rows.Scan(&v.ID, &v.Name, &serviceType, &phone, &email, &website, ¬es, &v.Rating, &lastUsed, &v.CreatedAt); err != nil {
|
// }
|
||||||
return nil, fmt.Errorf("scan vendor: %w", err)
|
// vendors = append(vendors, householdVendorFromModel(model))
|
||||||
}
|
// }
|
||||||
v.ServiceType = strVal(serviceType)
|
// return vendors, rows.Err()
|
||||||
v.Phone = strVal(phone)
|
// }
|
||||||
v.Email = strVal(email)
|
|
||||||
v.Website = strVal(website)
|
|
||||||
v.Notes = strVal(notes)
|
|
||||||
v.LastUsed = lastUsed
|
|
||||||
vendors = append(vendors, v)
|
|
||||||
}
|
|
||||||
return vendors, rows.Err()
|
|
||||||
}
|
|
||||||
|
|||||||
208
internal/store/learnings.go
Normal file
208
internal/store/learnings.go
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
package store
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (db *DB) CreateLearning(ctx context.Context, learning thoughttypes.Learning) (thoughttypes.Learning, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
insert into learnings (
|
||||||
|
summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags
|
||||||
|
) values (
|
||||||
|
$1, $2, $3, $4, $5, $6, $7,
|
||||||
|
$8, $9, $10, $11, $12,
|
||||||
|
$13, $14, $15, $16,
|
||||||
|
$17, $18
|
||||||
|
)
|
||||||
|
returning id, created_at, updated_at
|
||||||
|
`,
|
||||||
|
strings.TrimSpace(learning.Summary),
|
||||||
|
strings.TrimSpace(learning.Details),
|
||||||
|
strings.TrimSpace(learning.Category),
|
||||||
|
strings.TrimSpace(learning.Area),
|
||||||
|
string(learning.Status),
|
||||||
|
string(learning.Priority),
|
||||||
|
string(learning.Confidence),
|
||||||
|
learning.ActionRequired,
|
||||||
|
nullableText(learning.SourceType),
|
||||||
|
nullableText(learning.SourceRef),
|
||||||
|
learning.ProjectID,
|
||||||
|
learning.RelatedThoughtID,
|
||||||
|
learning.RelatedSkillID,
|
||||||
|
nullableTextPtr(learning.ReviewedBy),
|
||||||
|
learning.ReviewedAt,
|
||||||
|
learning.DuplicateOfLearningID,
|
||||||
|
learning.SupersedesLearningID,
|
||||||
|
learning.Tags,
|
||||||
|
)
|
||||||
|
|
||||||
|
created := learning
|
||||||
|
var model generatedmodels.ModelPublicLearnings
|
||||||
|
if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("create learning: %w", err)
|
||||||
|
}
|
||||||
|
created.ID = model.ID.UUID()
|
||||||
|
created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
return created, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) GetLearning(ctx context.Context, id uuid.UUID) (thoughttypes.Learning, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select id, summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags::text[], created_at, updated_at
|
||||||
|
from learnings
|
||||||
|
where id = $1
|
||||||
|
`, id)
|
||||||
|
|
||||||
|
learning, err := scanLearning(row)
|
||||||
|
if err != nil {
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("learning not found: %s", id)
|
||||||
|
}
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("get learning: %w", err)
|
||||||
|
}
|
||||||
|
return learning, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) ListLearnings(ctx context.Context, filter thoughttypes.LearningFilter) ([]thoughttypes.Learning, error) {
|
||||||
|
args := make([]any, 0, 8)
|
||||||
|
conditions := make([]string, 0, 8)
|
||||||
|
|
||||||
|
if filter.ProjectID != nil {
|
||||||
|
args = append(args, *filter.ProjectID)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("project_id = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Category); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("category = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Area); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("area = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Status); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("status = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Priority); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("priority = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Tag); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("$%d = any(tags)", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Query); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("to_tsvector('simple', summary || ' ' || coalesce(details, '')) @@ websearch_to_tsquery('simple', $%d)", len(args)))
|
||||||
|
}
|
||||||
|
|
||||||
|
query := `
|
||||||
|
select id, summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags::text[], created_at, updated_at
|
||||||
|
from learnings
|
||||||
|
`
|
||||||
|
if len(conditions) > 0 {
|
||||||
|
query += " where " + strings.Join(conditions, " and ")
|
||||||
|
}
|
||||||
|
query += " order by updated_at desc"
|
||||||
|
if filter.Limit > 0 {
|
||||||
|
args = append(args, filter.Limit)
|
||||||
|
query += fmt.Sprintf(" limit $%d", len(args))
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := db.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("list learnings: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
items := make([]thoughttypes.Learning, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
item, err := scanLearning(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scan learning: %w", err)
|
||||||
|
}
|
||||||
|
items = append(items, item)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterate learnings: %w", err)
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type learningScanner interface {
|
||||||
|
Scan(dest ...any) error
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanLearning(row learningScanner) (thoughttypes.Learning, error) {
|
||||||
|
var model generatedmodels.ModelPublicLearnings
|
||||||
|
var tags []string
|
||||||
|
|
||||||
|
err := row.Scan(
|
||||||
|
&model.ID,
|
||||||
|
&model.Summary,
|
||||||
|
&model.Details,
|
||||||
|
&model.Category,
|
||||||
|
&model.Area,
|
||||||
|
&model.Status,
|
||||||
|
&model.Priority,
|
||||||
|
&model.Confidence,
|
||||||
|
&model.ActionRequired,
|
||||||
|
&model.SourceType,
|
||||||
|
&model.SourceRef,
|
||||||
|
&model.ProjectID,
|
||||||
|
&model.RelatedThoughtID,
|
||||||
|
&model.RelatedSkillID,
|
||||||
|
&model.ReviewedBy,
|
||||||
|
&model.ReviewedAt,
|
||||||
|
&model.DuplicateOfLearningID,
|
||||||
|
&model.SupersedesLearningID,
|
||||||
|
&tags,
|
||||||
|
&model.CreatedAt,
|
||||||
|
&model.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return thoughttypes.Learning{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if tags == nil {
|
||||||
|
tags = []string{}
|
||||||
|
}
|
||||||
|
return learningFromModel(model, tags), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func nullableText(value string) *string {
|
||||||
|
trimmed := strings.TrimSpace(value)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &trimmed
|
||||||
|
}
|
||||||
|
|
||||||
|
func nullableTextPtr(value *string) *string {
|
||||||
|
if value == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
trimmed := strings.TrimSpace(*value)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &trimmed
|
||||||
|
}
|
||||||
@@ -2,11 +2,11 @@ package store
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -44,24 +44,26 @@ func (db *DB) LinkedThoughts(ctx context.Context, thoughtID uuid.UUID) ([]though
|
|||||||
links := make([]thoughttypes.LinkedThought, 0)
|
links := make([]thoughttypes.LinkedThought, 0)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var linked thoughttypes.LinkedThought
|
var linked thoughttypes.LinkedThought
|
||||||
var metadataBytes []byte
|
var model generatedmodels.ModelPublicThoughts
|
||||||
if err := rows.Scan(
|
if err := rows.Scan(
|
||||||
&linked.Thought.ID,
|
&model.GUID,
|
||||||
&linked.Thought.Content,
|
&model.Content,
|
||||||
&metadataBytes,
|
&model.Metadata,
|
||||||
&linked.Thought.ProjectID,
|
&model.ProjectID,
|
||||||
&linked.Thought.ArchivedAt,
|
&model.ArchivedAt,
|
||||||
&linked.Thought.CreatedAt,
|
&model.CreatedAt,
|
||||||
&linked.Thought.UpdatedAt,
|
&model.UpdatedAt,
|
||||||
&linked.Relation,
|
&linked.Relation,
|
||||||
&linked.Direction,
|
&linked.Direction,
|
||||||
&linked.CreatedAt,
|
&linked.CreatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return nil, fmt.Errorf("scan linked thought: %w", err)
|
return nil, fmt.Errorf("scan linked thought: %w", err)
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(metadataBytes, &linked.Thought.Metadata); err != nil {
|
thought, err := thoughtFromModel(model)
|
||||||
return nil, fmt.Errorf("decode linked thought metadata: %w", err)
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("map linked thought: %w", err)
|
||||||
}
|
}
|
||||||
|
linked.Thought = thought
|
||||||
links = append(links, linked)
|
links = append(links, linked)
|
||||||
}
|
}
|
||||||
if err := rows.Err(); err != nil {
|
if err := rows.Err(); err != nil {
|
||||||
|
|||||||
@@ -1,142 +1,137 @@
|
|||||||
package store
|
package store
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// "git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
// )
|
||||||
|
|
||||||
func (db *DB) AddMaintenanceTask(ctx context.Context, t ext.MaintenanceTask) (ext.MaintenanceTask, error) {
|
// func (db *DB) AddMaintenanceTask(ctx context.Context, t ext.MaintenanceTask) (ext.MaintenanceTask, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into maintenance_tasks (name, category, frequency_days, next_due, priority, notes)
|
// insert into maintenance_tasks (name, category, frequency_days, next_due, priority, notes)
|
||||||
values ($1, $2, $3, $4, $5, $6)
|
// values ($1, $2, $3, $4, $5, $6)
|
||||||
returning id, created_at, updated_at
|
// returning id, created_at, updated_at
|
||||||
`, t.Name, nullStr(t.Category), t.FrequencyDays, t.NextDue, t.Priority, nullStr(t.Notes))
|
// `, t.Name, nullStr(t.Category), t.FrequencyDays, t.NextDue, t.Priority, nullStr(t.Notes))
|
||||||
|
|
||||||
created := t
|
// created := t
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
// var model generatedmodels.ModelPublicMaintenanceTasks
|
||||||
return ext.MaintenanceTask{}, fmt.Errorf("insert maintenance task: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
}
|
// return ext.MaintenanceTask{}, fmt.Errorf("insert maintenance task: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) LogMaintenance(ctx context.Context, log ext.MaintenanceLog) (ext.MaintenanceLog, error) {
|
// func (db *DB) LogMaintenance(ctx context.Context, log ext.MaintenanceLog) (ext.MaintenanceLog, error) {
|
||||||
completedAt := log.CompletedAt
|
// completedAt := log.CompletedAt
|
||||||
if completedAt.IsZero() {
|
// if completedAt.IsZero() {
|
||||||
completedAt = time.Now()
|
// completedAt = time.Now()
|
||||||
}
|
// }
|
||||||
|
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into maintenance_logs (task_id, completed_at, performed_by, cost, notes, next_action)
|
// insert into maintenance_logs (task_id, completed_at, performed_by, cost, notes, next_action)
|
||||||
values ($1, $2, $3, $4, $5, $6)
|
// values ($1, $2, $3, $4, $5, $6)
|
||||||
returning id
|
// returning id
|
||||||
`, log.TaskID, completedAt, nullStr(log.PerformedBy), log.Cost, nullStr(log.Notes), nullStr(log.NextAction))
|
// `, log.TaskID, completedAt, nullStr(log.PerformedBy), log.Cost, nullStr(log.Notes), nullStr(log.NextAction))
|
||||||
|
|
||||||
created := log
|
// created := log
|
||||||
created.CompletedAt = completedAt
|
// created.CompletedAt = completedAt
|
||||||
if err := row.Scan(&created.ID); err != nil {
|
// var model generatedmodels.ModelPublicMaintenanceLogs
|
||||||
return ext.MaintenanceLog{}, fmt.Errorf("insert maintenance log: %w", err)
|
// if err := row.Scan(&model.ID); err != nil {
|
||||||
}
|
// return ext.MaintenanceLog{}, fmt.Errorf("insert maintenance log: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetUpcomingMaintenance(ctx context.Context, daysAhead int) ([]ext.MaintenanceTask, error) {
|
// func (db *DB) GetUpcomingMaintenance(ctx context.Context, daysAhead int) ([]ext.MaintenanceTask, error) {
|
||||||
if daysAhead <= 0 {
|
// if daysAhead <= 0 {
|
||||||
daysAhead = 30
|
// daysAhead = 30
|
||||||
}
|
// }
|
||||||
cutoff := time.Now().Add(time.Duration(daysAhead) * 24 * time.Hour)
|
// cutoff := time.Now().Add(time.Duration(daysAhead) * 24 * time.Hour)
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select id, name, category, frequency_days, last_completed, next_due, priority, notes, created_at, updated_at
|
// select id, name, category, frequency_days, last_completed, next_due, priority, notes, created_at, updated_at
|
||||||
from maintenance_tasks
|
// from maintenance_tasks
|
||||||
where next_due <= $1 or next_due is null
|
// where next_due <= $1 or next_due is null
|
||||||
order by next_due asc nulls last, priority desc
|
// order by next_due asc nulls last, priority desc
|
||||||
`, cutoff)
|
// `, cutoff)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("get upcoming maintenance: %w", err)
|
// return nil, fmt.Errorf("get upcoming maintenance: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
return scanMaintenanceTasks(rows)
|
// tasks := make([]ext.MaintenanceTask, 0)
|
||||||
}
|
// for rows.Next() {
|
||||||
|
// var model generatedmodels.ModelPublicMaintenanceTasks
|
||||||
|
// if err := rows.Scan(&model.ID, &model.Name, &model.Category, &model.FrequencyDays, &model.LastCompleted, &model.NextDue, &model.Priority, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
|
// return nil, fmt.Errorf("scan maintenance task: %w", err)
|
||||||
|
// }
|
||||||
|
// tasks = append(tasks, maintenanceTaskFromModel(model))
|
||||||
|
// }
|
||||||
|
// return tasks, rows.Err()
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) SearchMaintenanceHistory(ctx context.Context, query, category string, start, end *time.Time) ([]ext.MaintenanceLogWithTask, error) {
|
// func (db *DB) SearchMaintenanceHistory(ctx context.Context, query, category string, start, end *time.Time) ([]ext.MaintenanceLogWithTask, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
conditions := []string{}
|
// conditions := []string{}
|
||||||
|
|
||||||
if q := strings.TrimSpace(query); q != "" {
|
// if q := strings.TrimSpace(query); q != "" {
|
||||||
args = append(args, "%"+q+"%")
|
// args = append(args, "%"+q+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("(mt.name ILIKE $%d OR ml.notes ILIKE $%d)", len(args), len(args)))
|
// conditions = append(conditions, fmt.Sprintf("(mt.name ILIKE $%d OR ml.notes ILIKE $%d)", len(args), len(args)))
|
||||||
}
|
// }
|
||||||
if c := strings.TrimSpace(category); c != "" {
|
// if c := strings.TrimSpace(category); c != "" {
|
||||||
args = append(args, c)
|
// args = append(args, c)
|
||||||
conditions = append(conditions, fmt.Sprintf("mt.category = $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("mt.category = $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if start != nil {
|
// if start != nil {
|
||||||
args = append(args, *start)
|
// args = append(args, *start)
|
||||||
conditions = append(conditions, fmt.Sprintf("ml.completed_at >= $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("ml.completed_at >= $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if end != nil {
|
// if end != nil {
|
||||||
args = append(args, *end)
|
// args = append(args, *end)
|
||||||
conditions = append(conditions, fmt.Sprintf("ml.completed_at <= $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("ml.completed_at <= $%d", len(args)))
|
||||||
}
|
// }
|
||||||
|
|
||||||
q := `
|
// q := `
|
||||||
select ml.id, ml.task_id, ml.completed_at, ml.performed_by, ml.cost, ml.notes, ml.next_action,
|
// select ml.id, ml.task_id, ml.completed_at, ml.performed_by, ml.cost, ml.notes, ml.next_action,
|
||||||
mt.name, mt.category
|
// mt.name, mt.category
|
||||||
from maintenance_logs ml
|
// from maintenance_logs ml
|
||||||
join maintenance_tasks mt on mt.id = ml.task_id
|
// join maintenance_tasks mt on mt.id = ml.task_id
|
||||||
`
|
// `
|
||||||
if len(conditions) > 0 {
|
// if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
// q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
// }
|
||||||
q += " order by ml.completed_at desc"
|
// q += " order by ml.completed_at desc"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("search maintenance history: %w", err)
|
// return nil, fmt.Errorf("search maintenance history: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var logs []ext.MaintenanceLogWithTask
|
// var logs []ext.MaintenanceLogWithTask
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var l ext.MaintenanceLogWithTask
|
// var model generatedmodels.ModelPublicMaintenanceLogs
|
||||||
var performedBy, notes, nextAction, taskCategory *string
|
// var taskName, taskCategory string
|
||||||
if err := rows.Scan(
|
// if err := rows.Scan(
|
||||||
&l.ID, &l.TaskID, &l.CompletedAt, &performedBy, &l.Cost, ¬es, &nextAction,
|
// &model.ID, &model.TaskID, &model.CompletedAt, &model.PerformedBy, &model.Cost, &model.Notes, &model.NextAction,
|
||||||
&l.TaskName, &taskCategory,
|
// &taskName, &taskCategory,
|
||||||
); err != nil {
|
// ); err != nil {
|
||||||
return nil, fmt.Errorf("scan maintenance log: %w", err)
|
// return nil, fmt.Errorf("scan maintenance log: %w", err)
|
||||||
}
|
// }
|
||||||
l.PerformedBy = strVal(performedBy)
|
// l := ext.MaintenanceLogWithTask{
|
||||||
l.Notes = strVal(notes)
|
// MaintenanceLog: maintenanceLogFromModel(model),
|
||||||
l.NextAction = strVal(nextAction)
|
// TaskName: taskName,
|
||||||
l.TaskCategory = strVal(taskCategory)
|
// TaskCategory: taskCategory,
|
||||||
logs = append(logs, l)
|
// }
|
||||||
}
|
// logs = append(logs, l)
|
||||||
return logs, rows.Err()
|
// }
|
||||||
}
|
// return logs, rows.Err()
|
||||||
|
// }
|
||||||
func scanMaintenanceTasks(rows interface {
|
|
||||||
Next() bool
|
|
||||||
Scan(...any) error
|
|
||||||
Err() error
|
|
||||||
Close()
|
|
||||||
}) ([]ext.MaintenanceTask, error) {
|
|
||||||
defer rows.Close()
|
|
||||||
var tasks []ext.MaintenanceTask
|
|
||||||
for rows.Next() {
|
|
||||||
var t ext.MaintenanceTask
|
|
||||||
var category, notes *string
|
|
||||||
if err := rows.Scan(&t.ID, &t.Name, &category, &t.FrequencyDays, &t.LastCompleted, &t.NextDue, &t.Priority, ¬es, &t.CreatedAt, &t.UpdatedAt); err != nil {
|
|
||||||
return nil, fmt.Errorf("scan maintenance task: %w", err)
|
|
||||||
}
|
|
||||||
t.Category = strVal(category)
|
|
||||||
t.Notes = strVal(notes)
|
|
||||||
tasks = append(tasks, t)
|
|
||||||
}
|
|
||||||
return tasks, rows.Err()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,289 +1,280 @@
|
|||||||
package store
|
package store
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"encoding/json"
|
// "encoding/json"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
|
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// "git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
// )
|
||||||
|
|
||||||
func (db *DB) AddRecipe(ctx context.Context, r ext.Recipe) (ext.Recipe, error) {
|
// func (db *DB) AddRecipe(ctx context.Context, r ext.Recipe) (ext.Recipe, error) {
|
||||||
ingredients, err := json.Marshal(r.Ingredients)
|
// ingredients, err := json.Marshal(r.Ingredients)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.Recipe{}, fmt.Errorf("marshal ingredients: %w", err)
|
// return ext.Recipe{}, fmt.Errorf("marshal ingredients: %w", err)
|
||||||
}
|
// }
|
||||||
instructions, err := json.Marshal(r.Instructions)
|
// instructions, err := json.Marshal(r.Instructions)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.Recipe{}, fmt.Errorf("marshal instructions: %w", err)
|
// return ext.Recipe{}, fmt.Errorf("marshal instructions: %w", err)
|
||||||
}
|
// }
|
||||||
if r.Tags == nil {
|
// if r.Tags == nil {
|
||||||
r.Tags = []string{}
|
// r.Tags = []string{}
|
||||||
}
|
// }
|
||||||
|
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into recipes (name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags, rating, notes)
|
// insert into recipes (name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags, rating, notes)
|
||||||
values ($1, $2, $3, $4, $5, $6::jsonb, $7::jsonb, $8, $9, $10)
|
// values ($1, $2, $3, $4, $5, $6::jsonb, $7::jsonb, $8, $9, $10)
|
||||||
returning id, created_at, updated_at
|
// returning id, created_at, updated_at
|
||||||
`, r.Name, nullStr(r.Cuisine), r.PrepTimeMinutes, r.CookTimeMinutes, r.Servings,
|
// `, r.Name, nullStr(r.Cuisine), r.PrepTimeMinutes, r.CookTimeMinutes, r.Servings,
|
||||||
ingredients, instructions, r.Tags, r.Rating, nullStr(r.Notes))
|
// ingredients, instructions, r.Tags, r.Rating, nullStr(r.Notes))
|
||||||
|
|
||||||
created := r
|
// created := r
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
// var model generatedmodels.ModelPublicRecipes
|
||||||
return ext.Recipe{}, fmt.Errorf("insert recipe: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
}
|
// return ext.Recipe{}, fmt.Errorf("insert recipe: %w", err)
|
||||||
return created, nil
|
// }
|
||||||
}
|
// created.ID = model.ID.UUID()
|
||||||
|
// created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return created, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) SearchRecipes(ctx context.Context, query, cuisine string, tags []string, ingredient string) ([]ext.Recipe, error) {
|
// func (db *DB) SearchRecipes(ctx context.Context, query, cuisine string, tags []string, ingredient string) ([]ext.Recipe, error) {
|
||||||
args := []any{}
|
// args := []any{}
|
||||||
conditions := []string{}
|
// conditions := []string{}
|
||||||
|
|
||||||
if q := strings.TrimSpace(query); q != "" {
|
// if q := strings.TrimSpace(query); q != "" {
|
||||||
args = append(args, "%"+q+"%")
|
// args = append(args, "%"+q+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("name ILIKE $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("name ILIKE $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if c := strings.TrimSpace(cuisine); c != "" {
|
// if c := strings.TrimSpace(cuisine); c != "" {
|
||||||
args = append(args, c)
|
// args = append(args, c)
|
||||||
conditions = append(conditions, fmt.Sprintf("cuisine = $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("cuisine = $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if len(tags) > 0 {
|
// if len(tags) > 0 {
|
||||||
args = append(args, tags)
|
// args = append(args, tags)
|
||||||
conditions = append(conditions, fmt.Sprintf("tags @> $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("tags @> $%d", len(args)))
|
||||||
}
|
// }
|
||||||
if ing := strings.TrimSpace(ingredient); ing != "" {
|
// if ing := strings.TrimSpace(ingredient); ing != "" {
|
||||||
args = append(args, "%"+ing+"%")
|
// args = append(args, "%"+ing+"%")
|
||||||
conditions = append(conditions, fmt.Sprintf("ingredients::text ILIKE $%d", len(args)))
|
// conditions = append(conditions, fmt.Sprintf("ingredients::text ILIKE $%d", len(args)))
|
||||||
}
|
// }
|
||||||
|
|
||||||
q := `select id, name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags, rating, notes, created_at, updated_at from recipes`
|
// q := `select id, name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags::text[], rating, notes, created_at, updated_at from recipes`
|
||||||
if len(conditions) > 0 {
|
// if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
// q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
// }
|
||||||
q += " order by name"
|
// q += " order by name"
|
||||||
|
|
||||||
rows, err := db.pool.Query(ctx, q, args...)
|
// rows, err := db.pool.Query(ctx, q, args...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("search recipes: %w", err)
|
// return nil, fmt.Errorf("search recipes: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var recipes []ext.Recipe
|
// var recipes []ext.Recipe
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
r, err := scanRecipeRow(rows)
|
// r, err := scanRecipeRow(rows)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, err
|
// return nil, err
|
||||||
}
|
// }
|
||||||
recipes = append(recipes, r)
|
// recipes = append(recipes, r)
|
||||||
}
|
// }
|
||||||
return recipes, rows.Err()
|
// return recipes, rows.Err()
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) GetRecipe(ctx context.Context, id uuid.UUID) (ext.Recipe, error) {
|
// func (db *DB) GetRecipe(ctx context.Context, id uuid.UUID) (ext.Recipe, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
select id, name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags, rating, notes, created_at, updated_at
|
// select id, name, cuisine, prep_time_minutes, cook_time_minutes, servings, ingredients, instructions, tags::text[], rating, notes, created_at, updated_at
|
||||||
from recipes where id = $1
|
// from recipes where id = $1
|
||||||
`, id)
|
// `, id)
|
||||||
|
|
||||||
var r ext.Recipe
|
// var model generatedmodels.ModelPublicRecipes
|
||||||
var cuisine, notes *string
|
// var tags []string
|
||||||
var ingredientsBytes, instructionsBytes []byte
|
// if err := row.Scan(&model.ID, &model.Name, &model.Cuisine, &model.PrepTimeMinutes, &model.CookTimeMinutes, &model.Servings,
|
||||||
if err := row.Scan(&r.ID, &r.Name, &cuisine, &r.PrepTimeMinutes, &r.CookTimeMinutes, &r.Servings,
|
// &model.Ingredients, &model.Instructions, &tags, &model.Rating, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
&ingredientsBytes, &instructionsBytes, &r.Tags, &r.Rating, ¬es, &r.CreatedAt, &r.UpdatedAt); err != nil {
|
// return ext.Recipe{}, fmt.Errorf("get recipe: %w", err)
|
||||||
return ext.Recipe{}, fmt.Errorf("get recipe: %w", err)
|
// }
|
||||||
}
|
// if tags == nil {
|
||||||
r.Cuisine = strVal(cuisine)
|
// tags = []string{}
|
||||||
r.Notes = strVal(notes)
|
// }
|
||||||
if r.Tags == nil {
|
// return recipeFromModel(model, tags), nil
|
||||||
r.Tags = []string{}
|
// }
|
||||||
}
|
|
||||||
if err := json.Unmarshal(ingredientsBytes, &r.Ingredients); err != nil {
|
|
||||||
r.Ingredients = []ext.Ingredient{}
|
|
||||||
}
|
|
||||||
if err := json.Unmarshal(instructionsBytes, &r.Instructions); err != nil {
|
|
||||||
r.Instructions = []string{}
|
|
||||||
}
|
|
||||||
return r, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (db *DB) UpdateRecipe(ctx context.Context, id uuid.UUID, r ext.Recipe) (ext.Recipe, error) {
|
// func (db *DB) UpdateRecipe(ctx context.Context, id uuid.UUID, r ext.Recipe) (ext.Recipe, error) {
|
||||||
ingredients, err := json.Marshal(r.Ingredients)
|
// ingredients, err := json.Marshal(r.Ingredients)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.Recipe{}, fmt.Errorf("marshal ingredients: %w", err)
|
// return ext.Recipe{}, fmt.Errorf("marshal ingredients: %w", err)
|
||||||
}
|
// }
|
||||||
instructions, err := json.Marshal(r.Instructions)
|
// instructions, err := json.Marshal(r.Instructions)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.Recipe{}, fmt.Errorf("marshal instructions: %w", err)
|
// return ext.Recipe{}, fmt.Errorf("marshal instructions: %w", err)
|
||||||
}
|
// }
|
||||||
if r.Tags == nil {
|
// if r.Tags == nil {
|
||||||
r.Tags = []string{}
|
// r.Tags = []string{}
|
||||||
}
|
// }
|
||||||
|
|
||||||
_, err = db.pool.Exec(ctx, `
|
// _, err = db.pool.Exec(ctx, `
|
||||||
update recipes set
|
// update recipes set
|
||||||
name = $2, cuisine = $3, prep_time_minutes = $4, cook_time_minutes = $5,
|
// name = $2, cuisine = $3, prep_time_minutes = $4, cook_time_minutes = $5,
|
||||||
servings = $6, ingredients = $7::jsonb, instructions = $8::jsonb,
|
// servings = $6, ingredients = $7::jsonb, instructions = $8::jsonb,
|
||||||
tags = $9, rating = $10, notes = $11
|
// tags = $9, rating = $10, notes = $11
|
||||||
where id = $1
|
// where id = $1
|
||||||
`, id, r.Name, nullStr(r.Cuisine), r.PrepTimeMinutes, r.CookTimeMinutes, r.Servings,
|
// `, id, r.Name, nullStr(r.Cuisine), r.PrepTimeMinutes, r.CookTimeMinutes, r.Servings,
|
||||||
ingredients, instructions, r.Tags, r.Rating, nullStr(r.Notes))
|
// ingredients, instructions, r.Tags, r.Rating, nullStr(r.Notes))
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ext.Recipe{}, fmt.Errorf("update recipe: %w", err)
|
// return ext.Recipe{}, fmt.Errorf("update recipe: %w", err)
|
||||||
}
|
// }
|
||||||
return db.GetRecipe(ctx, id)
|
// return db.GetRecipe(ctx, id)
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (db *DB) CreateMealPlan(ctx context.Context, weekStart time.Time, entries []ext.MealPlanInput) ([]ext.MealPlanEntry, error) {
|
// func (db *DB) CreateMealPlan(ctx context.Context, weekStart time.Time, entries []ext.MealPlanInput) ([]ext.MealPlanEntry, error) {
|
||||||
if _, err := db.pool.Exec(ctx, `delete from meal_plans where week_start = $1`, weekStart); err != nil {
|
// if _, err := db.pool.Exec(ctx, `delete from meal_plans where week_start = $1`, weekStart); err != nil {
|
||||||
return nil, fmt.Errorf("clear meal plan: %w", err)
|
// return nil, fmt.Errorf("clear meal plan: %w", err)
|
||||||
}
|
// }
|
||||||
|
|
||||||
var results []ext.MealPlanEntry
|
// var results []ext.MealPlanEntry
|
||||||
for _, e := range entries {
|
// for _, e := range entries {
|
||||||
row := db.pool.QueryRow(ctx, `
|
// row := db.pool.QueryRow(ctx, `
|
||||||
insert into meal_plans (week_start, day_of_week, meal_type, recipe_id, custom_meal, servings, notes)
|
// insert into meal_plans (week_start, day_of_week, meal_type, recipe_id, custom_meal, servings, notes)
|
||||||
values ($1, $2, $3, $4, $5, $6, $7)
|
// values ($1, $2, $3, $4, $5, $6, $7)
|
||||||
returning id, created_at
|
// returning id, created_at
|
||||||
`, weekStart, e.DayOfWeek, e.MealType, e.RecipeID, nullStr(e.CustomMeal), e.Servings, nullStr(e.Notes))
|
// `, weekStart, e.DayOfWeek, e.MealType, e.RecipeID, nullStr(e.CustomMeal), e.Servings, nullStr(e.Notes))
|
||||||
|
|
||||||
entry := ext.MealPlanEntry{
|
// entry := ext.MealPlanEntry{
|
||||||
WeekStart: weekStart,
|
// WeekStart: weekStart,
|
||||||
DayOfWeek: e.DayOfWeek,
|
// DayOfWeek: e.DayOfWeek,
|
||||||
MealType: e.MealType,
|
// MealType: e.MealType,
|
||||||
RecipeID: e.RecipeID,
|
// RecipeID: e.RecipeID,
|
||||||
CustomMeal: e.CustomMeal,
|
// CustomMeal: e.CustomMeal,
|
||||||
Servings: e.Servings,
|
// Servings: e.Servings,
|
||||||
Notes: e.Notes,
|
// Notes: e.Notes,
|
||||||
}
|
// }
|
||||||
if err := row.Scan(&entry.ID, &entry.CreatedAt); err != nil {
|
// var model generatedmodels.ModelPublicMealPlans
|
||||||
return nil, fmt.Errorf("insert meal plan entry: %w", err)
|
// if err := row.Scan(&model.ID, &model.CreatedAt); err != nil {
|
||||||
}
|
// return nil, fmt.Errorf("insert meal plan entry: %w", err)
|
||||||
results = append(results, entry)
|
// }
|
||||||
}
|
// entry.ID = model.ID.UUID()
|
||||||
return results, nil
|
// entry.CreatedAt = model.CreatedAt.Time()
|
||||||
}
|
// results = append(results, entry)
|
||||||
|
// }
|
||||||
|
// return results, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func (db *DB) GetMealPlan(ctx context.Context, weekStart time.Time) ([]ext.MealPlanEntry, error) {
|
// func (db *DB) GetMealPlan(ctx context.Context, weekStart time.Time) ([]ext.MealPlanEntry, error) {
|
||||||
rows, err := db.pool.Query(ctx, `
|
// rows, err := db.pool.Query(ctx, `
|
||||||
select mp.id, mp.week_start, mp.day_of_week, mp.meal_type, mp.recipe_id, r.name, mp.custom_meal, mp.servings, mp.notes, mp.created_at
|
// select mp.id, mp.week_start, mp.day_of_week, mp.meal_type, mp.recipe_id, r.name, mp.custom_meal, mp.servings, mp.notes, mp.created_at
|
||||||
from meal_plans mp
|
// from meal_plans mp
|
||||||
left join recipes r on r.id = mp.recipe_id
|
// left join recipes r on r.id = mp.recipe_id
|
||||||
where mp.week_start = $1
|
// where mp.week_start = $1
|
||||||
order by
|
// order by
|
||||||
case mp.day_of_week
|
// case mp.day_of_week
|
||||||
when 'monday' then 1 when 'tuesday' then 2 when 'wednesday' then 3
|
// when 'monday' then 1 when 'tuesday' then 2 when 'wednesday' then 3
|
||||||
when 'thursday' then 4 when 'friday' then 5 when 'saturday' then 6
|
// when 'thursday' then 4 when 'friday' then 5 when 'saturday' then 6
|
||||||
when 'sunday' then 7 else 8
|
// when 'sunday' then 7 else 8
|
||||||
end,
|
// end,
|
||||||
case mp.meal_type
|
// case mp.meal_type
|
||||||
when 'breakfast' then 1 when 'lunch' then 2 when 'dinner' then 3
|
// when 'breakfast' then 1 when 'lunch' then 2 when 'dinner' then 3
|
||||||
when 'snack' then 4 else 5
|
// when 'snack' then 4 else 5
|
||||||
end
|
// end
|
||||||
`, weekStart)
|
// `, weekStart)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("get meal plan: %w", err)
|
// return nil, fmt.Errorf("get meal plan: %w", err)
|
||||||
}
|
// }
|
||||||
defer rows.Close()
|
// defer rows.Close()
|
||||||
|
|
||||||
var entries []ext.MealPlanEntry
|
// var entries []ext.MealPlanEntry
|
||||||
for rows.Next() {
|
// for rows.Next() {
|
||||||
var e ext.MealPlanEntry
|
// var model generatedmodels.ModelPublicMealPlans
|
||||||
var recipeName, customMeal, notes *string
|
// var recipeName *string
|
||||||
if err := rows.Scan(&e.ID, &e.WeekStart, &e.DayOfWeek, &e.MealType, &e.RecipeID, &recipeName, &customMeal, &e.Servings, ¬es, &e.CreatedAt); err != nil {
|
// if err := rows.Scan(&model.ID, &model.WeekStart, &model.DayOfWeek, &model.MealType, &model.RecipeID, &recipeName, &model.CustomMeal, &model.Servings, &model.Notes, &model.CreatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan meal plan entry: %w", err)
|
// return nil, fmt.Errorf("scan meal plan entry: %w", err)
|
||||||
}
|
// }
|
||||||
e.RecipeName = strVal(recipeName)
|
// entries = append(entries, mealPlanEntryFromModel(model, strVal(recipeName)))
|
||||||
e.CustomMeal = strVal(customMeal)
|
// }
|
||||||
e.Notes = strVal(notes)
|
// return entries, rows.Err()
|
||||||
entries = append(entries, e)
|
// }
|
||||||
}
|
|
||||||
return entries, rows.Err()
|
|
||||||
}
|
|
||||||
|
|
||||||
// GenerateShoppingList builds and upserts the shopping list for the week
// starting at weekStart by aggregating the ingredients of every recipe
// referenced by that week's meal plan entries.
func (db *DB) GenerateShoppingList(ctx context.Context, weekStart time.Time) (ext.ShoppingList, error) {
	entries, err := db.GetMealPlan(ctx, weekStart)
	if err != nil {
		return ext.ShoppingList{}, err
	}

	// Collect the distinct recipe IDs referenced by the week's entries.
	recipeIDs := map[uuid.UUID]bool{}
	for _, e := range entries {
		if e.RecipeID != nil {
			recipeIDs[*e.RecipeID] = true
		}
	}

	// Aggregate ingredients case-insensitively by name. Quantities are
	// free-form strings, so repeats are joined with "+" rather than summed.
	aggregated := map[string]*ext.ShoppingItem{}
	for id := range recipeIDs {
		recipe, err := db.GetRecipe(ctx, id)
		if err != nil {
			// Best-effort: a missing or unreadable recipe is skipped, not fatal.
			continue
		}
		for _, ing := range recipe.Ingredients {
			key := strings.ToLower(ing.Name)
			if existing, ok := aggregated[key]; ok {
				if ing.Quantity != "" {
					existing.Quantity += "+" + ing.Quantity
				}
			} else {
				recipeIDCopy := id // stable copy before storing a pointer
				aggregated[key] = &ext.ShoppingItem{
					Name:      ing.Name,
					Quantity:  ing.Quantity,
					Unit:      ing.Unit,
					Purchased: false,
					RecipeID:  &recipeIDCopy,
				}
			}
		}
	}

	// NOTE: map iteration order is random, so item order is nondeterministic.
	items := make([]ext.ShoppingItem, 0, len(aggregated))
	for _, item := range aggregated {
		items = append(items, *item)
	}

	itemsJSON, err := json.Marshal(items)
	if err != nil {
		return ext.ShoppingList{}, fmt.Errorf("marshal shopping items: %w", err)
	}

	// One list per week: insert or replace the items for weekStart.
	row := db.pool.QueryRow(ctx, `
		insert into shopping_lists (week_start, items)
		values ($1, $2::jsonb)
		on conflict (week_start) do update set items = excluded.items, updated_at = now()
		returning id, created_at, updated_at
	`, weekStart, itemsJSON)

	list := ext.ShoppingList{WeekStart: weekStart, Items: items}
	if err := row.Scan(&list.ID, &list.CreatedAt, &list.UpdatedAt); err != nil {
		return ext.ShoppingList{}, fmt.Errorf("upsert shopping list: %w", err)
	}
	return list, nil
}
|
// list.ID = model.ID.UUID()
|
||||||
|
// list.CreatedAt = model.CreatedAt.Time()
|
||||||
|
// list.UpdatedAt = model.UpdatedAt.Time()
|
||||||
|
// return list, nil
|
||||||
|
// }
|
||||||
|
|
||||||
func scanRecipeRow(rows interface{ Scan(...any) error }) (ext.Recipe, error) {
|
// func scanRecipeRow(rows interface{ Scan(...any) error }) (ext.Recipe, error) {
|
||||||
var r ext.Recipe
|
// var model generatedmodels.ModelPublicRecipes
|
||||||
var cuisine, notes *string
|
// var tags []string
|
||||||
var ingredientsBytes, instructionsBytes []byte
|
// if err := rows.Scan(&model.ID, &model.Name, &model.Cuisine, &model.PrepTimeMinutes, &model.CookTimeMinutes, &model.Servings,
|
||||||
if err := rows.Scan(&r.ID, &r.Name, &cuisine, &r.PrepTimeMinutes, &r.CookTimeMinutes, &r.Servings,
|
// &model.Ingredients, &model.Instructions, &tags, &model.Rating, &model.Notes, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
&ingredientsBytes, &instructionsBytes, &r.Tags, &r.Rating, ¬es, &r.CreatedAt, &r.UpdatedAt); err != nil {
|
// return ext.Recipe{}, fmt.Errorf("scan recipe: %w", err)
|
||||||
return ext.Recipe{}, fmt.Errorf("scan recipe: %w", err)
|
// }
|
||||||
}
|
// if tags == nil {
|
||||||
r.Cuisine = strVal(cuisine)
|
// tags = []string{}
|
||||||
r.Notes = strVal(notes)
|
// }
|
||||||
if r.Tags == nil {
|
// return recipeFromModel(model, tags), nil
|
||||||
r.Tags = []string{}
|
// }
|
||||||
}
|
|
||||||
if err := json.Unmarshal(ingredientsBytes, &r.Ingredients); err != nil {
|
|
||||||
r.Ingredients = []ext.Ingredient{}
|
|
||||||
}
|
|
||||||
if err := json.Unmarshal(instructionsBytes, &r.Instructions); err != nil {
|
|
||||||
r.Instructions = []string{}
|
|
||||||
}
|
|
||||||
return r, nil
|
|
||||||
}
|
|
||||||
|
|||||||
537
internal/store/model_adapters.go
Normal file
537
internal/store/model_adapters.go
Normal file
@@ -0,0 +1,537 @@
|
|||||||
|
package store
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
|
ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// projectFromModel converts a generated projects row into the external
// ext.Project representation.
func projectFromModel(m generatedmodels.ModelPublicProjects) ext.Project {
	return ext.Project{
		ID:           m.GUID.UUID(),
		Name:         m.Name.String(),
		Description:  m.Description.String(),
		CreatedAt:    m.CreatedAt.Time(),
		LastActiveAt: m.LastActiveAt.Time(),
	}
}
|
||||||
|
|
||||||
|
func thoughtFromModel(m generatedmodels.ModelPublicThoughts) (ext.Thought, error) {
|
||||||
|
var metadata ext.ThoughtMetadata
|
||||||
|
if len(m.Metadata) > 0 {
|
||||||
|
if err := json.Unmarshal(m.Metadata, &metadata); err != nil {
|
||||||
|
return ext.Thought{}, fmt.Errorf("decode thought metadata: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var projectID *uuid.UUID
|
||||||
|
if m.ProjectID.Valid {
|
||||||
|
id := m.ProjectID.UUID()
|
||||||
|
projectID = &id
|
||||||
|
}
|
||||||
|
|
||||||
|
var archivedAt *time.Time
|
||||||
|
if m.ArchivedAt.Valid {
|
||||||
|
t := m.ArchivedAt.Time()
|
||||||
|
archivedAt = &t
|
||||||
|
}
|
||||||
|
|
||||||
|
return ext.Thought{
|
||||||
|
ID: m.GUID.UUID(),
|
||||||
|
Content: m.Content.String(),
|
||||||
|
Metadata: metadata,
|
||||||
|
ProjectID: projectID,
|
||||||
|
ArchivedAt: archivedAt,
|
||||||
|
CreatedAt: m.CreatedAt.Time(),
|
||||||
|
UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func storedFileFromModel(m generatedmodels.ModelPublicStoredFiles) ext.StoredFile {
|
||||||
|
var thoughtID *uuid.UUID
|
||||||
|
if m.ThoughtID.Valid {
|
||||||
|
id := m.ThoughtID.UUID()
|
||||||
|
thoughtID = &id
|
||||||
|
}
|
||||||
|
|
||||||
|
var projectID *uuid.UUID
|
||||||
|
if m.ProjectID.Valid {
|
||||||
|
id := m.ProjectID.UUID()
|
||||||
|
projectID = &id
|
||||||
|
}
|
||||||
|
|
||||||
|
return ext.StoredFile{
|
||||||
|
ID: m.GUID.UUID(),
|
||||||
|
ThoughtID: thoughtID,
|
||||||
|
ProjectID: projectID,
|
||||||
|
Name: m.Name.String(),
|
||||||
|
MediaType: m.MediaType.String(),
|
||||||
|
Kind: m.Kind.String(),
|
||||||
|
Encoding: m.Encoding.String(),
|
||||||
|
SizeBytes: m.SizeBytes,
|
||||||
|
SHA256: m.Sha256.String(),
|
||||||
|
Content: m.Content,
|
||||||
|
CreatedAt: m.CreatedAt.Time(),
|
||||||
|
UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// func maintenanceTaskFromModel(m generatedmodels.ModelPublicMaintenanceTasks) ext.MaintenanceTask {
|
||||||
|
// var frequencyDays *int
|
||||||
|
// if m.FrequencyDays.Valid {
|
||||||
|
// n := int(m.FrequencyDays.Int64())
|
||||||
|
// frequencyDays = &n
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var lastCompleted *time.Time
|
||||||
|
// if m.LastCompleted.Valid {
|
||||||
|
// t := m.LastCompleted.Time()
|
||||||
|
// lastCompleted = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var nextDue *time.Time
|
||||||
|
// if m.NextDue.Valid {
|
||||||
|
// t := m.NextDue.Time()
|
||||||
|
// nextDue = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.MaintenanceTask{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// Category: m.Category.String(),
|
||||||
|
// FrequencyDays: frequencyDays,
|
||||||
|
// LastCompleted: lastCompleted,
|
||||||
|
// NextDue: nextDue,
|
||||||
|
// Priority: m.Priority.String(),
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func maintenanceLogFromModel(m generatedmodels.ModelPublicMaintenanceLogs) ext.MaintenanceLog {
|
||||||
|
// var cost *float64
|
||||||
|
// if m.Cost.Valid {
|
||||||
|
// v := m.Cost.Float64()
|
||||||
|
// cost = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.MaintenanceLog{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// TaskID: m.TaskID.UUID(),
|
||||||
|
// CompletedAt: m.CompletedAt.Time(),
|
||||||
|
// PerformedBy: m.PerformedBy.String(),
|
||||||
|
// Cost: cost,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// NextAction: m.NextAction.String(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func householdItemFromModel(m generatedmodels.ModelPublicHouseholdItems) ext.HouseholdItem {
|
||||||
|
// details := map[string]any{}
|
||||||
|
// if len(m.Details) > 0 {
|
||||||
|
// if err := json.Unmarshal(m.Details, &details); err != nil {
|
||||||
|
// details = map[string]any{}
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.HouseholdItem{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// Category: m.Category.String(),
|
||||||
|
// Location: m.Location.String(),
|
||||||
|
// Details: details,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func householdVendorFromModel(m generatedmodels.ModelPublicHouseholdVendors) ext.HouseholdVendor {
|
||||||
|
// var rating *int
|
||||||
|
// if m.Rating.Valid {
|
||||||
|
// v := int(m.Rating.Int64())
|
||||||
|
// rating = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var lastUsed *time.Time
|
||||||
|
// if m.LastUsed.Valid {
|
||||||
|
// t := m.LastUsed.Time()
|
||||||
|
// lastUsed = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.HouseholdVendor{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// ServiceType: m.ServiceType.String(),
|
||||||
|
// Phone: m.Phone.String(),
|
||||||
|
// Email: m.Email.String(),
|
||||||
|
// Website: m.Website.String(),
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// Rating: rating,
|
||||||
|
// LastUsed: lastUsed,
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func familyMemberFromModel(m generatedmodels.ModelPublicFamilyMembers) ext.FamilyMember {
|
||||||
|
// var birthDate *time.Time
|
||||||
|
// if m.BirthDate.Valid {
|
||||||
|
// t := m.BirthDate.Time()
|
||||||
|
// birthDate = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.FamilyMember{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// Relationship: m.Relationship.String(),
|
||||||
|
// BirthDate: birthDate,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func activityFromModel(m generatedmodels.ModelPublicActivities, memberName string) ext.Activity {
|
||||||
|
// var familyMemberID *uuid.UUID
|
||||||
|
// if m.FamilyMemberID.Valid {
|
||||||
|
// id := m.FamilyMemberID.UUID()
|
||||||
|
// familyMemberID = &id
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var startDate *time.Time
|
||||||
|
// if m.StartDate.Valid {
|
||||||
|
// t := m.StartDate.Time()
|
||||||
|
// startDate = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var endDate *time.Time
|
||||||
|
// if m.EndDate.Valid {
|
||||||
|
// t := m.EndDate.Time()
|
||||||
|
// endDate = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.Activity{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// FamilyMemberID: familyMemberID,
|
||||||
|
// MemberName: memberName,
|
||||||
|
// Title: m.Title.String(),
|
||||||
|
// ActivityType: m.ActivityType.String(),
|
||||||
|
// DayOfWeek: m.DayOfWeek.String(),
|
||||||
|
// StartTime: m.StartTime.String(),
|
||||||
|
// EndTime: m.EndTime.String(),
|
||||||
|
// StartDate: startDate,
|
||||||
|
// EndDate: endDate,
|
||||||
|
// Location: m.Location.String(),
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func importantDateFromModel(m generatedmodels.ModelPublicImportantDates, memberName string) ext.ImportantDate {
|
||||||
|
// var familyMemberID *uuid.UUID
|
||||||
|
// if m.FamilyMemberID.Valid {
|
||||||
|
// id := m.FamilyMemberID.UUID()
|
||||||
|
// familyMemberID = &id
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.ImportantDate{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// FamilyMemberID: familyMemberID,
|
||||||
|
// MemberName: memberName,
|
||||||
|
// Title: m.Title.String(),
|
||||||
|
// DateValue: m.DateValue.Time(),
|
||||||
|
// RecurringYearly: m.RecurringYearly,
|
||||||
|
// ReminderDaysBefore: int(m.ReminderDaysBefore),
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func professionalContactFromModel(m generatedmodels.ModelPublicProfessionalContacts, tags []string) ext.ProfessionalContact {
|
||||||
|
// var lastContacted *time.Time
|
||||||
|
// if m.LastContacted.Valid {
|
||||||
|
// t := m.LastContacted.Time()
|
||||||
|
// lastContacted = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var followUpDate *time.Time
|
||||||
|
// if m.FollowUpDate.Valid {
|
||||||
|
// t := m.FollowUpDate.Time()
|
||||||
|
// followUpDate = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.ProfessionalContact{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// Company: m.Company.String(),
|
||||||
|
// Title: m.Title.String(),
|
||||||
|
// Email: m.Email.String(),
|
||||||
|
// Phone: m.Phone.String(),
|
||||||
|
// LinkedInURL: m.LinkedinURL.String(),
|
||||||
|
// HowWeMet: m.HowWeMet.String(),
|
||||||
|
// Tags: tags,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// LastContacted: lastContacted,
|
||||||
|
// FollowUpDate: followUpDate,
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func contactInteractionFromModel(m generatedmodels.ModelPublicContactInteractions) ext.ContactInteraction {
|
||||||
|
// return ext.ContactInteraction{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// ContactID: m.ContactID.UUID(),
|
||||||
|
// InteractionType: m.InteractionType.String(),
|
||||||
|
// OccurredAt: m.OccurredAt.Time(),
|
||||||
|
// Summary: m.Summary.String(),
|
||||||
|
// FollowUpNeeded: m.FollowUpNeeded,
|
||||||
|
// FollowUpNotes: m.FollowUpNotes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func opportunityFromModel(m generatedmodels.ModelPublicOpportunities) ext.Opportunity {
|
||||||
|
// var contactID *uuid.UUID
|
||||||
|
// if m.ContactID.Valid {
|
||||||
|
// id := m.ContactID.UUID()
|
||||||
|
// contactID = &id
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var value *float64
|
||||||
|
// if m.Value.Valid {
|
||||||
|
// v := m.Value.Float64()
|
||||||
|
// value = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var expectedCloseDate *time.Time
|
||||||
|
// if m.ExpectedCloseDate.Valid {
|
||||||
|
// t := m.ExpectedCloseDate.Time()
|
||||||
|
// expectedCloseDate = &t
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.Opportunity{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// ContactID: contactID,
|
||||||
|
// Title: m.Title.String(),
|
||||||
|
// Description: m.Description.String(),
|
||||||
|
// Stage: m.Stage.String(),
|
||||||
|
// Value: value,
|
||||||
|
// ExpectedCloseDate: expectedCloseDate,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func recipeFromModel(m generatedmodels.ModelPublicRecipes, tags []string) ext.Recipe {
|
||||||
|
// var prepTimeMinutes *int
|
||||||
|
// if m.PrepTimeMinutes.Valid {
|
||||||
|
// v := int(m.PrepTimeMinutes.Int64())
|
||||||
|
// prepTimeMinutes = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var cookTimeMinutes *int
|
||||||
|
// if m.CookTimeMinutes.Valid {
|
||||||
|
// v := int(m.CookTimeMinutes.Int64())
|
||||||
|
// cookTimeMinutes = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var servings *int
|
||||||
|
// if m.Servings.Valid {
|
||||||
|
// v := int(m.Servings.Int64())
|
||||||
|
// servings = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var rating *int
|
||||||
|
// if m.Rating.Valid {
|
||||||
|
// v := int(m.Rating.Int64())
|
||||||
|
// rating = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// recipe := ext.Recipe{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// Name: m.Name.String(),
|
||||||
|
// Cuisine: m.Cuisine.String(),
|
||||||
|
// PrepTimeMinutes: prepTimeMinutes,
|
||||||
|
// CookTimeMinutes: cookTimeMinutes,
|
||||||
|
// Servings: servings,
|
||||||
|
// Tags: tags,
|
||||||
|
// Rating: rating,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
|
||||||
|
// if err := json.Unmarshal(m.Ingredients, &recipe.Ingredients); err != nil {
|
||||||
|
// recipe.Ingredients = []ext.Ingredient{}
|
||||||
|
// }
|
||||||
|
// if err := json.Unmarshal(m.Instructions, &recipe.Instructions); err != nil {
|
||||||
|
// recipe.Instructions = []string{}
|
||||||
|
// }
|
||||||
|
// return recipe
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func mealPlanEntryFromModel(m generatedmodels.ModelPublicMealPlans, recipeName string) ext.MealPlanEntry {
|
||||||
|
// var recipeID *uuid.UUID
|
||||||
|
// if m.RecipeID.Valid {
|
||||||
|
// id := m.RecipeID.UUID()
|
||||||
|
// recipeID = &id
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var servings *int
|
||||||
|
// if m.Servings.Valid {
|
||||||
|
// v := int(m.Servings.Int64())
|
||||||
|
// servings = &v
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return ext.MealPlanEntry{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// WeekStart: m.WeekStart.Time(),
|
||||||
|
// DayOfWeek: m.DayOfWeek.String(),
|
||||||
|
// MealType: m.MealType.String(),
|
||||||
|
// RecipeID: recipeID,
|
||||||
|
// RecipeName: recipeName,
|
||||||
|
// CustomMeal: m.CustomMeal.String(),
|
||||||
|
// Servings: servings,
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func shoppingListFromModel(m generatedmodels.ModelPublicShoppingLists) ext.ShoppingList {
|
||||||
|
// list := ext.ShoppingList{
|
||||||
|
// ID: m.ID.UUID(),
|
||||||
|
// WeekStart: m.WeekStart.Time(),
|
||||||
|
// Notes: m.Notes.String(),
|
||||||
|
// CreatedAt: m.CreatedAt.Time(),
|
||||||
|
// UpdatedAt: m.UpdatedAt.Time(),
|
||||||
|
// }
|
||||||
|
// if err := json.Unmarshal(m.Items, &list.Items); err != nil {
|
||||||
|
// list.Items = []ext.ShoppingItem{}
|
||||||
|
// }
|
||||||
|
// return list
|
||||||
|
// }
|
||||||
|
|
||||||
|
// planFromModel converts a generated plans row into ext.Plan. Nullable
// columns (project, due/completed/review timestamps, superseded plan) become
// optional pointers; tags are supplied separately by the caller since the
// generated model does not carry the decoded array.
func planFromModel(m generatedmodels.ModelPublicPlans, tags []string) ext.Plan {
	var projectID *uuid.UUID
	if m.ProjectID.Valid {
		id := m.ProjectID.UUID()
		projectID = &id
	}

	var dueDate *time.Time
	if m.DueDate.Valid {
		t := m.DueDate.Time()
		dueDate = &t
	}

	var completedAt *time.Time
	if m.CompletedAt.Valid {
		t := m.CompletedAt.Time()
		completedAt = &t
	}

	var lastReviewedAt *time.Time
	if m.LastReviewedAt.Valid {
		t := m.LastReviewedAt.Time()
		lastReviewedAt = &t
	}

	var supersedesPlanID *uuid.UUID
	if m.SupersedesPlanID.Valid {
		id := m.SupersedesPlanID.UUID()
		supersedesPlanID = &id
	}

	return ext.Plan{
		ID:               m.ID.UUID(),
		Title:            m.Title.String(),
		Description:      m.Description.String(),
		Status:           ext.PlanStatus(m.Status.String()),
		Priority:         ext.PlanPriority(m.Priority.String()),
		ProjectID:        projectID,
		Owner:            m.Owner.String(),
		DueDate:          dueDate,
		CompletedAt:      completedAt,
		ReviewedBy:       m.ReviewedBy.String(),
		LastReviewedAt:   lastReviewedAt,
		SupersedesPlanID: supersedesPlanID,
		Tags:             tags,
		CreatedAt:        m.CreatedAt.Time(),
		UpdatedAt:        m.UpdatedAt.Time(),
	}
}
|
||||||
|
|
||||||
|
// learningFromModel converts a generated learnings row into ext.Learning.
// Every nullable column (related entity IDs, review metadata) is mapped to an
// optional pointer; tags are supplied separately by the caller.
func learningFromModel(m generatedmodels.ModelPublicLearnings, tags []string) ext.Learning {
	var projectID *uuid.UUID
	if m.ProjectID.Valid {
		id := m.ProjectID.UUID()
		projectID = &id
	}

	var relatedThoughtID *uuid.UUID
	if m.RelatedThoughtID.Valid {
		id := m.RelatedThoughtID.UUID()
		relatedThoughtID = &id
	}

	var relatedSkillID *uuid.UUID
	if m.RelatedSkillID.Valid {
		id := m.RelatedSkillID.UUID()
		relatedSkillID = &id
	}

	var duplicateOfLearningID *uuid.UUID
	if m.DuplicateOfLearningID.Valid {
		id := m.DuplicateOfLearningID.UUID()
		duplicateOfLearningID = &id
	}

	var supersedesLearningID *uuid.UUID
	if m.SupersedesLearningID.Valid {
		id := m.SupersedesLearningID.UUID()
		supersedesLearningID = &id
	}

	// ReviewedBy is *string (unlike most text columns here) so a NULL review
	// is distinguishable from an empty reviewer name.
	var reviewedBy *string
	if m.ReviewedBy.Valid {
		value := m.ReviewedBy.String()
		reviewedBy = &value
	}

	var reviewedAt *time.Time
	if m.ReviewedAt.Valid {
		t := m.ReviewedAt.Time()
		reviewedAt = &t
	}

	return ext.Learning{
		ID:                    m.ID.UUID(),
		Summary:               m.Summary.String(),
		Details:               m.Details.String(),
		Category:              m.Category.String(),
		Area:                  m.Area.String(),
		Status:                ext.LearningStatus(m.Status.String()),
		Priority:              ext.LearningPriority(m.Priority.String()),
		Confidence:            ext.LearningEvidenceLevel(m.Confidence.String()),
		ActionRequired:        m.ActionRequired,
		SourceType:            m.SourceType.String(),
		SourceRef:             m.SourceRef.String(),
		ProjectID:             projectID,
		RelatedThoughtID:      relatedThoughtID,
		RelatedSkillID:        relatedSkillID,
		ReviewedBy:            reviewedBy,
		ReviewedAt:            reviewedAt,
		DuplicateOfLearningID: duplicateOfLearningID,
		SupersedesLearningID:  supersedesLearningID,
		Tags:                  tags,
		CreatedAt:             m.CreatedAt.Time(),
		UpdatedAt:             m.UpdatedAt.Time(),
	}
}
|
||||||
477
internal/store/plans.go
Normal file
477
internal/store/plans.go
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
package store
|
||||||
|
|
||||||
|
import (
	"context"
	"errors"
	"fmt"
	"strings"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"

	"git.warky.dev/wdevs/amcs/internal/generatedmodels"
	ext "git.warky.dev/wdevs/amcs/internal/types"
)
|
||||||
|
|
||||||
|
// planColumns is the shared select/returning column list for plan queries.
// It begins with a newline so it can be appended directly after the bare
// keywords "select" / "returning".
const planColumns = `
	id, title, description, status, priority, project_id, owner, due_date,
	completed_at, reviewed_by, last_reviewed_at, supersedes_plan_id, tags::text[], created_at, updated_at`

// CreatePlan inserts a new plan and returns the stored row (including the
// generated id and timestamps). Title and description are trimmed; empty
// owner/reviewed_by values are stored as NULL via nullableText.
func (db *DB) CreatePlan(ctx context.Context, plan ext.Plan) (ext.Plan, error) {
	row := db.pool.QueryRow(ctx, `
		insert into plans (title, description, status, priority, project_id, owner, due_date,
			completed_at, reviewed_by, last_reviewed_at, supersedes_plan_id, tags)
		values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
		returning`+planColumns,
		strings.TrimSpace(plan.Title),
		strings.TrimSpace(plan.Description),
		string(plan.Status),
		string(plan.Priority),
		plan.ProjectID,
		nullableText(plan.Owner),
		plan.DueDate,
		plan.CompletedAt,
		nullableText(plan.ReviewedBy),
		plan.LastReviewedAt,
		plan.SupersedesPlanID,
		plan.Tags,
	)
	return scanPlan(row)
}
|
||||||
|
|
||||||
|
func (db *DB) GetPlan(ctx context.Context, id uuid.UUID) (ext.Plan, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `select`+planColumns+` from plans where id = $1`, id)
|
||||||
|
plan, err := scanPlan(row)
|
||||||
|
if err != nil {
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return ext.Plan{}, fmt.Errorf("plan not found: %s", id)
|
||||||
|
}
|
||||||
|
return ext.Plan{}, fmt.Errorf("get plan: %w", err)
|
||||||
|
}
|
||||||
|
return plan, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPlanDetail loads a plan plus its graph context: the plans it depends on,
// the plans it blocks, its symmetric related plans, and the attached skills
// and guardrails.
func (db *DB) GetPlanDetail(ctx context.Context, id uuid.UUID) (ext.PlanDetail, error) {
	plan, err := db.GetPlan(ctx, id)
	if err != nil {
		return ext.PlanDetail{}, err
	}

	// Plans this plan depends on (outgoing dependency edges).
	dependsOn, err := db.listPlansByQuery(ctx, `
		select`+planColumns+`
		from plans p
		join plan_dependencies pd on pd.depends_on_plan_id = p.id
		where pd.plan_id = $1 order by p.title`, id)
	if err != nil {
		return ext.PlanDetail{}, fmt.Errorf("get plan depends_on: %w", err)
	}

	// Plans that depend on this one (incoming edges -> plans it blocks).
	blocks, err := db.listPlansByQuery(ctx, `
		select`+planColumns+`
		from plans p
		join plan_dependencies pd on pd.plan_id = p.id
		where pd.depends_on_plan_id = $1 order by p.title`, id)
	if err != nil {
		return ext.PlanDetail{}, fmt.Errorf("get plan blocks: %w", err)
	}

	// Related plans are stored as an unordered pair, so match either side.
	related, err := db.listPlansByQuery(ctx, `
		select`+planColumns+`
		from plans p
		where p.id in (
			select plan_b_id from plan_related_plans where plan_a_id = $1
			union
			select plan_a_id from plan_related_plans where plan_b_id = $1
		) order by p.title`, id)
	if err != nil {
		return ext.PlanDetail{}, fmt.Errorf("get plan related: %w", err)
	}

	skills, err := db.ListPlanSkills(ctx, id)
	if err != nil {
		return ext.PlanDetail{}, fmt.Errorf("get plan skills: %w", err)
	}

	guardrails, err := db.ListPlanGuardrails(ctx, id)
	if err != nil {
		return ext.PlanDetail{}, fmt.Errorf("get plan guardrails: %w", err)
	}

	return ext.PlanDetail{
		Plan:         plan,
		DependsOn:    dependsOn,
		Blocks:       blocks,
		RelatedPlans: related,
		Skills:       skills,
		Guardrails:   guardrails,
	}, nil
}
|
||||||
|
|
||||||
|
func (db *DB) UpdatePlan(ctx context.Context, id uuid.UUID, u ext.PlanUpdate) (ext.Plan, error) {
|
||||||
|
sets := []string{"updated_at = now()"}
|
||||||
|
args := []any{}
|
||||||
|
|
||||||
|
if u.Title != nil {
|
||||||
|
args = append(args, strings.TrimSpace(*u.Title))
|
||||||
|
sets = append(sets, fmt.Sprintf("title = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.Description != nil {
|
||||||
|
args = append(args, strings.TrimSpace(*u.Description))
|
||||||
|
sets = append(sets, fmt.Sprintf("description = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.Status != nil {
|
||||||
|
args = append(args, strings.TrimSpace(*u.Status))
|
||||||
|
sets = append(sets, fmt.Sprintf("status = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.Priority != nil {
|
||||||
|
args = append(args, strings.TrimSpace(*u.Priority))
|
||||||
|
sets = append(sets, fmt.Sprintf("priority = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.Owner != nil {
|
||||||
|
args = append(args, nullableText(*u.Owner))
|
||||||
|
sets = append(sets, fmt.Sprintf("owner = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.ClearDueDate {
|
||||||
|
sets = append(sets, "due_date = null")
|
||||||
|
} else if u.DueDate != nil {
|
||||||
|
args = append(args, *u.DueDate)
|
||||||
|
sets = append(sets, fmt.Sprintf("due_date = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.ClearCompletedAt {
|
||||||
|
sets = append(sets, "completed_at = null")
|
||||||
|
} else if u.CompletedAt != nil {
|
||||||
|
args = append(args, *u.CompletedAt)
|
||||||
|
sets = append(sets, fmt.Sprintf("completed_at = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.ReviewedBy != nil {
|
||||||
|
args = append(args, nullableText(*u.ReviewedBy))
|
||||||
|
sets = append(sets, fmt.Sprintf("reviewed_by = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.MarkReviewed {
|
||||||
|
sets = append(sets, "last_reviewed_at = now()")
|
||||||
|
}
|
||||||
|
if u.ClearSupersedesPlanID {
|
||||||
|
sets = append(sets, "supersedes_plan_id = null")
|
||||||
|
} else if u.SupersedesPlanID != nil {
|
||||||
|
args = append(args, *u.SupersedesPlanID)
|
||||||
|
sets = append(sets, fmt.Sprintf("supersedes_plan_id = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if u.Tags != nil {
|
||||||
|
args = append(args, *u.Tags)
|
||||||
|
sets = append(sets, fmt.Sprintf("tags = $%d", len(args)))
|
||||||
|
}
|
||||||
|
|
||||||
|
args = append(args, id)
|
||||||
|
query := fmt.Sprintf(
|
||||||
|
"update plans set %s where id = $%d returning%s",
|
||||||
|
strings.Join(sets, ", "), len(args), planColumns,
|
||||||
|
)
|
||||||
|
|
||||||
|
row := db.pool.QueryRow(ctx, query, args...)
|
||||||
|
plan, err := scanPlan(row)
|
||||||
|
if err != nil {
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return ext.Plan{}, fmt.Errorf("plan not found: %s", id)
|
||||||
|
}
|
||||||
|
return ext.Plan{}, fmt.Errorf("update plan: %w", err)
|
||||||
|
}
|
||||||
|
return plan, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) DeletePlan(ctx context.Context, id uuid.UUID) error {
|
||||||
|
tag, err := db.pool.Exec(ctx, `delete from plans where id = $1`, id)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("delete plan: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("plan not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListPlans returns plans matching the optional filter fields, ordered by
// most recently updated. Empty/whitespace filter values are ignored; a
// positive Limit caps the result size.
func (db *DB) ListPlans(ctx context.Context, filter ext.PlanFilter) ([]ext.Plan, error) {
	args := make([]any, 0, 8)
	conditions := make([]string, 0, 8)

	if filter.ProjectID != nil {
		args = append(args, *filter.ProjectID)
		conditions = append(conditions, fmt.Sprintf("project_id = $%d", len(args)))
	}
	if v := strings.TrimSpace(filter.Status); v != "" {
		args = append(args, v)
		conditions = append(conditions, fmt.Sprintf("status = $%d", len(args)))
	}
	if v := strings.TrimSpace(filter.Priority); v != "" {
		args = append(args, v)
		conditions = append(conditions, fmt.Sprintf("priority = $%d", len(args)))
	}
	if v := strings.TrimSpace(filter.Owner); v != "" {
		args = append(args, v)
		conditions = append(conditions, fmt.Sprintf("owner = $%d", len(args)))
	}
	if v := strings.TrimSpace(filter.Tag); v != "" {
		args = append(args, v)
		conditions = append(conditions, fmt.Sprintf("$%d = any(tags)", len(args)))
	}
	if v := strings.TrimSpace(filter.Query); v != "" {
		// Full-text search over title + description using websearch syntax.
		args = append(args, v)
		conditions = append(conditions, fmt.Sprintf(
			"to_tsvector('simple', title || ' ' || coalesce(description, '')) @@ websearch_to_tsquery('simple', $%d)", len(args)))
	}

	query := "select" + planColumns + " from plans"
	if len(conditions) > 0 {
		query += " where " + strings.Join(conditions, " and ")
	}
	query += " order by updated_at desc"
	if filter.Limit > 0 {
		args = append(args, filter.Limit)
		query += fmt.Sprintf(" limit $%d", len(args))
	}

	return db.listPlansByQuery(ctx, query, args...)
}
|
||||||
|
|
||||||
|
// Dependencies
|
||||||
|
|
||||||
|
func (db *DB) AddPlanDependency(ctx context.Context, planID, dependsOnPlanID uuid.UUID) error {
|
||||||
|
_, err := db.pool.Exec(ctx, `
|
||||||
|
insert into plan_dependencies (plan_id, depends_on_plan_id)
|
||||||
|
values ($1, $2)
|
||||||
|
on conflict do nothing
|
||||||
|
`, planID, dependsOnPlanID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("add plan dependency: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) RemovePlanDependency(ctx context.Context, planID, dependsOnPlanID uuid.UUID) error {
|
||||||
|
tag, err := db.pool.Exec(ctx, `
|
||||||
|
delete from plan_dependencies where plan_id = $1 and depends_on_plan_id = $2
|
||||||
|
`, planID, dependsOnPlanID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("remove plan dependency: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("plan dependency not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Related Plans
|
||||||
|
|
||||||
|
func (db *DB) AddRelatedPlan(ctx context.Context, planAID, planBID uuid.UUID) error {
|
||||||
|
a, b := canonicalPlanPair(planAID, planBID)
|
||||||
|
_, err := db.pool.Exec(ctx, `
|
||||||
|
insert into plan_related_plans (plan_a_id, plan_b_id)
|
||||||
|
values ($1, $2)
|
||||||
|
on conflict do nothing
|
||||||
|
`, a, b)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("add related plan: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) RemoveRelatedPlan(ctx context.Context, planAID, planBID uuid.UUID) error {
|
||||||
|
a, b := canonicalPlanPair(planAID, planBID)
|
||||||
|
tag, err := db.pool.Exec(ctx, `
|
||||||
|
delete from plan_related_plans where plan_a_id = $1 and plan_b_id = $2
|
||||||
|
`, a, b)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("remove related plan: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("related plan link not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plan Skills
|
||||||
|
|
||||||
|
func (db *DB) AddPlanSkill(ctx context.Context, planID, skillID uuid.UUID) error {
|
||||||
|
_, err := db.pool.Exec(ctx, `
|
||||||
|
insert into plan_skills (plan_id, skill_id) values ($1, $2) on conflict do nothing
|
||||||
|
`, planID, skillID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("add plan skill: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) RemovePlanSkill(ctx context.Context, planID, skillID uuid.UUID) error {
|
||||||
|
tag, err := db.pool.Exec(ctx, `
|
||||||
|
delete from plan_skills where plan_id = $1 and skill_id = $2
|
||||||
|
`, planID, skillID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("remove plan skill: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("plan skill link not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) ListPlanSkills(ctx context.Context, planID uuid.UUID) ([]ext.AgentSkill, error) {
|
||||||
|
rows, err := db.pool.Query(ctx, `
|
||||||
|
select s.id, s.name, s.description, s.content, s.tags::text[], s.created_at, s.updated_at
|
||||||
|
from agent_skills s
|
||||||
|
join plan_skills ps on ps.skill_id = s.id
|
||||||
|
where ps.plan_id = $1
|
||||||
|
order by s.name
|
||||||
|
`, planID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("list plan skills: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var skills []ext.AgentSkill
|
||||||
|
for rows.Next() {
|
||||||
|
var model generatedmodels.ModelPublicAgentSkills
|
||||||
|
var tags []string
|
||||||
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan plan skill: %w", err)
|
||||||
|
}
|
||||||
|
s := ext.AgentSkill{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
|
if s.Tags == nil {
|
||||||
|
s.Tags = []string{}
|
||||||
|
}
|
||||||
|
skills = append(skills, s)
|
||||||
|
}
|
||||||
|
return skills, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plan Guardrails
|
||||||
|
|
||||||
|
func (db *DB) AddPlanGuardrail(ctx context.Context, planID, guardrailID uuid.UUID) error {
|
||||||
|
_, err := db.pool.Exec(ctx, `
|
||||||
|
insert into plan_guardrails (plan_id, guardrail_id) values ($1, $2) on conflict do nothing
|
||||||
|
`, planID, guardrailID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("add plan guardrail: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) RemovePlanGuardrail(ctx context.Context, planID, guardrailID uuid.UUID) error {
|
||||||
|
tag, err := db.pool.Exec(ctx, `
|
||||||
|
delete from plan_guardrails where plan_id = $1 and guardrail_id = $2
|
||||||
|
`, planID, guardrailID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("remove plan guardrail: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("plan guardrail link not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) ListPlanGuardrails(ctx context.Context, planID uuid.UUID) ([]ext.AgentGuardrail, error) {
|
||||||
|
rows, err := db.pool.Query(ctx, `
|
||||||
|
select g.id, g.name, g.description, g.content, g.severity, g.tags::text[], g.created_at, g.updated_at
|
||||||
|
from agent_guardrails g
|
||||||
|
join plan_guardrails pg on pg.guardrail_id = g.id
|
||||||
|
where pg.plan_id = $1
|
||||||
|
order by g.name
|
||||||
|
`, planID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("list plan guardrails: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var guardrails []ext.AgentGuardrail
|
||||||
|
for rows.Next() {
|
||||||
|
var model generatedmodels.ModelPublicAgentGuardrails
|
||||||
|
var tags []string
|
||||||
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &model.Severity, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan plan guardrail: %w", err)
|
||||||
|
}
|
||||||
|
g := ext.AgentGuardrail{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Severity: model.Severity.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
|
if g.Tags == nil {
|
||||||
|
g.Tags = []string{}
|
||||||
|
}
|
||||||
|
guardrails = append(guardrails, g)
|
||||||
|
}
|
||||||
|
return guardrails, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// helpers
|
||||||
|
|
||||||
|
type planScanner interface {
|
||||||
|
Scan(dest ...any) error
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanPlan(row planScanner) (ext.Plan, error) {
|
||||||
|
var model generatedmodels.ModelPublicPlans
|
||||||
|
var tags []string
|
||||||
|
err := row.Scan(
|
||||||
|
&model.ID,
|
||||||
|
&model.Title,
|
||||||
|
&model.Description,
|
||||||
|
&model.Status,
|
||||||
|
&model.Priority,
|
||||||
|
&model.ProjectID,
|
||||||
|
&model.Owner,
|
||||||
|
&model.DueDate,
|
||||||
|
&model.CompletedAt,
|
||||||
|
&model.ReviewedBy,
|
||||||
|
&model.LastReviewedAt,
|
||||||
|
&model.SupersedesPlanID,
|
||||||
|
&tags,
|
||||||
|
&model.CreatedAt,
|
||||||
|
&model.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return ext.Plan{}, err
|
||||||
|
}
|
||||||
|
if tags == nil {
|
||||||
|
tags = []string{}
|
||||||
|
}
|
||||||
|
return planFromModel(model, tags), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) listPlansByQuery(ctx context.Context, query string, args ...any) ([]ext.Plan, error) {
|
||||||
|
rows, err := db.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
plans := make([]ext.Plan, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
plan, err := scanPlan(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scan plan: %w", err)
|
||||||
|
}
|
||||||
|
plans = append(plans, plan)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterate plans: %w", err)
|
||||||
|
}
|
||||||
|
return plans, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// canonicalPlanPair ensures the smaller UUID is always plan_a_id to prevent duplicates.
|
||||||
|
func canonicalPlanPair(a, b uuid.UUID) (uuid.UUID, uuid.UUID) {
|
||||||
|
if strings.Compare(a.String(), b.String()) <= 0 {
|
||||||
|
return a, b
|
||||||
|
}
|
||||||
|
return b, a
|
||||||
|
}
|
||||||
|
|
||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -18,37 +19,58 @@ func (db *DB) CreateProject(ctx context.Context, name, description string) (thou
|
|||||||
returning guid, name, description, created_at, last_active_at
|
returning guid, name, description, created_at, last_active_at
|
||||||
`, name, description)
|
`, name, description)
|
||||||
|
|
||||||
var project thoughttypes.Project
|
var model generatedmodels.ModelPublicProjects
|
||||||
if err := row.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt); err != nil {
|
if err := row.Scan(&model.GUID, &model.Name, &model.Description, &model.CreatedAt, &model.LastActiveAt); err != nil {
|
||||||
return thoughttypes.Project{}, fmt.Errorf("create project: %w", err)
|
return thoughttypes.Project{}, fmt.Errorf("create project: %w", err)
|
||||||
}
|
}
|
||||||
return project, nil
|
return projectFromModel(model), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) GetProject(ctx context.Context, nameOrID string) (thoughttypes.Project, error) {
|
func (db *DB) GetProject(ctx context.Context, nameOrID string) (thoughttypes.Project, error) {
|
||||||
var row pgx.Row
|
lookup := strings.TrimSpace(nameOrID)
|
||||||
if parsedID, err := uuid.Parse(strings.TrimSpace(nameOrID)); err == nil {
|
|
||||||
row = db.pool.QueryRow(ctx, `
|
// Prefer guid lookup when input parses as UUID, but fall back to name lookup
|
||||||
select guid, name, description, created_at, last_active_at
|
// so UUID-shaped project names can still be resolved by name.
|
||||||
from projects
|
if parsedID, err := uuid.Parse(lookup); err == nil {
|
||||||
where guid = $1
|
project, queryErr := db.getProjectByGUID(ctx, parsedID)
|
||||||
`, parsedID)
|
if queryErr == nil {
|
||||||
} else {
|
return project, nil
|
||||||
row = db.pool.QueryRow(ctx, `
|
}
|
||||||
select guid, name, description, created_at, last_active_at
|
if queryErr != pgx.ErrNoRows {
|
||||||
from projects
|
return thoughttypes.Project{}, queryErr
|
||||||
where name = $1
|
}
|
||||||
`, strings.TrimSpace(nameOrID))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var project thoughttypes.Project
|
return db.getProjectByName(ctx, lookup)
|
||||||
if err := row.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt); err != nil {
|
}
|
||||||
|
|
||||||
|
func (db *DB) getProjectByGUID(ctx context.Context, id uuid.UUID) (thoughttypes.Project, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select guid, name, description, created_at, last_active_at
|
||||||
|
from projects
|
||||||
|
where guid = $1
|
||||||
|
`, id)
|
||||||
|
return scanProject(row)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) getProjectByName(ctx context.Context, name string) (thoughttypes.Project, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select guid, name, description, created_at, last_active_at
|
||||||
|
from projects
|
||||||
|
where name = $1
|
||||||
|
`, name)
|
||||||
|
return scanProject(row)
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanProject(row pgx.Row) (thoughttypes.Project, error) {
|
||||||
|
var model generatedmodels.ModelPublicProjects
|
||||||
|
if err := row.Scan(&model.GUID, &model.Name, &model.Description, &model.CreatedAt, &model.LastActiveAt); err != nil {
|
||||||
if err == pgx.ErrNoRows {
|
if err == pgx.ErrNoRows {
|
||||||
return thoughttypes.Project{}, err
|
return thoughttypes.Project{}, err
|
||||||
}
|
}
|
||||||
return thoughttypes.Project{}, fmt.Errorf("get project: %w", err)
|
return thoughttypes.Project{}, fmt.Errorf("get project: %w", err)
|
||||||
}
|
}
|
||||||
return project, nil
|
return projectFromModel(model), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) ListProjects(ctx context.Context) ([]thoughttypes.ProjectSummary, error) {
|
func (db *DB) ListProjects(ctx context.Context) ([]thoughttypes.ProjectSummary, error) {
|
||||||
@@ -66,11 +88,15 @@ func (db *DB) ListProjects(ctx context.Context) ([]thoughttypes.ProjectSummary,
|
|||||||
|
|
||||||
projects := make([]thoughttypes.ProjectSummary, 0)
|
projects := make([]thoughttypes.ProjectSummary, 0)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var project thoughttypes.ProjectSummary
|
var model generatedmodels.ModelPublicProjects
|
||||||
if err := rows.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt, &project.ThoughtCount); err != nil {
|
var thoughtCount int
|
||||||
|
if err := rows.Scan(&model.GUID, &model.Name, &model.Description, &model.CreatedAt, &model.LastActiveAt, &thoughtCount); err != nil {
|
||||||
return nil, fmt.Errorf("scan project summary: %w", err)
|
return nil, fmt.Errorf("scan project summary: %w", err)
|
||||||
}
|
}
|
||||||
projects = append(projects, project)
|
projects = append(projects, thoughttypes.ProjectSummary{
|
||||||
|
Project: projectFromModel(model),
|
||||||
|
ThoughtCount: thoughtCount,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
if err := rows.Err(); err != nil {
|
if err := rows.Err(); err != nil {
|
||||||
return nil, fmt.Errorf("iterate projects: %w", err)
|
return nil, fmt.Errorf("iterate projects: %w", err)
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import (
|
|||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -23,9 +24,13 @@ func (db *DB) AddSkill(ctx context.Context, skill ext.AgentSkill) (ext.AgentSkil
|
|||||||
`, skill.Name, skill.Description, skill.Content, skill.Tags)
|
`, skill.Name, skill.Description, skill.Content, skill.Tags)
|
||||||
|
|
||||||
created := skill
|
created := skill
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
var model generatedmodels.ModelPublicAgentSkills
|
||||||
|
if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.AgentSkill{}, fmt.Errorf("insert agent skill: %w", err)
|
return ext.AgentSkill{}, fmt.Errorf("insert agent skill: %w", err)
|
||||||
}
|
}
|
||||||
|
created.ID = model.ID.UUID()
|
||||||
|
created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
return created, nil
|
return created, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -41,7 +46,7 @@ func (db *DB) RemoveSkill(ctx context.Context, id uuid.UUID) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) ListSkills(ctx context.Context, tag string) ([]ext.AgentSkill, error) {
|
func (db *DB) ListSkills(ctx context.Context, tag string) ([]ext.AgentSkill, error) {
|
||||||
q := `select id, name, description, content, tags, created_at, updated_at from agent_skills`
|
q := `select id, name, description, content, tags::text[], created_at, updated_at from agent_skills`
|
||||||
args := []any{}
|
args := []any{}
|
||||||
if t := strings.TrimSpace(tag); t != "" {
|
if t := strings.TrimSpace(tag); t != "" {
|
||||||
args = append(args, t)
|
args = append(args, t)
|
||||||
@@ -57,12 +62,20 @@ func (db *DB) ListSkills(ctx context.Context, tag string) ([]ext.AgentSkill, err
|
|||||||
|
|
||||||
var skills []ext.AgentSkill
|
var skills []ext.AgentSkill
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var s ext.AgentSkill
|
var model generatedmodels.ModelPublicAgentSkills
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := rows.Scan(&s.ID, &s.Name, &desc, &s.Content, &s.Tags, &s.CreatedAt, &s.UpdatedAt); err != nil {
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan agent skill: %w", err)
|
return nil, fmt.Errorf("scan agent skill: %w", err)
|
||||||
}
|
}
|
||||||
s.Description = strVal(desc)
|
s := ext.AgentSkill{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if s.Tags == nil {
|
if s.Tags == nil {
|
||||||
s.Tags = []string{}
|
s.Tags = []string{}
|
||||||
}
|
}
|
||||||
@@ -73,16 +86,24 @@ func (db *DB) ListSkills(ctx context.Context, tag string) ([]ext.AgentSkill, err
|
|||||||
|
|
||||||
func (db *DB) GetSkill(ctx context.Context, id uuid.UUID) (ext.AgentSkill, error) {
|
func (db *DB) GetSkill(ctx context.Context, id uuid.UUID) (ext.AgentSkill, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
row := db.pool.QueryRow(ctx, `
|
||||||
select id, name, description, content, tags, created_at, updated_at
|
select id, name, description, content, tags::text[], created_at, updated_at
|
||||||
from agent_skills where id = $1
|
from agent_skills where id = $1
|
||||||
`, id)
|
`, id)
|
||||||
|
|
||||||
var s ext.AgentSkill
|
var model generatedmodels.ModelPublicAgentSkills
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := row.Scan(&s.ID, &s.Name, &desc, &s.Content, &s.Tags, &s.CreatedAt, &s.UpdatedAt); err != nil {
|
if err := row.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.AgentSkill{}, fmt.Errorf("get agent skill: %w", err)
|
return ext.AgentSkill{}, fmt.Errorf("get agent skill: %w", err)
|
||||||
}
|
}
|
||||||
s.Description = strVal(desc)
|
s := ext.AgentSkill{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if s.Tags == nil {
|
if s.Tags == nil {
|
||||||
s.Tags = []string{}
|
s.Tags = []string{}
|
||||||
}
|
}
|
||||||
@@ -105,9 +126,13 @@ func (db *DB) AddGuardrail(ctx context.Context, g ext.AgentGuardrail) (ext.Agent
|
|||||||
`, g.Name, g.Description, g.Content, g.Severity, g.Tags)
|
`, g.Name, g.Description, g.Content, g.Severity, g.Tags)
|
||||||
|
|
||||||
created := g
|
created := g
|
||||||
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
var model generatedmodels.ModelPublicAgentGuardrails
|
||||||
|
if err := row.Scan(&model.ID, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.AgentGuardrail{}, fmt.Errorf("insert agent guardrail: %w", err)
|
return ext.AgentGuardrail{}, fmt.Errorf("insert agent guardrail: %w", err)
|
||||||
}
|
}
|
||||||
|
created.ID = model.ID.UUID()
|
||||||
|
created.CreatedAt = model.CreatedAt.Time()
|
||||||
|
created.UpdatedAt = model.UpdatedAt.Time()
|
||||||
return created, nil
|
return created, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -135,7 +160,7 @@ func (db *DB) ListGuardrails(ctx context.Context, tag, severity string) ([]ext.A
|
|||||||
conditions = append(conditions, fmt.Sprintf("severity = $%d", len(args)))
|
conditions = append(conditions, fmt.Sprintf("severity = $%d", len(args)))
|
||||||
}
|
}
|
||||||
|
|
||||||
q := `select id, name, description, content, severity, tags, created_at, updated_at from agent_guardrails`
|
q := `select id, name, description, content, severity, tags::text[], created_at, updated_at from agent_guardrails`
|
||||||
if len(conditions) > 0 {
|
if len(conditions) > 0 {
|
||||||
q += " where " + strings.Join(conditions, " and ")
|
q += " where " + strings.Join(conditions, " and ")
|
||||||
}
|
}
|
||||||
@@ -149,12 +174,21 @@ func (db *DB) ListGuardrails(ctx context.Context, tag, severity string) ([]ext.A
|
|||||||
|
|
||||||
var guardrails []ext.AgentGuardrail
|
var guardrails []ext.AgentGuardrail
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var g ext.AgentGuardrail
|
var model generatedmodels.ModelPublicAgentGuardrails
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := rows.Scan(&g.ID, &g.Name, &desc, &g.Content, &g.Severity, &g.Tags, &g.CreatedAt, &g.UpdatedAt); err != nil {
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &model.Severity, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan agent guardrail: %w", err)
|
return nil, fmt.Errorf("scan agent guardrail: %w", err)
|
||||||
}
|
}
|
||||||
g.Description = strVal(desc)
|
g := ext.AgentGuardrail{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Severity: model.Severity.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if g.Tags == nil {
|
if g.Tags == nil {
|
||||||
g.Tags = []string{}
|
g.Tags = []string{}
|
||||||
}
|
}
|
||||||
@@ -165,16 +199,25 @@ func (db *DB) ListGuardrails(ctx context.Context, tag, severity string) ([]ext.A
|
|||||||
|
|
||||||
func (db *DB) GetGuardrail(ctx context.Context, id uuid.UUID) (ext.AgentGuardrail, error) {
|
func (db *DB) GetGuardrail(ctx context.Context, id uuid.UUID) (ext.AgentGuardrail, error) {
|
||||||
row := db.pool.QueryRow(ctx, `
|
row := db.pool.QueryRow(ctx, `
|
||||||
select id, name, description, content, severity, tags, created_at, updated_at
|
select id, name, description, content, severity, tags::text[], created_at, updated_at
|
||||||
from agent_guardrails where id = $1
|
from agent_guardrails where id = $1
|
||||||
`, id)
|
`, id)
|
||||||
|
|
||||||
var g ext.AgentGuardrail
|
var model generatedmodels.ModelPublicAgentGuardrails
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := row.Scan(&g.ID, &g.Name, &desc, &g.Content, &g.Severity, &g.Tags, &g.CreatedAt, &g.UpdatedAt); err != nil {
|
if err := row.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &model.Severity, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return ext.AgentGuardrail{}, fmt.Errorf("get agent guardrail: %w", err)
|
return ext.AgentGuardrail{}, fmt.Errorf("get agent guardrail: %w", err)
|
||||||
}
|
}
|
||||||
g.Description = strVal(desc)
|
g := ext.AgentGuardrail{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Severity: model.Severity.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if g.Tags == nil {
|
if g.Tags == nil {
|
||||||
g.Tags = []string{}
|
g.Tags = []string{}
|
||||||
}
|
}
|
||||||
@@ -210,7 +253,7 @@ func (db *DB) RemoveProjectSkill(ctx context.Context, projectID, skillID uuid.UU
|
|||||||
|
|
||||||
func (db *DB) ListProjectSkills(ctx context.Context, projectID uuid.UUID) ([]ext.AgentSkill, error) {
|
func (db *DB) ListProjectSkills(ctx context.Context, projectID uuid.UUID) ([]ext.AgentSkill, error) {
|
||||||
rows, err := db.pool.Query(ctx, `
|
rows, err := db.pool.Query(ctx, `
|
||||||
select s.id, s.name, s.description, s.content, s.tags, s.created_at, s.updated_at
|
select s.id, s.name, s.description, s.content, s.tags::text[], s.created_at, s.updated_at
|
||||||
from agent_skills s
|
from agent_skills s
|
||||||
join project_skills ps on ps.skill_id = s.id
|
join project_skills ps on ps.skill_id = s.id
|
||||||
where ps.project_id = $1
|
where ps.project_id = $1
|
||||||
@@ -223,12 +266,20 @@ func (db *DB) ListProjectSkills(ctx context.Context, projectID uuid.UUID) ([]ext
|
|||||||
|
|
||||||
var skills []ext.AgentSkill
|
var skills []ext.AgentSkill
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var s ext.AgentSkill
|
var model generatedmodels.ModelPublicAgentSkills
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := rows.Scan(&s.ID, &s.Name, &desc, &s.Content, &s.Tags, &s.CreatedAt, &s.UpdatedAt); err != nil {
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan project skill: %w", err)
|
return nil, fmt.Errorf("scan project skill: %w", err)
|
||||||
}
|
}
|
||||||
s.Description = strVal(desc)
|
s := ext.AgentSkill{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if s.Tags == nil {
|
if s.Tags == nil {
|
||||||
s.Tags = []string{}
|
s.Tags = []string{}
|
||||||
}
|
}
|
||||||
@@ -266,7 +317,7 @@ func (db *DB) RemoveProjectGuardrail(ctx context.Context, projectID, guardrailID
|
|||||||
|
|
||||||
func (db *DB) ListProjectGuardrails(ctx context.Context, projectID uuid.UUID) ([]ext.AgentGuardrail, error) {
|
func (db *DB) ListProjectGuardrails(ctx context.Context, projectID uuid.UUID) ([]ext.AgentGuardrail, error) {
|
||||||
rows, err := db.pool.Query(ctx, `
|
rows, err := db.pool.Query(ctx, `
|
||||||
select g.id, g.name, g.description, g.content, g.severity, g.tags, g.created_at, g.updated_at
|
select g.id, g.name, g.description, g.content, g.severity, g.tags::text[], g.created_at, g.updated_at
|
||||||
from agent_guardrails g
|
from agent_guardrails g
|
||||||
join project_guardrails pg on pg.guardrail_id = g.id
|
join project_guardrails pg on pg.guardrail_id = g.id
|
||||||
where pg.project_id = $1
|
where pg.project_id = $1
|
||||||
@@ -279,12 +330,21 @@ func (db *DB) ListProjectGuardrails(ctx context.Context, projectID uuid.UUID) ([
|
|||||||
|
|
||||||
var guardrails []ext.AgentGuardrail
|
var guardrails []ext.AgentGuardrail
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var g ext.AgentGuardrail
|
var model generatedmodels.ModelPublicAgentGuardrails
|
||||||
var desc *string
|
var tags []string
|
||||||
if err := rows.Scan(&g.ID, &g.Name, &desc, &g.Content, &g.Severity, &g.Tags, &g.CreatedAt, &g.UpdatedAt); err != nil {
|
if err := rows.Scan(&model.ID, &model.Name, &model.Description, &model.Content, &model.Severity, &tags, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scan project guardrail: %w", err)
|
return nil, fmt.Errorf("scan project guardrail: %w", err)
|
||||||
}
|
}
|
||||||
g.Description = strVal(desc)
|
g := ext.AgentGuardrail{
|
||||||
|
ID: model.ID.UUID(),
|
||||||
|
Name: model.Name.String(),
|
||||||
|
Description: model.Description.String(),
|
||||||
|
Content: model.Content.String(),
|
||||||
|
Severity: model.Severity.String(),
|
||||||
|
Tags: tags,
|
||||||
|
CreatedAt: model.CreatedAt.Time(),
|
||||||
|
UpdatedAt: model.UpdatedAt.Time(),
|
||||||
|
}
|
||||||
if g.Tags == nil {
|
if g.Tags == nil {
|
||||||
g.Tags = []string{}
|
g.Tags = []string{}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ import (
|
|||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
"github.com/pgvector/pgvector-go"
|
"github.com/pgvector/pgvector-go"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -149,13 +150,13 @@ func (db *DB) ListThoughts(ctx context.Context, filter thoughttypes.ListFilter)
|
|||||||
|
|
||||||
thoughts := make([]thoughttypes.Thought, 0, filter.Limit)
|
thoughts := make([]thoughttypes.Thought, 0, filter.Limit)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var thought thoughttypes.Thought
|
var model generatedmodels.ModelPublicThoughts
|
||||||
var metadataBytes []byte
|
if err := rows.Scan(&model.GUID, &model.Content, &model.Metadata, &model.ProjectID, &model.ArchivedAt, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
if err := rows.Scan(&thought.ID, &thought.Content, &metadataBytes, &thought.ProjectID, &thought.ArchivedAt, &thought.CreatedAt, &thought.UpdatedAt); err != nil {
|
|
||||||
return nil, fmt.Errorf("scan listed thought: %w", err)
|
return nil, fmt.Errorf("scan listed thought: %w", err)
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(metadataBytes, &thought.Metadata); err != nil {
|
thought, err := thoughtFromModel(model)
|
||||||
return nil, fmt.Errorf("decode listed metadata: %w", err)
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("map listed thought: %w", err)
|
||||||
}
|
}
|
||||||
thoughts = append(thoughts, thought)
|
thoughts = append(thoughts, thought)
|
||||||
}
|
}
|
||||||
@@ -222,17 +223,17 @@ func (db *DB) GetThought(ctx context.Context, id uuid.UUID) (thoughttypes.Though
|
|||||||
where guid = $1
|
where guid = $1
|
||||||
`, id)
|
`, id)
|
||||||
|
|
||||||
var thought thoughttypes.Thought
|
var model generatedmodels.ModelPublicThoughts
|
||||||
var metadataBytes []byte
|
if err := row.Scan(&model.GUID, &model.Content, &model.Metadata, &model.ProjectID, &model.ArchivedAt, &model.CreatedAt, &model.UpdatedAt); err != nil {
|
||||||
if err := row.Scan(&thought.ID, &thought.Content, &metadataBytes, &thought.ProjectID, &thought.ArchivedAt, &thought.CreatedAt, &thought.UpdatedAt); err != nil {
|
|
||||||
if err == pgx.ErrNoRows {
|
if err == pgx.ErrNoRows {
|
||||||
return thoughttypes.Thought{}, err
|
return thoughttypes.Thought{}, err
|
||||||
}
|
}
|
||||||
return thoughttypes.Thought{}, fmt.Errorf("get thought: %w", err)
|
return thoughttypes.Thought{}, fmt.Errorf("get thought: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := json.Unmarshal(metadataBytes, &thought.Metadata); err != nil {
|
thought, err := thoughtFromModel(model)
|
||||||
return thoughttypes.Thought{}, fmt.Errorf("decode thought metadata: %w", err)
|
if err != nil {
|
||||||
|
return thoughttypes.Thought{}, fmt.Errorf("map thought: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return thought, nil
|
return thought, nil
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ package store
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/generatedmodels"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (db *DB) UpsertToolAnnotation(ctx context.Context, toolName, notes string) error {
|
func (db *DB) UpsertToolAnnotation(ctx context.Context, toolName, notes string) error {
|
||||||
@@ -28,11 +30,11 @@ func (db *DB) GetToolAnnotations(ctx context.Context) (map[string]string, error)
|
|||||||
|
|
||||||
annotations := make(map[string]string)
|
annotations := make(map[string]string)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var toolName, notes string
|
var model generatedmodels.ModelPublicToolAnnotations
|
||||||
if err := rows.Scan(&toolName, ¬es); err != nil {
|
if err := rows.Scan(&model.ToolName, &model.Notes); err != nil {
|
||||||
return nil, fmt.Errorf("scan tool annotation: %w", err)
|
return nil, fmt.Errorf("scan tool annotation: %w", err)
|
||||||
}
|
}
|
||||||
annotations[toolName] = notes
|
annotations[model.ToolName.String()] = model.Notes.String()
|
||||||
}
|
}
|
||||||
return annotations, rows.Err()
|
return annotations, rows.Err()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,10 +18,10 @@ import (
|
|||||||
const backfillConcurrency = 4
|
const backfillConcurrency = 4
|
||||||
|
|
||||||
type BackfillTool struct {
|
type BackfillTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
logger *slog.Logger
|
logger *slog.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
type BackfillInput struct {
|
type BackfillInput struct {
|
||||||
@@ -47,31 +47,50 @@ type BackfillOutput struct {
|
|||||||
Failures []BackfillFailure `json:"failures,omitempty"`
|
Failures []BackfillFailure `json:"failures,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewBackfillTool(db *store.DB, provider ai.Provider, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool {
|
func NewBackfillTool(db *store.DB, embeddings *ai.EmbeddingRunner, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool {
|
||||||
return &BackfillTool{store: db, provider: provider, sessions: sessions, logger: logger}
|
return &BackfillTool{store: db, embeddings: embeddings, sessions: sessions, logger: logger}
|
||||||
}
|
}
|
||||||
|
|
||||||
// QueueThought queues a single thought for background embedding generation.
|
// QueueThought queues a single thought for background embedding generation.
|
||||||
// It is used by capture when the embedding provider is temporarily unavailable.
|
// It is used by capture when the embedding provider is temporarily unavailable.
|
||||||
func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) {
|
func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) {
|
||||||
go func() {
|
go func() {
|
||||||
vec, err := t.provider.Embed(ctx, content)
|
started := time.Now()
|
||||||
|
t.logger.Info("background embedding started",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", t.embeddings.PrimaryModel()),
|
||||||
|
)
|
||||||
|
|
||||||
|
result, err := t.embeddings.Embed(ctx, content)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.logger.Warn("background embedding retry failed",
|
t.logger.Warn("background embedding error",
|
||||||
slog.String("thought_id", id.String()),
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", t.embeddings.PrimaryModel()),
|
||||||
|
slog.String("stage", "embed"),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
slog.String("error", err.Error()),
|
slog.String("error", err.Error()),
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
model := t.provider.EmbeddingModel()
|
if err := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); err != nil {
|
||||||
if err := t.store.UpsertEmbedding(ctx, id, model, vec); err != nil {
|
t.logger.Warn("background embedding error",
|
||||||
t.logger.Warn("background embedding upsert failed",
|
|
||||||
slog.String("thought_id", id.String()),
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", result.Model),
|
||||||
|
slog.String("stage", "upsert"),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
slog.String("error", err.Error()),
|
slog.String("error", err.Error()),
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
t.logger.Info("background embedding retry succeeded", slog.String("thought_id", id.String()))
|
t.logger.Info("background embedding complete",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", result.Model),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
)
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -91,15 +110,15 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
projectID = &project.ID
|
projectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
model := t.provider.EmbeddingModel()
|
primaryModel := t.embeddings.PrimaryModel()
|
||||||
|
|
||||||
thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, model, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, primaryModel, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, BackfillOutput{}, err
|
return nil, BackfillOutput{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
out := BackfillOutput{
|
out := BackfillOutput{
|
||||||
Model: model,
|
Model: primaryModel,
|
||||||
Scanned: len(thoughts),
|
Scanned: len(thoughts),
|
||||||
DryRun: in.DryRun,
|
DryRun: in.DryRun,
|
||||||
}
|
}
|
||||||
@@ -125,7 +144,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
defer sem.Release(1)
|
defer sem.Release(1)
|
||||||
|
|
||||||
vec, embedErr := t.provider.Embed(ctx, content)
|
result, embedErr := t.embeddings.Embed(ctx, content)
|
||||||
if embedErr != nil {
|
if embedErr != nil {
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: embedErr.Error()})
|
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: embedErr.Error()})
|
||||||
@@ -134,7 +153,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if upsertErr := t.store.UpsertEmbedding(ctx, id, model, vec); upsertErr != nil {
|
if upsertErr := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); upsertErr != nil {
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: upsertErr.Error()})
|
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: upsertErr.Error()})
|
||||||
mu.Unlock()
|
mu.Unlock()
|
||||||
@@ -154,7 +173,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
out.Skipped = out.Scanned - out.Embedded - out.Failed
|
out.Skipped = out.Scanned - out.Embedded - out.Failed
|
||||||
|
|
||||||
t.logger.Info("backfill completed",
|
t.logger.Info("backfill completed",
|
||||||
slog.String("model", model),
|
slog.String("model", primaryModel),
|
||||||
slog.Int("scanned", out.Scanned),
|
slog.Int("scanned", out.Scanned),
|
||||||
slog.Int("embedded", out.Embedded),
|
slog.Int("embedded", out.Embedded),
|
||||||
slog.Int("failed", out.Failed),
|
slog.Int("failed", out.Failed),
|
||||||
|
|||||||
@@ -1,212 +1,212 @@
|
|||||||
package tools
|
package tools
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
// "github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/store"
|
// "git.warky.dev/wdevs/amcs/internal/store"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
// )
|
||||||
|
|
||||||
type CalendarTool struct {
|
// type CalendarTool struct {
|
||||||
store *store.DB
|
// store *store.DB
|
||||||
}
|
// }
|
||||||
|
|
||||||
func NewCalendarTool(db *store.DB) *CalendarTool {
|
// func NewCalendarTool(db *store.DB) *CalendarTool {
|
||||||
return &CalendarTool{store: db}
|
// return &CalendarTool{store: db}
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_family_member
|
// // add_family_member
|
||||||
|
|
||||||
type AddFamilyMemberInput struct {
|
// type AddFamilyMemberInput struct {
|
||||||
Name string `json:"name" jsonschema:"person's name"`
|
// Name string `json:"name" jsonschema:"person's name"`
|
||||||
Relationship string `json:"relationship,omitempty" jsonschema:"e.g. self, spouse, child, parent"`
|
// Relationship string `json:"relationship,omitempty" jsonschema:"e.g. self, spouse, child, parent"`
|
||||||
BirthDate *time.Time `json:"birth_date,omitempty"`
|
// BirthDate *time.Time `json:"birth_date,omitempty"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddFamilyMemberOutput struct {
|
// type AddFamilyMemberOutput struct {
|
||||||
Member ext.FamilyMember `json:"member"`
|
// Member ext.FamilyMember `json:"member"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) AddMember(ctx context.Context, _ *mcp.CallToolRequest, in AddFamilyMemberInput) (*mcp.CallToolResult, AddFamilyMemberOutput, error) {
|
// func (t *CalendarTool) AddMember(ctx context.Context, _ *mcp.CallToolRequest, in AddFamilyMemberInput) (*mcp.CallToolResult, AddFamilyMemberOutput, error) {
|
||||||
if strings.TrimSpace(in.Name) == "" {
|
// if strings.TrimSpace(in.Name) == "" {
|
||||||
return nil, AddFamilyMemberOutput{}, errRequiredField("name")
|
// return nil, AddFamilyMemberOutput{}, errRequiredField("name")
|
||||||
}
|
// }
|
||||||
member, err := t.store.AddFamilyMember(ctx, ext.FamilyMember{
|
// member, err := t.store.AddFamilyMember(ctx, ext.FamilyMember{
|
||||||
Name: strings.TrimSpace(in.Name),
|
// Name: strings.TrimSpace(in.Name),
|
||||||
Relationship: strings.TrimSpace(in.Relationship),
|
// Relationship: strings.TrimSpace(in.Relationship),
|
||||||
BirthDate: in.BirthDate,
|
// BirthDate: in.BirthDate,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddFamilyMemberOutput{}, err
|
// return nil, AddFamilyMemberOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddFamilyMemberOutput{Member: member}, nil
|
// return nil, AddFamilyMemberOutput{Member: member}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// list_family_members
|
// // list_family_members
|
||||||
|
|
||||||
type ListFamilyMembersInput struct{}
|
// type ListFamilyMembersInput struct{}
|
||||||
|
|
||||||
type ListFamilyMembersOutput struct {
|
// type ListFamilyMembersOutput struct {
|
||||||
Members []ext.FamilyMember `json:"members"`
|
// Members []ext.FamilyMember `json:"members"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) ListMembers(ctx context.Context, _ *mcp.CallToolRequest, _ ListFamilyMembersInput) (*mcp.CallToolResult, ListFamilyMembersOutput, error) {
|
// func (t *CalendarTool) ListMembers(ctx context.Context, _ *mcp.CallToolRequest, _ ListFamilyMembersInput) (*mcp.CallToolResult, ListFamilyMembersOutput, error) {
|
||||||
members, err := t.store.ListFamilyMembers(ctx)
|
// members, err := t.store.ListFamilyMembers(ctx)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, ListFamilyMembersOutput{}, err
|
// return nil, ListFamilyMembersOutput{}, err
|
||||||
}
|
// }
|
||||||
if members == nil {
|
// if members == nil {
|
||||||
members = []ext.FamilyMember{}
|
// members = []ext.FamilyMember{}
|
||||||
}
|
// }
|
||||||
return nil, ListFamilyMembersOutput{Members: members}, nil
|
// return nil, ListFamilyMembersOutput{Members: members}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_activity
|
// // add_activity
|
||||||
|
|
||||||
type AddActivityInput struct {
|
// type AddActivityInput struct {
|
||||||
Title string `json:"title" jsonschema:"activity title"`
|
// Title string `json:"title" jsonschema:"activity title"`
|
||||||
ActivityType string `json:"activity_type,omitempty" jsonschema:"e.g. sports, medical, school, social"`
|
// ActivityType string `json:"activity_type,omitempty" jsonschema:"e.g. sports, medical, school, social"`
|
||||||
FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty" jsonschema:"leave empty for whole-family activities"`
|
// FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty" jsonschema:"leave empty for whole-family activities"`
|
||||||
DayOfWeek string `json:"day_of_week,omitempty" jsonschema:"for recurring: monday, tuesday, etc."`
|
// DayOfWeek string `json:"day_of_week,omitempty" jsonschema:"for recurring: monday, tuesday, etc."`
|
||||||
StartTime string `json:"start_time,omitempty" jsonschema:"HH:MM format"`
|
// StartTime string `json:"start_time,omitempty" jsonschema:"HH:MM format"`
|
||||||
EndTime string `json:"end_time,omitempty" jsonschema:"HH:MM format"`
|
// EndTime string `json:"end_time,omitempty" jsonschema:"HH:MM format"`
|
||||||
StartDate *time.Time `json:"start_date,omitempty"`
|
// StartDate *time.Time `json:"start_date,omitempty"`
|
||||||
EndDate *time.Time `json:"end_date,omitempty" jsonschema:"for recurring activities, when they end"`
|
// EndDate *time.Time `json:"end_date,omitempty" jsonschema:"for recurring activities, when they end"`
|
||||||
Location string `json:"location,omitempty"`
|
// Location string `json:"location,omitempty"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddActivityOutput struct {
|
// type AddActivityOutput struct {
|
||||||
Activity ext.Activity `json:"activity"`
|
// Activity ext.Activity `json:"activity"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) AddActivity(ctx context.Context, _ *mcp.CallToolRequest, in AddActivityInput) (*mcp.CallToolResult, AddActivityOutput, error) {
|
// func (t *CalendarTool) AddActivity(ctx context.Context, _ *mcp.CallToolRequest, in AddActivityInput) (*mcp.CallToolResult, AddActivityOutput, error) {
|
||||||
if strings.TrimSpace(in.Title) == "" {
|
// if strings.TrimSpace(in.Title) == "" {
|
||||||
return nil, AddActivityOutput{}, errRequiredField("title")
|
// return nil, AddActivityOutput{}, errRequiredField("title")
|
||||||
}
|
// }
|
||||||
activity, err := t.store.AddActivity(ctx, ext.Activity{
|
// activity, err := t.store.AddActivity(ctx, ext.Activity{
|
||||||
FamilyMemberID: in.FamilyMemberID,
|
// FamilyMemberID: in.FamilyMemberID,
|
||||||
Title: strings.TrimSpace(in.Title),
|
// Title: strings.TrimSpace(in.Title),
|
||||||
ActivityType: strings.TrimSpace(in.ActivityType),
|
// ActivityType: strings.TrimSpace(in.ActivityType),
|
||||||
DayOfWeek: strings.ToLower(strings.TrimSpace(in.DayOfWeek)),
|
// DayOfWeek: strings.ToLower(strings.TrimSpace(in.DayOfWeek)),
|
||||||
StartTime: strings.TrimSpace(in.StartTime),
|
// StartTime: strings.TrimSpace(in.StartTime),
|
||||||
EndTime: strings.TrimSpace(in.EndTime),
|
// EndTime: strings.TrimSpace(in.EndTime),
|
||||||
StartDate: in.StartDate,
|
// StartDate: in.StartDate,
|
||||||
EndDate: in.EndDate,
|
// EndDate: in.EndDate,
|
||||||
Location: strings.TrimSpace(in.Location),
|
// Location: strings.TrimSpace(in.Location),
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddActivityOutput{}, err
|
// return nil, AddActivityOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddActivityOutput{Activity: activity}, nil
|
// return nil, AddActivityOutput{Activity: activity}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_week_schedule
|
// // get_week_schedule
|
||||||
|
|
||||||
type GetWeekScheduleInput struct {
|
// type GetWeekScheduleInput struct {
|
||||||
WeekStart time.Time `json:"week_start" jsonschema:"start of the week (Monday) to retrieve"`
|
// WeekStart time.Time `json:"week_start" jsonschema:"start of the week (Monday) to retrieve"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetWeekScheduleOutput struct {
|
// type GetWeekScheduleOutput struct {
|
||||||
Activities []ext.Activity `json:"activities"`
|
// Activities []ext.Activity `json:"activities"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) GetWeekSchedule(ctx context.Context, _ *mcp.CallToolRequest, in GetWeekScheduleInput) (*mcp.CallToolResult, GetWeekScheduleOutput, error) {
|
// func (t *CalendarTool) GetWeekSchedule(ctx context.Context, _ *mcp.CallToolRequest, in GetWeekScheduleInput) (*mcp.CallToolResult, GetWeekScheduleOutput, error) {
|
||||||
activities, err := t.store.GetWeekSchedule(ctx, in.WeekStart)
|
// activities, err := t.store.GetWeekSchedule(ctx, in.WeekStart)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetWeekScheduleOutput{}, err
|
// return nil, GetWeekScheduleOutput{}, err
|
||||||
}
|
// }
|
||||||
if activities == nil {
|
// if activities == nil {
|
||||||
activities = []ext.Activity{}
|
// activities = []ext.Activity{}
|
||||||
}
|
// }
|
||||||
return nil, GetWeekScheduleOutput{Activities: activities}, nil
|
// return nil, GetWeekScheduleOutput{Activities: activities}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// search_activities
|
// // search_activities
|
||||||
|
|
||||||
type SearchActivitiesInput struct {
|
// type SearchActivitiesInput struct {
|
||||||
Query string `json:"query,omitempty" jsonschema:"search text matching title or notes"`
|
// Query string `json:"query,omitempty" jsonschema:"search text matching title or notes"`
|
||||||
ActivityType string `json:"activity_type,omitempty" jsonschema:"filter by type"`
|
// ActivityType string `json:"activity_type,omitempty" jsonschema:"filter by type"`
|
||||||
FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty" jsonschema:"filter by family member"`
|
// FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty" jsonschema:"filter by family member"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type SearchActivitiesOutput struct {
|
// type SearchActivitiesOutput struct {
|
||||||
Activities []ext.Activity `json:"activities"`
|
// Activities []ext.Activity `json:"activities"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) SearchActivities(ctx context.Context, _ *mcp.CallToolRequest, in SearchActivitiesInput) (*mcp.CallToolResult, SearchActivitiesOutput, error) {
|
// func (t *CalendarTool) SearchActivities(ctx context.Context, _ *mcp.CallToolRequest, in SearchActivitiesInput) (*mcp.CallToolResult, SearchActivitiesOutput, error) {
|
||||||
activities, err := t.store.SearchActivities(ctx, in.Query, in.ActivityType, in.FamilyMemberID)
|
// activities, err := t.store.SearchActivities(ctx, in.Query, in.ActivityType, in.FamilyMemberID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, SearchActivitiesOutput{}, err
|
// return nil, SearchActivitiesOutput{}, err
|
||||||
}
|
// }
|
||||||
if activities == nil {
|
// if activities == nil {
|
||||||
activities = []ext.Activity{}
|
// activities = []ext.Activity{}
|
||||||
}
|
// }
|
||||||
return nil, SearchActivitiesOutput{Activities: activities}, nil
|
// return nil, SearchActivitiesOutput{Activities: activities}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_important_date
|
// // add_important_date
|
||||||
|
|
||||||
type AddImportantDateInput struct {
|
// type AddImportantDateInput struct {
|
||||||
Title string `json:"title" jsonschema:"description of the date"`
|
// Title string `json:"title" jsonschema:"description of the date"`
|
||||||
DateValue time.Time `json:"date_value" jsonschema:"the date"`
|
// DateValue time.Time `json:"date_value" jsonschema:"the date"`
|
||||||
FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty"`
|
// FamilyMemberID *uuid.UUID `json:"family_member_id,omitempty"`
|
||||||
RecurringYearly bool `json:"recurring_yearly,omitempty" jsonschema:"if true, reminds every year"`
|
// RecurringYearly bool `json:"recurring_yearly,omitempty" jsonschema:"if true, reminds every year"`
|
||||||
ReminderDaysBefore int `json:"reminder_days_before,omitempty" jsonschema:"how many days before to remind (default: 7)"`
|
// ReminderDaysBefore int `json:"reminder_days_before,omitempty" jsonschema:"how many days before to remind (default: 7)"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddImportantDateOutput struct {
|
// type AddImportantDateOutput struct {
|
||||||
Date ext.ImportantDate `json:"date"`
|
// Date ext.ImportantDate `json:"date"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) AddImportantDate(ctx context.Context, _ *mcp.CallToolRequest, in AddImportantDateInput) (*mcp.CallToolResult, AddImportantDateOutput, error) {
|
// func (t *CalendarTool) AddImportantDate(ctx context.Context, _ *mcp.CallToolRequest, in AddImportantDateInput) (*mcp.CallToolResult, AddImportantDateOutput, error) {
|
||||||
if strings.TrimSpace(in.Title) == "" {
|
// if strings.TrimSpace(in.Title) == "" {
|
||||||
return nil, AddImportantDateOutput{}, errRequiredField("title")
|
// return nil, AddImportantDateOutput{}, errRequiredField("title")
|
||||||
}
|
// }
|
||||||
reminder := in.ReminderDaysBefore
|
// reminder := in.ReminderDaysBefore
|
||||||
if reminder <= 0 {
|
// if reminder <= 0 {
|
||||||
reminder = 7
|
// reminder = 7
|
||||||
}
|
// }
|
||||||
d, err := t.store.AddImportantDate(ctx, ext.ImportantDate{
|
// d, err := t.store.AddImportantDate(ctx, ext.ImportantDate{
|
||||||
FamilyMemberID: in.FamilyMemberID,
|
// FamilyMemberID: in.FamilyMemberID,
|
||||||
Title: strings.TrimSpace(in.Title),
|
// Title: strings.TrimSpace(in.Title),
|
||||||
DateValue: in.DateValue,
|
// DateValue: in.DateValue,
|
||||||
RecurringYearly: in.RecurringYearly,
|
// RecurringYearly: in.RecurringYearly,
|
||||||
ReminderDaysBefore: reminder,
|
// ReminderDaysBefore: reminder,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddImportantDateOutput{}, err
|
// return nil, AddImportantDateOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddImportantDateOutput{Date: d}, nil
|
// return nil, AddImportantDateOutput{Date: d}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_upcoming_dates
|
// // get_upcoming_dates
|
||||||
|
|
||||||
type GetUpcomingDatesInput struct {
|
// type GetUpcomingDatesInput struct {
|
||||||
DaysAhead int `json:"days_ahead,omitempty" jsonschema:"how many days to look ahead (default: 30)"`
|
// DaysAhead int `json:"days_ahead,omitempty" jsonschema:"how many days to look ahead (default: 30)"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetUpcomingDatesOutput struct {
|
// type GetUpcomingDatesOutput struct {
|
||||||
Dates []ext.ImportantDate `json:"dates"`
|
// Dates []ext.ImportantDate `json:"dates"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CalendarTool) GetUpcomingDates(ctx context.Context, _ *mcp.CallToolRequest, in GetUpcomingDatesInput) (*mcp.CallToolResult, GetUpcomingDatesOutput, error) {
|
// func (t *CalendarTool) GetUpcomingDates(ctx context.Context, _ *mcp.CallToolRequest, in GetUpcomingDatesInput) (*mcp.CallToolResult, GetUpcomingDatesOutput, error) {
|
||||||
dates, err := t.store.GetUpcomingDates(ctx, in.DaysAhead)
|
// dates, err := t.store.GetUpcomingDates(ctx, in.DaysAhead)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetUpcomingDatesOutput{}, err
|
// return nil, GetUpcomingDatesOutput{}, err
|
||||||
}
|
// }
|
||||||
if dates == nil {
|
// if dates == nil {
|
||||||
dates = []ext.ImportantDate{}
|
// dates = []ext.ImportantDate{}
|
||||||
}
|
// }
|
||||||
return nil, GetUpcomingDatesOutput{Dates: dates}, nil
|
// return nil, GetUpcomingDatesOutput{Dates: dates}, nil
|
||||||
}
|
// }
|
||||||
|
|||||||
@@ -2,9 +2,7 @@ package tools
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"log/slog"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
@@ -22,15 +20,19 @@ type EmbeddingQueuer interface {
|
|||||||
QueueThought(ctx context.Context, id uuid.UUID, content string)
|
QueueThought(ctx context.Context, id uuid.UUID, content string)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// MetadataQueuer queues a thought for background metadata retry. Both
|
||||||
|
// MetadataRetryer and EnrichmentRetryer satisfy this.
|
||||||
|
type MetadataQueuer interface {
|
||||||
|
QueueThought(id uuid.UUID)
|
||||||
|
}
|
||||||
|
|
||||||
type CaptureTool struct {
|
type CaptureTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
capture config.CaptureConfig
|
capture config.CaptureConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
metadataTimeout time.Duration
|
retryer MetadataQueuer
|
||||||
retryer *MetadataRetryer
|
embedRetryer EmbeddingQueuer
|
||||||
embedRetryer EmbeddingQueuer
|
|
||||||
log *slog.Logger
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type CaptureInput struct {
|
type CaptureInput struct {
|
||||||
@@ -42,8 +44,8 @@ type CaptureOutput struct {
|
|||||||
Thought thoughttypes.Thought `json:"thought"`
|
Thought thoughttypes.Thought `json:"thought"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool {
|
func NewCaptureTool(db *store.DB, embeddings *ai.EmbeddingRunner, capture config.CaptureConfig, sessions *session.ActiveProjects, retryer MetadataQueuer, embedRetryer EmbeddingQueuer) *CaptureTool {
|
||||||
return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log}
|
return &CaptureTool{store: db, embeddings: embeddings, capture: capture, sessions: sessions, retryer: retryer, embedRetryer: embedRetryer}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) {
|
func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) {
|
||||||
@@ -58,6 +60,7 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
|||||||
}
|
}
|
||||||
|
|
||||||
rawMetadata := metadata.Fallback(t.capture)
|
rawMetadata := metadata.Fallback(t.capture)
|
||||||
|
rawMetadata.MetadataStatus = metadata.MetadataStatusPending
|
||||||
thought := thoughttypes.Thought{
|
thought := thoughttypes.Thought{
|
||||||
Content: content,
|
Content: content,
|
||||||
Metadata: rawMetadata,
|
Metadata: rawMetadata,
|
||||||
@@ -66,7 +69,7 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
|||||||
thought.ProjectID = &project.ID
|
thought.ProjectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
created, err := t.store.InsertThought(ctx, thought, t.provider.EmbeddingModel())
|
created, err := t.store.InsertThought(ctx, thought, t.embeddings.PrimaryModel())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, CaptureOutput{}, err
|
return nil, CaptureOutput{}, err
|
||||||
}
|
}
|
||||||
@@ -74,56 +77,12 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
|||||||
_ = t.store.TouchProject(ctx, project.ID)
|
_ = t.store.TouchProject(ctx, project.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.retryer != nil || t.embedRetryer != nil {
|
if t.retryer != nil {
|
||||||
t.launchEnrichment(created.ID, content)
|
t.retryer.QueueThought(created.ID)
|
||||||
|
}
|
||||||
|
if t.embedRetryer != nil {
|
||||||
|
t.embedRetryer.QueueThought(ctx, created.ID, content)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, CaptureOutput{Thought: created}, nil
|
return nil, CaptureOutput{Thought: created}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) {
|
|
||||||
go func() {
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
|
|
||||||
defer cancel()
|
|
||||||
|
|
||||||
if t.retryer != nil {
|
|
||||||
attemptedAt := time.Now().UTC()
|
|
||||||
rawMetadata := metadata.Fallback(t.capture)
|
|
||||||
extracted, err := t.provider.ExtractMetadata(ctx, content)
|
|
||||||
if err != nil {
|
|
||||||
failed := metadata.MarkMetadataFailed(rawMetadata, t.capture, attemptedAt, err)
|
|
||||||
if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, failed); updateErr != nil {
|
|
||||||
t.log.Warn("deferred metadata failure could not be persisted",
|
|
||||||
slog.String("thought_id", id.String()),
|
|
||||||
slog.String("error", updateErr.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
t.log.Warn("deferred metadata extraction failed",
|
|
||||||
slog.String("thought_id", id.String()),
|
|
||||||
slog.String("provider", t.provider.Name()),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
t.retryer.QueueThought(id)
|
|
||||||
} else {
|
|
||||||
completed := metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt)
|
|
||||||
if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, completed); updateErr != nil {
|
|
||||||
t.log.Warn("deferred metadata completion could not be persisted",
|
|
||||||
slog.String("thought_id", id.String()),
|
|
||||||
slog.String("error", updateErr.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.embedRetryer != nil {
|
|
||||||
if _, err := t.provider.Embed(ctx, content); err != nil {
|
|
||||||
t.log.Warn("deferred embedding failed",
|
|
||||||
slog.String("thought_id", id.String()),
|
|
||||||
slog.String("provider", t.provider.Name()),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
t.embedRetryer.QueueThought(ctx, id, content)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type ContextTool struct {
|
type ContextTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
}
|
}
|
||||||
|
|
||||||
type ProjectContextInput struct {
|
type ProjectContextInput struct {
|
||||||
@@ -41,8 +41,8 @@ type ProjectContextOutput struct {
|
|||||||
Items []ContextItem `json:"items"`
|
Items []ContextItem `json:"items"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewContextTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool {
|
func NewContextTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool {
|
||||||
return &ContextTool{store: db, provider: provider, search: search, sessions: sessions}
|
return &ContextTool{store: db, embeddings: embeddings, search: search, sessions: sessions}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ProjectContextInput) (*mcp.CallToolResult, ProjectContextOutput, error) {
|
func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ProjectContextInput) (*mcp.CallToolResult, ProjectContextOutput, error) {
|
||||||
@@ -72,7 +72,7 @@ func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in P
|
|||||||
|
|
||||||
query := strings.TrimSpace(in.Query)
|
query := strings.TrimSpace(in.Query)
|
||||||
if query != "" {
|
if query != "" {
|
||||||
semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil)
|
semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, ProjectContextOutput{}, err
|
return nil, ProjectContextOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,240 +1,240 @@
|
|||||||
package tools
|
package tools
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"errors"
|
// "errors"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v5"
|
// "github.com/jackc/pgx/v5"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
// "github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/store"
|
// "git.warky.dev/wdevs/amcs/internal/store"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
// )
|
||||||
|
|
||||||
type CRMTool struct {
|
// type CRMTool struct {
|
||||||
store *store.DB
|
// store *store.DB
|
||||||
}
|
// }
|
||||||
|
|
||||||
func NewCRMTool(db *store.DB) *CRMTool {
|
// func NewCRMTool(db *store.DB) *CRMTool {
|
||||||
return &CRMTool{store: db}
|
// return &CRMTool{store: db}
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_professional_contact
|
// // add_professional_contact
|
||||||
|
|
||||||
type AddContactInput struct {
|
// type AddContactInput struct {
|
||||||
Name string `json:"name" jsonschema:"contact's full name"`
|
// Name string `json:"name" jsonschema:"contact's full name"`
|
||||||
Company string `json:"company,omitempty"`
|
// Company string `json:"company,omitempty"`
|
||||||
Title string `json:"title,omitempty" jsonschema:"job title"`
|
// Title string `json:"title,omitempty" jsonschema:"job title"`
|
||||||
Email string `json:"email,omitempty"`
|
// Email string `json:"email,omitempty"`
|
||||||
Phone string `json:"phone,omitempty"`
|
// Phone string `json:"phone,omitempty"`
|
||||||
LinkedInURL string `json:"linkedin_url,omitempty"`
|
// LinkedInURL string `json:"linkedin_url,omitempty"`
|
||||||
HowWeMet string `json:"how_we_met,omitempty"`
|
// HowWeMet string `json:"how_we_met,omitempty"`
|
||||||
Tags []string `json:"tags,omitempty"`
|
// Tags []string `json:"tags,omitempty"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
FollowUpDate *time.Time `json:"follow_up_date,omitempty"`
|
// FollowUpDate *time.Time `json:"follow_up_date,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddContactOutput struct {
|
// type AddContactOutput struct {
|
||||||
Contact ext.ProfessionalContact `json:"contact"`
|
// Contact ext.ProfessionalContact `json:"contact"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) AddContact(ctx context.Context, _ *mcp.CallToolRequest, in AddContactInput) (*mcp.CallToolResult, AddContactOutput, error) {
|
// func (t *CRMTool) AddContact(ctx context.Context, _ *mcp.CallToolRequest, in AddContactInput) (*mcp.CallToolResult, AddContactOutput, error) {
|
||||||
if strings.TrimSpace(in.Name) == "" {
|
// if strings.TrimSpace(in.Name) == "" {
|
||||||
return nil, AddContactOutput{}, errRequiredField("name")
|
// return nil, AddContactOutput{}, errRequiredField("name")
|
||||||
}
|
// }
|
||||||
if in.Tags == nil {
|
// if in.Tags == nil {
|
||||||
in.Tags = []string{}
|
// in.Tags = []string{}
|
||||||
}
|
// }
|
||||||
contact, err := t.store.AddProfessionalContact(ctx, ext.ProfessionalContact{
|
// contact, err := t.store.AddProfessionalContact(ctx, ext.ProfessionalContact{
|
||||||
Name: strings.TrimSpace(in.Name),
|
// Name: strings.TrimSpace(in.Name),
|
||||||
Company: strings.TrimSpace(in.Company),
|
// Company: strings.TrimSpace(in.Company),
|
||||||
Title: strings.TrimSpace(in.Title),
|
// Title: strings.TrimSpace(in.Title),
|
||||||
Email: strings.TrimSpace(in.Email),
|
// Email: strings.TrimSpace(in.Email),
|
||||||
Phone: strings.TrimSpace(in.Phone),
|
// Phone: strings.TrimSpace(in.Phone),
|
||||||
LinkedInURL: strings.TrimSpace(in.LinkedInURL),
|
// LinkedInURL: strings.TrimSpace(in.LinkedInURL),
|
||||||
HowWeMet: strings.TrimSpace(in.HowWeMet),
|
// HowWeMet: strings.TrimSpace(in.HowWeMet),
|
||||||
Tags: in.Tags,
|
// Tags: in.Tags,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
FollowUpDate: in.FollowUpDate,
|
// FollowUpDate: in.FollowUpDate,
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddContactOutput{}, err
|
// return nil, AddContactOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddContactOutput{Contact: contact}, nil
|
// return nil, AddContactOutput{Contact: contact}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// search_contacts
|
// // search_contacts
|
||||||
|
|
||||||
type SearchContactsInput struct {
|
// type SearchContactsInput struct {
|
||||||
Query string `json:"query,omitempty" jsonschema:"search text matching name, company, title, or notes"`
|
// Query string `json:"query,omitempty" jsonschema:"search text matching name, company, title, or notes"`
|
||||||
Tags []string `json:"tags,omitempty" jsonschema:"filter by tags (all must match)"`
|
// Tags []string `json:"tags,omitempty" jsonschema:"filter by tags (all must match)"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type SearchContactsOutput struct {
|
// type SearchContactsOutput struct {
|
||||||
Contacts []ext.ProfessionalContact `json:"contacts"`
|
// Contacts []ext.ProfessionalContact `json:"contacts"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) SearchContacts(ctx context.Context, _ *mcp.CallToolRequest, in SearchContactsInput) (*mcp.CallToolResult, SearchContactsOutput, error) {
|
// func (t *CRMTool) SearchContacts(ctx context.Context, _ *mcp.CallToolRequest, in SearchContactsInput) (*mcp.CallToolResult, SearchContactsOutput, error) {
|
||||||
contacts, err := t.store.SearchContacts(ctx, in.Query, in.Tags)
|
// contacts, err := t.store.SearchContacts(ctx, in.Query, in.Tags)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, SearchContactsOutput{}, err
|
// return nil, SearchContactsOutput{}, err
|
||||||
}
|
// }
|
||||||
if contacts == nil {
|
// if contacts == nil {
|
||||||
contacts = []ext.ProfessionalContact{}
|
// contacts = []ext.ProfessionalContact{}
|
||||||
}
|
// }
|
||||||
return nil, SearchContactsOutput{Contacts: contacts}, nil
|
// return nil, SearchContactsOutput{Contacts: contacts}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// log_interaction
|
// // log_interaction
|
||||||
|
|
||||||
type LogInteractionInput struct {
|
// type LogInteractionInput struct {
|
||||||
ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
// ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
||||||
InteractionType string `json:"interaction_type" jsonschema:"one of: meeting, email, call, coffee, event, linkedin, other"`
|
// InteractionType string `json:"interaction_type" jsonschema:"one of: meeting, email, call, coffee, event, linkedin, other"`
|
||||||
OccurredAt *time.Time `json:"occurred_at,omitempty" jsonschema:"when it happened (defaults to now)"`
|
// OccurredAt *time.Time `json:"occurred_at,omitempty" jsonschema:"when it happened (defaults to now)"`
|
||||||
Summary string `json:"summary" jsonschema:"summary of the interaction"`
|
// Summary string `json:"summary" jsonschema:"summary of the interaction"`
|
||||||
FollowUpNeeded bool `json:"follow_up_needed,omitempty"`
|
// FollowUpNeeded bool `json:"follow_up_needed,omitempty"`
|
||||||
FollowUpNotes string `json:"follow_up_notes,omitempty"`
|
// FollowUpNotes string `json:"follow_up_notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type LogInteractionOutput struct {
|
// type LogInteractionOutput struct {
|
||||||
Interaction ext.ContactInteraction `json:"interaction"`
|
// Interaction ext.ContactInteraction `json:"interaction"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) LogInteraction(ctx context.Context, _ *mcp.CallToolRequest, in LogInteractionInput) (*mcp.CallToolResult, LogInteractionOutput, error) {
|
// func (t *CRMTool) LogInteraction(ctx context.Context, _ *mcp.CallToolRequest, in LogInteractionInput) (*mcp.CallToolResult, LogInteractionOutput, error) {
|
||||||
if strings.TrimSpace(in.Summary) == "" {
|
// if strings.TrimSpace(in.Summary) == "" {
|
||||||
return nil, LogInteractionOutput{}, errRequiredField("summary")
|
// return nil, LogInteractionOutput{}, errRequiredField("summary")
|
||||||
}
|
// }
|
||||||
occurredAt := time.Now()
|
// occurredAt := time.Now()
|
||||||
if in.OccurredAt != nil {
|
// if in.OccurredAt != nil {
|
||||||
occurredAt = *in.OccurredAt
|
// occurredAt = *in.OccurredAt
|
||||||
}
|
// }
|
||||||
interaction, err := t.store.LogInteraction(ctx, ext.ContactInteraction{
|
// interaction, err := t.store.LogInteraction(ctx, ext.ContactInteraction{
|
||||||
ContactID: in.ContactID,
|
// ContactID: in.ContactID,
|
||||||
InteractionType: in.InteractionType,
|
// InteractionType: in.InteractionType,
|
||||||
OccurredAt: occurredAt,
|
// OccurredAt: occurredAt,
|
||||||
Summary: strings.TrimSpace(in.Summary),
|
// Summary: strings.TrimSpace(in.Summary),
|
||||||
FollowUpNeeded: in.FollowUpNeeded,
|
// FollowUpNeeded: in.FollowUpNeeded,
|
||||||
FollowUpNotes: strings.TrimSpace(in.FollowUpNotes),
|
// FollowUpNotes: strings.TrimSpace(in.FollowUpNotes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, LogInteractionOutput{}, err
|
// return nil, LogInteractionOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, LogInteractionOutput{Interaction: interaction}, nil
|
// return nil, LogInteractionOutput{Interaction: interaction}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_contact_history
|
// // get_contact_history
|
||||||
|
|
||||||
type GetContactHistoryInput struct {
|
// type GetContactHistoryInput struct {
|
||||||
ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
// ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetContactHistoryOutput struct {
|
// type GetContactHistoryOutput struct {
|
||||||
History ext.ContactHistory `json:"history"`
|
// History ext.ContactHistory `json:"history"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) GetHistory(ctx context.Context, _ *mcp.CallToolRequest, in GetContactHistoryInput) (*mcp.CallToolResult, GetContactHistoryOutput, error) {
|
// func (t *CRMTool) GetHistory(ctx context.Context, _ *mcp.CallToolRequest, in GetContactHistoryInput) (*mcp.CallToolResult, GetContactHistoryOutput, error) {
|
||||||
history, err := t.store.GetContactHistory(ctx, in.ContactID)
|
// history, err := t.store.GetContactHistory(ctx, in.ContactID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetContactHistoryOutput{}, err
|
// return nil, GetContactHistoryOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, GetContactHistoryOutput{History: history}, nil
|
// return nil, GetContactHistoryOutput{History: history}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// create_opportunity
|
// // create_opportunity
|
||||||
|
|
||||||
type CreateOpportunityInput struct {
|
// type CreateOpportunityInput struct {
|
||||||
ContactID *uuid.UUID `json:"contact_id,omitempty"`
|
// ContactID *uuid.UUID `json:"contact_id,omitempty"`
|
||||||
Title string `json:"title" jsonschema:"opportunity title"`
|
// Title string `json:"title" jsonschema:"opportunity title"`
|
||||||
Description string `json:"description,omitempty"`
|
// Description string `json:"description,omitempty"`
|
||||||
Stage string `json:"stage,omitempty" jsonschema:"one of: identified, in_conversation, proposal, negotiation, won, lost (default: identified)"`
|
// Stage string `json:"stage,omitempty" jsonschema:"one of: identified, in_conversation, proposal, negotiation, won, lost (default: identified)"`
|
||||||
Value *float64 `json:"value,omitempty" jsonschema:"monetary value"`
|
// Value *float64 `json:"value,omitempty" jsonschema:"monetary value"`
|
||||||
ExpectedCloseDate *time.Time `json:"expected_close_date,omitempty"`
|
// ExpectedCloseDate *time.Time `json:"expected_close_date,omitempty"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type CreateOpportunityOutput struct {
|
// type CreateOpportunityOutput struct {
|
||||||
Opportunity ext.Opportunity `json:"opportunity"`
|
// Opportunity ext.Opportunity `json:"opportunity"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) CreateOpportunity(ctx context.Context, _ *mcp.CallToolRequest, in CreateOpportunityInput) (*mcp.CallToolResult, CreateOpportunityOutput, error) {
|
// func (t *CRMTool) CreateOpportunity(ctx context.Context, _ *mcp.CallToolRequest, in CreateOpportunityInput) (*mcp.CallToolResult, CreateOpportunityOutput, error) {
|
||||||
if strings.TrimSpace(in.Title) == "" {
|
// if strings.TrimSpace(in.Title) == "" {
|
||||||
return nil, CreateOpportunityOutput{}, errRequiredField("title")
|
// return nil, CreateOpportunityOutput{}, errRequiredField("title")
|
||||||
}
|
// }
|
||||||
stage := strings.TrimSpace(in.Stage)
|
// stage := strings.TrimSpace(in.Stage)
|
||||||
if stage == "" {
|
// if stage == "" {
|
||||||
stage = "identified"
|
// stage = "identified"
|
||||||
}
|
// }
|
||||||
opp, err := t.store.CreateOpportunity(ctx, ext.Opportunity{
|
// opp, err := t.store.CreateOpportunity(ctx, ext.Opportunity{
|
||||||
ContactID: in.ContactID,
|
// ContactID: in.ContactID,
|
||||||
Title: strings.TrimSpace(in.Title),
|
// Title: strings.TrimSpace(in.Title),
|
||||||
Description: strings.TrimSpace(in.Description),
|
// Description: strings.TrimSpace(in.Description),
|
||||||
Stage: stage,
|
// Stage: stage,
|
||||||
Value: in.Value,
|
// Value: in.Value,
|
||||||
ExpectedCloseDate: in.ExpectedCloseDate,
|
// ExpectedCloseDate: in.ExpectedCloseDate,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, CreateOpportunityOutput{}, err
|
// return nil, CreateOpportunityOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, CreateOpportunityOutput{Opportunity: opp}, nil
|
// return nil, CreateOpportunityOutput{Opportunity: opp}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_follow_ups_due
|
// // get_follow_ups_due
|
||||||
|
|
||||||
type GetFollowUpsDueInput struct {
|
// type GetFollowUpsDueInput struct {
|
||||||
DaysAhead int `json:"days_ahead,omitempty" jsonschema:"look ahead window in days (default: 7)"`
|
// DaysAhead int `json:"days_ahead,omitempty" jsonschema:"look ahead window in days (default: 7)"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetFollowUpsDueOutput struct {
|
// type GetFollowUpsDueOutput struct {
|
||||||
Contacts []ext.ProfessionalContact `json:"contacts"`
|
// Contacts []ext.ProfessionalContact `json:"contacts"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) GetFollowUpsDue(ctx context.Context, _ *mcp.CallToolRequest, in GetFollowUpsDueInput) (*mcp.CallToolResult, GetFollowUpsDueOutput, error) {
|
// func (t *CRMTool) GetFollowUpsDue(ctx context.Context, _ *mcp.CallToolRequest, in GetFollowUpsDueInput) (*mcp.CallToolResult, GetFollowUpsDueOutput, error) {
|
||||||
contacts, err := t.store.GetFollowUpsDue(ctx, in.DaysAhead)
|
// contacts, err := t.store.GetFollowUpsDue(ctx, in.DaysAhead)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetFollowUpsDueOutput{}, err
|
// return nil, GetFollowUpsDueOutput{}, err
|
||||||
}
|
// }
|
||||||
if contacts == nil {
|
// if contacts == nil {
|
||||||
contacts = []ext.ProfessionalContact{}
|
// contacts = []ext.ProfessionalContact{}
|
||||||
}
|
// }
|
||||||
return nil, GetFollowUpsDueOutput{Contacts: contacts}, nil
|
// return nil, GetFollowUpsDueOutput{Contacts: contacts}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// link_thought_to_contact
|
// // link_thought_to_contact
|
||||||
|
|
||||||
type LinkThoughtToContactInput struct {
|
// type LinkThoughtToContactInput struct {
|
||||||
ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
// ContactID uuid.UUID `json:"contact_id" jsonschema:"id of the contact"`
|
||||||
ThoughtID uuid.UUID `json:"thought_id" jsonschema:"id of the thought to link"`
|
// ThoughtID uuid.UUID `json:"thought_id" jsonschema:"id of the thought to link"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type LinkThoughtToContactOutput struct {
|
// type LinkThoughtToContactOutput struct {
|
||||||
Contact ext.ProfessionalContact `json:"contact"`
|
// Contact ext.ProfessionalContact `json:"contact"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *CRMTool) LinkThought(ctx context.Context, _ *mcp.CallToolRequest, in LinkThoughtToContactInput) (*mcp.CallToolResult, LinkThoughtToContactOutput, error) {
|
// func (t *CRMTool) LinkThought(ctx context.Context, _ *mcp.CallToolRequest, in LinkThoughtToContactInput) (*mcp.CallToolResult, LinkThoughtToContactOutput, error) {
|
||||||
thought, err := t.store.GetThought(ctx, in.ThoughtID)
|
// thought, err := t.store.GetThought(ctx, in.ThoughtID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
if errors.Is(err, pgx.ErrNoRows) {
|
// if errors.Is(err, pgx.ErrNoRows) {
|
||||||
return nil, LinkThoughtToContactOutput{}, errEntityNotFound("thought", "thought_id", in.ThoughtID.String())
|
// return nil, LinkThoughtToContactOutput{}, errEntityNotFound("thought", "thought_id", in.ThoughtID.String())
|
||||||
}
|
// }
|
||||||
return nil, LinkThoughtToContactOutput{}, err
|
// return nil, LinkThoughtToContactOutput{}, err
|
||||||
}
|
// }
|
||||||
|
|
||||||
appendText := fmt.Sprintf("\n\n[Linked thought %s]: %s", thought.ID, thought.Content)
|
// appendText := fmt.Sprintf("\n\n[Linked thought %s]: %s", thought.ID, thought.Content)
|
||||||
if err := t.store.AppendThoughtToContactNotes(ctx, in.ContactID, appendText); err != nil {
|
// if err := t.store.AppendThoughtToContactNotes(ctx, in.ContactID, appendText); err != nil {
|
||||||
return nil, LinkThoughtToContactOutput{}, err
|
// return nil, LinkThoughtToContactOutput{}, err
|
||||||
}
|
// }
|
||||||
|
|
||||||
contact, err := t.store.GetContact(ctx, in.ContactID)
|
// contact, err := t.store.GetContact(ctx, in.ContactID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
if errors.Is(err, pgx.ErrNoRows) {
|
// if errors.Is(err, pgx.ErrNoRows) {
|
||||||
return nil, LinkThoughtToContactOutput{}, errEntityNotFound("contact", "contact_id", in.ContactID.String())
|
// return nil, LinkThoughtToContactOutput{}, errEntityNotFound("contact", "contact_id", in.ContactID.String())
|
||||||
}
|
// }
|
||||||
return nil, LinkThoughtToContactOutput{}, err
|
// return nil, LinkThoughtToContactOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, LinkThoughtToContactOutput{Contact: contact}, nil
|
// return nil, LinkThoughtToContactOutput{Contact: contact}, nil
|
||||||
}
|
// }
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ var enrichmentRetryBackoff = []time.Duration{
|
|||||||
type EnrichmentRetryer struct {
|
type EnrichmentRetryer struct {
|
||||||
backgroundCtx context.Context
|
backgroundCtx context.Context
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
metadata *ai.MetadataRunner
|
||||||
capture config.CaptureConfig
|
capture config.CaptureConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
metadataTimeout time.Duration
|
metadataTimeout time.Duration
|
||||||
@@ -66,14 +66,14 @@ type RetryEnrichmentOutput struct {
|
|||||||
Failures []RetryEnrichmentFailure `json:"failures,omitempty"`
|
Failures []RetryEnrichmentFailure `json:"failures,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer {
|
func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer {
|
||||||
if backgroundCtx == nil {
|
if backgroundCtx == nil {
|
||||||
backgroundCtx = context.Background()
|
backgroundCtx = context.Background()
|
||||||
}
|
}
|
||||||
return &EnrichmentRetryer{
|
return &EnrichmentRetryer{
|
||||||
backgroundCtx: backgroundCtx,
|
backgroundCtx: backgroundCtx,
|
||||||
store: db,
|
store: db,
|
||||||
provider: provider,
|
metadata: metadataRunner,
|
||||||
capture: capture,
|
capture: capture,
|
||||||
sessions: sessions,
|
sessions: sessions,
|
||||||
metadataTimeout: metadataTimeout,
|
metadataTimeout: metadataTimeout,
|
||||||
@@ -91,12 +91,30 @@ func (t *RetryEnrichmentTool) Handle(ctx context.Context, req *mcp.CallToolReque
|
|||||||
|
|
||||||
func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) {
|
func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) {
|
||||||
go func() {
|
go func() {
|
||||||
if _, err := r.retryOne(r.backgroundCtx, id); err != nil {
|
started := time.Now()
|
||||||
r.logger.Warn("background metadata retry failed",
|
r.logger.Info("background metadata started",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
)
|
||||||
|
updated, err := r.retryOne(r.backgroundCtx, id)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Warn("background metadata error",
|
||||||
slog.String("thought_id", id.String()),
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
slog.String("error", err.Error()),
|
slog.String("error", err.Error()),
|
||||||
)
|
)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
r.logger.Info("background metadata complete",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Bool("updated", updated),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
)
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -190,7 +208,7 @@ func (r *EnrichmentRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, e
|
|||||||
}
|
}
|
||||||
|
|
||||||
attemptedAt := time.Now().UTC()
|
attemptedAt := time.Now().UTC()
|
||||||
extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content)
|
extracted, extractErr := r.metadata.ExtractMetadata(attemptCtx, thought.Content)
|
||||||
if extractErr != nil {
|
if extractErr != nil {
|
||||||
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
||||||
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
||||||
|
|||||||
@@ -1,151 +1,151 @@
|
|||||||
package tools
|
package tools
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"strings"
|
// "strings"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
// "github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/store"
|
// "git.warky.dev/wdevs/amcs/internal/store"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
// )
|
||||||
|
|
||||||
type HouseholdTool struct {
|
// type HouseholdTool struct {
|
||||||
store *store.DB
|
// store *store.DB
|
||||||
}
|
// }
|
||||||
|
|
||||||
func NewHouseholdTool(db *store.DB) *HouseholdTool {
|
// func NewHouseholdTool(db *store.DB) *HouseholdTool {
|
||||||
return &HouseholdTool{store: db}
|
// return &HouseholdTool{store: db}
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_household_item
|
// // add_household_item
|
||||||
|
|
||||||
type AddHouseholdItemInput struct {
|
// type AddHouseholdItemInput struct {
|
||||||
Name string `json:"name" jsonschema:"name of the item"`
|
// Name string `json:"name" jsonschema:"name of the item"`
|
||||||
Category string `json:"category,omitempty" jsonschema:"category (e.g. paint, appliance, measurement, document)"`
|
// Category string `json:"category,omitempty" jsonschema:"category (e.g. paint, appliance, measurement, document)"`
|
||||||
Location string `json:"location,omitempty" jsonschema:"where in the home this item is"`
|
// Location string `json:"location,omitempty" jsonschema:"where in the home this item is"`
|
||||||
Details map[string]any `json:"details,omitempty" jsonschema:"flexible metadata (model numbers, colors, specs, etc.)"`
|
// Details map[string]any `json:"details,omitempty" jsonschema:"flexible metadata (model numbers, colors, specs, etc.)"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddHouseholdItemOutput struct {
|
// type AddHouseholdItemOutput struct {
|
||||||
Item ext.HouseholdItem `json:"item"`
|
// Item ext.HouseholdItem `json:"item"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *HouseholdTool) AddItem(ctx context.Context, _ *mcp.CallToolRequest, in AddHouseholdItemInput) (*mcp.CallToolResult, AddHouseholdItemOutput, error) {
|
// func (t *HouseholdTool) AddItem(ctx context.Context, _ *mcp.CallToolRequest, in AddHouseholdItemInput) (*mcp.CallToolResult, AddHouseholdItemOutput, error) {
|
||||||
if strings.TrimSpace(in.Name) == "" {
|
// if strings.TrimSpace(in.Name) == "" {
|
||||||
return nil, AddHouseholdItemOutput{}, errRequiredField("name")
|
// return nil, AddHouseholdItemOutput{}, errRequiredField("name")
|
||||||
}
|
// }
|
||||||
if in.Details == nil {
|
// if in.Details == nil {
|
||||||
in.Details = map[string]any{}
|
// in.Details = map[string]any{}
|
||||||
}
|
// }
|
||||||
item, err := t.store.AddHouseholdItem(ctx, ext.HouseholdItem{
|
// item, err := t.store.AddHouseholdItem(ctx, ext.HouseholdItem{
|
||||||
Name: strings.TrimSpace(in.Name),
|
// Name: strings.TrimSpace(in.Name),
|
||||||
Category: strings.TrimSpace(in.Category),
|
// Category: strings.TrimSpace(in.Category),
|
||||||
Location: strings.TrimSpace(in.Location),
|
// Location: strings.TrimSpace(in.Location),
|
||||||
Details: in.Details,
|
// Details: in.Details,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddHouseholdItemOutput{}, err
|
// return nil, AddHouseholdItemOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddHouseholdItemOutput{Item: item}, nil
|
// return nil, AddHouseholdItemOutput{Item: item}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// search_household_items
|
// // search_household_items
|
||||||
|
|
||||||
type SearchHouseholdItemsInput struct {
|
// type SearchHouseholdItemsInput struct {
|
||||||
Query string `json:"query,omitempty" jsonschema:"search text matching name or notes"`
|
// Query string `json:"query,omitempty" jsonschema:"search text matching name or notes"`
|
||||||
Category string `json:"category,omitempty" jsonschema:"filter by category"`
|
// Category string `json:"category,omitempty" jsonschema:"filter by category"`
|
||||||
Location string `json:"location,omitempty" jsonschema:"filter by location"`
|
// Location string `json:"location,omitempty" jsonschema:"filter by location"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type SearchHouseholdItemsOutput struct {
|
// type SearchHouseholdItemsOutput struct {
|
||||||
Items []ext.HouseholdItem `json:"items"`
|
// Items []ext.HouseholdItem `json:"items"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *HouseholdTool) SearchItems(ctx context.Context, _ *mcp.CallToolRequest, in SearchHouseholdItemsInput) (*mcp.CallToolResult, SearchHouseholdItemsOutput, error) {
|
// func (t *HouseholdTool) SearchItems(ctx context.Context, _ *mcp.CallToolRequest, in SearchHouseholdItemsInput) (*mcp.CallToolResult, SearchHouseholdItemsOutput, error) {
|
||||||
items, err := t.store.SearchHouseholdItems(ctx, in.Query, in.Category, in.Location)
|
// items, err := t.store.SearchHouseholdItems(ctx, in.Query, in.Category, in.Location)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, SearchHouseholdItemsOutput{}, err
|
// return nil, SearchHouseholdItemsOutput{}, err
|
||||||
}
|
// }
|
||||||
if items == nil {
|
// if items == nil {
|
||||||
items = []ext.HouseholdItem{}
|
// items = []ext.HouseholdItem{}
|
||||||
}
|
// }
|
||||||
return nil, SearchHouseholdItemsOutput{Items: items}, nil
|
// return nil, SearchHouseholdItemsOutput{Items: items}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_household_item
|
// // get_household_item
|
||||||
|
|
||||||
type GetHouseholdItemInput struct {
|
// type GetHouseholdItemInput struct {
|
||||||
ID uuid.UUID `json:"id" jsonschema:"item id"`
|
// ID uuid.UUID `json:"id" jsonschema:"item id"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetHouseholdItemOutput struct {
|
// type GetHouseholdItemOutput struct {
|
||||||
Item ext.HouseholdItem `json:"item"`
|
// Item ext.HouseholdItem `json:"item"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *HouseholdTool) GetItem(ctx context.Context, _ *mcp.CallToolRequest, in GetHouseholdItemInput) (*mcp.CallToolResult, GetHouseholdItemOutput, error) {
|
// func (t *HouseholdTool) GetItem(ctx context.Context, _ *mcp.CallToolRequest, in GetHouseholdItemInput) (*mcp.CallToolResult, GetHouseholdItemOutput, error) {
|
||||||
item, err := t.store.GetHouseholdItem(ctx, in.ID)
|
// item, err := t.store.GetHouseholdItem(ctx, in.ID)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetHouseholdItemOutput{}, err
|
// return nil, GetHouseholdItemOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, GetHouseholdItemOutput{Item: item}, nil
|
// return nil, GetHouseholdItemOutput{Item: item}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_vendor
|
// // add_vendor
|
||||||
|
|
||||||
type AddVendorInput struct {
|
// type AddVendorInput struct {
|
||||||
Name string `json:"name" jsonschema:"vendor name"`
|
// Name string `json:"name" jsonschema:"vendor name"`
|
||||||
ServiceType string `json:"service_type,omitempty" jsonschema:"type of service (e.g. plumber, electrician, landscaper)"`
|
// ServiceType string `json:"service_type,omitempty" jsonschema:"type of service (e.g. plumber, electrician, landscaper)"`
|
||||||
Phone string `json:"phone,omitempty"`
|
// Phone string `json:"phone,omitempty"`
|
||||||
Email string `json:"email,omitempty"`
|
// Email string `json:"email,omitempty"`
|
||||||
Website string `json:"website,omitempty"`
|
// Website string `json:"website,omitempty"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
Rating *int `json:"rating,omitempty" jsonschema:"1-5 rating"`
|
// Rating *int `json:"rating,omitempty" jsonschema:"1-5 rating"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddVendorOutput struct {
|
// type AddVendorOutput struct {
|
||||||
Vendor ext.HouseholdVendor `json:"vendor"`
|
// Vendor ext.HouseholdVendor `json:"vendor"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *HouseholdTool) AddVendor(ctx context.Context, _ *mcp.CallToolRequest, in AddVendorInput) (*mcp.CallToolResult, AddVendorOutput, error) {
|
// func (t *HouseholdTool) AddVendor(ctx context.Context, _ *mcp.CallToolRequest, in AddVendorInput) (*mcp.CallToolResult, AddVendorOutput, error) {
|
||||||
if strings.TrimSpace(in.Name) == "" {
|
// if strings.TrimSpace(in.Name) == "" {
|
||||||
return nil, AddVendorOutput{}, errRequiredField("name")
|
// return nil, AddVendorOutput{}, errRequiredField("name")
|
||||||
}
|
// }
|
||||||
vendor, err := t.store.AddVendor(ctx, ext.HouseholdVendor{
|
// vendor, err := t.store.AddVendor(ctx, ext.HouseholdVendor{
|
||||||
Name: strings.TrimSpace(in.Name),
|
// Name: strings.TrimSpace(in.Name),
|
||||||
ServiceType: strings.TrimSpace(in.ServiceType),
|
// ServiceType: strings.TrimSpace(in.ServiceType),
|
||||||
Phone: strings.TrimSpace(in.Phone),
|
// Phone: strings.TrimSpace(in.Phone),
|
||||||
Email: strings.TrimSpace(in.Email),
|
// Email: strings.TrimSpace(in.Email),
|
||||||
Website: strings.TrimSpace(in.Website),
|
// Website: strings.TrimSpace(in.Website),
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
Rating: in.Rating,
|
// Rating: in.Rating,
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddVendorOutput{}, err
|
// return nil, AddVendorOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddVendorOutput{Vendor: vendor}, nil
|
// return nil, AddVendorOutput{Vendor: vendor}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// list_vendors
|
// // list_vendors
|
||||||
|
|
||||||
type ListVendorsInput struct {
|
// type ListVendorsInput struct {
|
||||||
ServiceType string `json:"service_type,omitempty" jsonschema:"filter by service type"`
|
// ServiceType string `json:"service_type,omitempty" jsonschema:"filter by service type"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type ListVendorsOutput struct {
|
// type ListVendorsOutput struct {
|
||||||
Vendors []ext.HouseholdVendor `json:"vendors"`
|
// Vendors []ext.HouseholdVendor `json:"vendors"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *HouseholdTool) ListVendors(ctx context.Context, _ *mcp.CallToolRequest, in ListVendorsInput) (*mcp.CallToolResult, ListVendorsOutput, error) {
|
// func (t *HouseholdTool) ListVendors(ctx context.Context, _ *mcp.CallToolRequest, in ListVendorsInput) (*mcp.CallToolResult, ListVendorsOutput, error) {
|
||||||
vendors, err := t.store.ListVendors(ctx, in.ServiceType)
|
// vendors, err := t.store.ListVendors(ctx, in.ServiceType)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, ListVendorsOutput{}, err
|
// return nil, ListVendorsOutput{}, err
|
||||||
}
|
// }
|
||||||
if vendors == nil {
|
// if vendors == nil {
|
||||||
vendors = []ext.HouseholdVendor{}
|
// vendors = []ext.HouseholdVendor{}
|
||||||
}
|
// }
|
||||||
return nil, ListVendorsOutput{Vendors: vendors}, nil
|
// return nil, ListVendorsOutput{Vendors: vendors}, nil
|
||||||
}
|
// }
|
||||||
|
|||||||
192
internal/tools/learnings.go
Normal file
192
internal/tools/learnings.go
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/session"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/store"
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningsTool struct {
|
||||||
|
store *store.DB
|
||||||
|
sessions *session.ActiveProjects
|
||||||
|
cfg config.SearchConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
type AddLearningInput struct {
|
||||||
|
Summary string `json:"summary" jsonschema:"short curated learning summary"`
|
||||||
|
Details string `json:"details,omitempty" jsonschema:"optional detailed learning body"`
|
||||||
|
Category string `json:"category,omitempty"`
|
||||||
|
Area string `json:"area,omitempty"`
|
||||||
|
Status string `json:"status,omitempty"`
|
||||||
|
Priority string `json:"priority,omitempty"`
|
||||||
|
Confidence string `json:"confidence,omitempty"`
|
||||||
|
ActionRequired *bool `json:"action_required,omitempty"`
|
||||||
|
SourceType string `json:"source_type,omitempty"`
|
||||||
|
SourceRef string `json:"source_ref,omitempty"`
|
||||||
|
Project string `json:"project,omitempty" jsonschema:"project name or id; falls back to active session project"`
|
||||||
|
RelatedThoughtID *uuid.UUID `json:"related_thought_id,omitempty"`
|
||||||
|
RelatedSkillID *uuid.UUID `json:"related_skill_id,omitempty"`
|
||||||
|
ReviewedBy *string `json:"reviewed_by,omitempty"`
|
||||||
|
DuplicateOfLearningID *uuid.UUID `json:"duplicate_of_learning_id,omitempty"`
|
||||||
|
SupersedesLearningID *uuid.UUID `json:"supersedes_learning_id,omitempty"`
|
||||||
|
Tags []string `json:"tags,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AddLearningOutput struct {
|
||||||
|
Learning thoughttypes.Learning `json:"learning"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetLearningInput struct {
|
||||||
|
ID uuid.UUID `json:"id" jsonschema:"learning id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetLearningOutput struct {
|
||||||
|
Learning thoughttypes.Learning `json:"learning"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListLearningsInput struct {
|
||||||
|
Limit int `json:"limit,omitempty"`
|
||||||
|
Project string `json:"project,omitempty" jsonschema:"project name or id; falls back to active session project"`
|
||||||
|
Category string `json:"category,omitempty"`
|
||||||
|
Area string `json:"area,omitempty"`
|
||||||
|
Status string `json:"status,omitempty"`
|
||||||
|
Priority string `json:"priority,omitempty"`
|
||||||
|
Tag string `json:"tag,omitempty"`
|
||||||
|
Query string `json:"query,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListLearningsOutput struct {
|
||||||
|
Learnings []thoughttypes.Learning `json:"learnings"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewLearningsTool(db *store.DB, sessions *session.ActiveProjects, cfg config.SearchConfig) *LearningsTool {
|
||||||
|
return &LearningsTool{store: db, sessions: sessions, cfg: cfg}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) Add(ctx context.Context, req *mcp.CallToolRequest, in AddLearningInput) (*mcp.CallToolResult, AddLearningOutput, error) {
|
||||||
|
summary := strings.TrimSpace(in.Summary)
|
||||||
|
if summary == "" {
|
||||||
|
return nil, AddLearningOutput{}, errRequiredField("summary")
|
||||||
|
}
|
||||||
|
if err := t.ensureConfigured(); err != nil {
|
||||||
|
return nil, AddLearningOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, AddLearningOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
learning := thoughttypes.Learning{
|
||||||
|
Summary: summary,
|
||||||
|
Details: strings.TrimSpace(in.Details),
|
||||||
|
Category: defaultString(strings.TrimSpace(in.Category), "insight"),
|
||||||
|
Area: defaultString(strings.TrimSpace(in.Area), "other"),
|
||||||
|
Status: thoughttypes.LearningStatus(defaultString(strings.TrimSpace(in.Status), string(thoughttypes.LearningStatusPending))),
|
||||||
|
Priority: thoughttypes.LearningPriority(defaultString(strings.TrimSpace(in.Priority), string(thoughttypes.LearningPriorityMedium))),
|
||||||
|
Confidence: thoughttypes.LearningEvidenceLevel(defaultString(strings.TrimSpace(in.Confidence), string(thoughttypes.LearningEvidenceHypothesis))),
|
||||||
|
SourceType: strings.TrimSpace(in.SourceType),
|
||||||
|
SourceRef: strings.TrimSpace(in.SourceRef),
|
||||||
|
RelatedThoughtID: in.RelatedThoughtID,
|
||||||
|
RelatedSkillID: in.RelatedSkillID,
|
||||||
|
ReviewedBy: in.ReviewedBy,
|
||||||
|
DuplicateOfLearningID: in.DuplicateOfLearningID,
|
||||||
|
SupersedesLearningID: in.SupersedesLearningID,
|
||||||
|
Tags: normalizeStringSlice(in.Tags),
|
||||||
|
}
|
||||||
|
if in.ActionRequired != nil {
|
||||||
|
learning.ActionRequired = *in.ActionRequired
|
||||||
|
}
|
||||||
|
if project != nil {
|
||||||
|
learning.ProjectID = &project.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
created, err := t.store.CreateLearning(ctx, learning)
|
||||||
|
if err != nil {
|
||||||
|
return nil, AddLearningOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, AddLearningOutput{Learning: created}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) Get(ctx context.Context, _ *mcp.CallToolRequest, in GetLearningInput) (*mcp.CallToolResult, GetLearningOutput, error) {
|
||||||
|
if err := t.ensureConfigured(); err != nil {
|
||||||
|
return nil, GetLearningOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
learning, err := t.store.GetLearning(ctx, in.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, GetLearningOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, GetLearningOutput{Learning: learning}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) List(ctx context.Context, req *mcp.CallToolRequest, in ListLearningsInput) (*mcp.CallToolResult, ListLearningsOutput, error) {
|
||||||
|
if err := t.ensureConfigured(); err != nil {
|
||||||
|
return nil, ListLearningsOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, ListLearningsOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
filter := thoughttypes.LearningFilter{
|
||||||
|
Limit: normalizeLimit(in.Limit, t.cfg),
|
||||||
|
Category: strings.TrimSpace(in.Category),
|
||||||
|
Area: strings.TrimSpace(in.Area),
|
||||||
|
Status: strings.TrimSpace(in.Status),
|
||||||
|
Priority: strings.TrimSpace(in.Priority),
|
||||||
|
Tag: strings.TrimSpace(in.Tag),
|
||||||
|
Query: strings.TrimSpace(in.Query),
|
||||||
|
}
|
||||||
|
if project != nil {
|
||||||
|
filter.ProjectID = &project.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
items, err := t.store.ListLearnings(ctx, filter)
|
||||||
|
if err != nil {
|
||||||
|
return nil, ListLearningsOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, ListLearningsOutput{Learnings: items}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) ensureConfigured() error {
|
||||||
|
if t == nil || t.store == nil {
|
||||||
|
return errInvalidInput("learnings tool is not configured")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func defaultString(value string, fallback string) string {
|
||||||
|
if value == "" {
|
||||||
|
return fallback
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeStringSlice(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
out := make([]string, 0, len(values))
|
||||||
|
seen := map[string]struct{}{}
|
||||||
|
for _, value := range values {
|
||||||
|
trimmed := strings.TrimSpace(value)
|
||||||
|
if trimmed == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := seen[trimmed]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[trimmed] = struct{}{}
|
||||||
|
out = append(out, trimmed)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
70
internal/tools/learnings_test.go
Normal file
70
internal/tools/learnings_test.go
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/mcperrors"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestLearningsAddRequiresSummary(t *testing.T) {
|
||||||
|
tool := &LearningsTool{}
|
||||||
|
|
||||||
|
_, _, err := tool.Add(context.Background(), nil, AddLearningInput{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Add() error = nil, want error")
|
||||||
|
}
|
||||||
|
|
||||||
|
_, data := requireRPCError(t, err)
|
||||||
|
if data.Field != "summary" {
|
||||||
|
t.Fatalf("Add() error field = %q, want %q", data.Field, "summary")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLearningsMethodsRequireConfiguredStore(t *testing.T) {
|
||||||
|
tool := &LearningsTool{}
|
||||||
|
|
||||||
|
t.Run("add", func(t *testing.T) {
|
||||||
|
_, _, err := tool.Add(context.Background(), nil, AddLearningInput{Summary: "Keep this"})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Add() error = nil, want error")
|
||||||
|
}
|
||||||
|
_, data := requireRPCError(t, err)
|
||||||
|
if data.Type != mcperrors.TypeInvalidInput {
|
||||||
|
t.Fatalf("Add() data.type = %q, want %q", data.Type, mcperrors.TypeInvalidInput)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("get", func(t *testing.T) {
|
||||||
|
_, _, err := tool.Get(context.Background(), nil, GetLearningInput{ID: uuid.New()})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Get() error = nil, want error")
|
||||||
|
}
|
||||||
|
_, data := requireRPCError(t, err)
|
||||||
|
if data.Type != mcperrors.TypeInvalidInput {
|
||||||
|
t.Fatalf("Get() data.type = %q, want %q", data.Type, mcperrors.TypeInvalidInput)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("list", func(t *testing.T) {
|
||||||
|
_, _, err := tool.List(context.Background(), nil, ListLearningsInput{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("List() error = nil, want error")
|
||||||
|
}
|
||||||
|
_, data := requireRPCError(t, err)
|
||||||
|
if data.Type != mcperrors.TypeInvalidInput {
|
||||||
|
t.Fatalf("List() data.type = %q, want %q", data.Type, mcperrors.TypeInvalidInput)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeStringSliceTrimsDedupesAndDropsEmpties(t *testing.T) {
|
||||||
|
got := normalizeStringSlice([]string{" alpha ", "beta", "", "beta", "alpha"})
|
||||||
|
if len(got) != 2 {
|
||||||
|
t.Fatalf("normalizeStringSlice() len = %d, want 2", len(got))
|
||||||
|
}
|
||||||
|
if got[0] != "alpha" || got[1] != "beta" {
|
||||||
|
t.Fatalf("normalizeStringSlice() = %#v, want [alpha beta]", got)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -13,9 +13,9 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type LinksTool struct {
|
type LinksTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
}
|
}
|
||||||
|
|
||||||
type LinkInput struct {
|
type LinkInput struct {
|
||||||
@@ -47,8 +47,8 @@ type RelatedOutput struct {
|
|||||||
Related []RelatedThought `json:"related"`
|
Related []RelatedThought `json:"related"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewLinksTool(db *store.DB, provider ai.Provider, search config.SearchConfig) *LinksTool {
|
func NewLinksTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig) *LinksTool {
|
||||||
return &LinksTool{store: db, provider: provider, search: search}
|
return &LinksTool{store: db, embeddings: embeddings, search: search}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *LinksTool) Link(ctx context.Context, _ *mcp.CallToolRequest, in LinkInput) (*mcp.CallToolResult, LinkOutput, error) {
|
func (t *LinksTool) Link(ctx context.Context, _ *mcp.CallToolRequest, in LinkInput) (*mcp.CallToolResult, LinkOutput, error) {
|
||||||
@@ -117,7 +117,7 @@ func (t *LinksTool) Related(ctx context.Context, _ *mcp.CallToolRequest, in Rela
|
|||||||
}
|
}
|
||||||
|
|
||||||
if includeSemantic {
|
if includeSemantic {
|
||||||
semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID)
|
semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, RelatedOutput{}, err
|
return nil, RelatedOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,137 +1,137 @@
|
|||||||
package tools
|
package tools
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"context"
|
// "context"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
// "github.com/google/uuid"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
// "github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/store"
|
// "git.warky.dev/wdevs/amcs/internal/store"
|
||||||
ext "git.warky.dev/wdevs/amcs/internal/types"
|
// ext "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
// )
|
||||||
|
|
||||||
type MaintenanceTool struct {
|
// type MaintenanceTool struct {
|
||||||
store *store.DB
|
// store *store.DB
|
||||||
}
|
// }
|
||||||
|
|
||||||
func NewMaintenanceTool(db *store.DB) *MaintenanceTool {
|
// func NewMaintenanceTool(db *store.DB) *MaintenanceTool {
|
||||||
return &MaintenanceTool{store: db}
|
// return &MaintenanceTool{store: db}
|
||||||
}
|
// }
|
||||||
|
|
||||||
// add_maintenance_task
|
// // add_maintenance_task
|
||||||
|
|
||||||
type AddMaintenanceTaskInput struct {
|
// type AddMaintenanceTaskInput struct {
|
||||||
Name string `json:"name" jsonschema:"task name"`
|
// Name string `json:"name" jsonschema:"task name"`
|
||||||
Category string `json:"category,omitempty" jsonschema:"e.g. hvac, plumbing, exterior, appliance, landscaping"`
|
// Category string `json:"category,omitempty" jsonschema:"e.g. hvac, plumbing, exterior, appliance, landscaping"`
|
||||||
FrequencyDays *int `json:"frequency_days,omitempty" jsonschema:"recurrence interval in days; omit for one-time tasks"`
|
// FrequencyDays *int `json:"frequency_days,omitempty" jsonschema:"recurrence interval in days; omit for one-time tasks"`
|
||||||
NextDue *time.Time `json:"next_due,omitempty" jsonschema:"when the task is next due"`
|
// NextDue *time.Time `json:"next_due,omitempty" jsonschema:"when the task is next due"`
|
||||||
Priority string `json:"priority,omitempty" jsonschema:"low, medium, high, or urgent (default: medium)"`
|
// Priority string `json:"priority,omitempty" jsonschema:"low, medium, high, or urgent (default: medium)"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type AddMaintenanceTaskOutput struct {
|
// type AddMaintenanceTaskOutput struct {
|
||||||
Task ext.MaintenanceTask `json:"task"`
|
// Task ext.MaintenanceTask `json:"task"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *MaintenanceTool) AddTask(ctx context.Context, _ *mcp.CallToolRequest, in AddMaintenanceTaskInput) (*mcp.CallToolResult, AddMaintenanceTaskOutput, error) {
|
// func (t *MaintenanceTool) AddTask(ctx context.Context, _ *mcp.CallToolRequest, in AddMaintenanceTaskInput) (*mcp.CallToolResult, AddMaintenanceTaskOutput, error) {
|
||||||
if strings.TrimSpace(in.Name) == "" {
|
// if strings.TrimSpace(in.Name) == "" {
|
||||||
return nil, AddMaintenanceTaskOutput{}, errRequiredField("name")
|
// return nil, AddMaintenanceTaskOutput{}, errRequiredField("name")
|
||||||
}
|
// }
|
||||||
priority := strings.TrimSpace(in.Priority)
|
// priority := strings.TrimSpace(in.Priority)
|
||||||
if priority == "" {
|
// if priority == "" {
|
||||||
priority = "medium"
|
// priority = "medium"
|
||||||
}
|
// }
|
||||||
task, err := t.store.AddMaintenanceTask(ctx, ext.MaintenanceTask{
|
// task, err := t.store.AddMaintenanceTask(ctx, ext.MaintenanceTask{
|
||||||
Name: strings.TrimSpace(in.Name),
|
// Name: strings.TrimSpace(in.Name),
|
||||||
Category: strings.TrimSpace(in.Category),
|
// Category: strings.TrimSpace(in.Category),
|
||||||
FrequencyDays: in.FrequencyDays,
|
// FrequencyDays: in.FrequencyDays,
|
||||||
NextDue: in.NextDue,
|
// NextDue: in.NextDue,
|
||||||
Priority: priority,
|
// Priority: priority,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, AddMaintenanceTaskOutput{}, err
|
// return nil, AddMaintenanceTaskOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, AddMaintenanceTaskOutput{Task: task}, nil
|
// return nil, AddMaintenanceTaskOutput{Task: task}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// log_maintenance
|
// // log_maintenance
|
||||||
|
|
||||||
type LogMaintenanceInput struct {
|
// type LogMaintenanceInput struct {
|
||||||
TaskID uuid.UUID `json:"task_id" jsonschema:"id of the maintenance task"`
|
// TaskID uuid.UUID `json:"task_id" jsonschema:"id of the maintenance task"`
|
||||||
CompletedAt *time.Time `json:"completed_at,omitempty" jsonschema:"when the work was done (defaults to now)"`
|
// CompletedAt *time.Time `json:"completed_at,omitempty" jsonschema:"when the work was done (defaults to now)"`
|
||||||
PerformedBy string `json:"performed_by,omitempty" jsonschema:"who did the work (self, vendor name, etc.)"`
|
// PerformedBy string `json:"performed_by,omitempty" jsonschema:"who did the work (self, vendor name, etc.)"`
|
||||||
Cost *float64 `json:"cost,omitempty" jsonschema:"cost of the work"`
|
// Cost *float64 `json:"cost,omitempty" jsonschema:"cost of the work"`
|
||||||
Notes string `json:"notes,omitempty"`
|
// Notes string `json:"notes,omitempty"`
|
||||||
NextAction string `json:"next_action,omitempty" jsonschema:"recommended follow-up"`
|
// NextAction string `json:"next_action,omitempty" jsonschema:"recommended follow-up"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type LogMaintenanceOutput struct {
|
// type LogMaintenanceOutput struct {
|
||||||
Log ext.MaintenanceLog `json:"log"`
|
// Log ext.MaintenanceLog `json:"log"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *MaintenanceTool) LogWork(ctx context.Context, _ *mcp.CallToolRequest, in LogMaintenanceInput) (*mcp.CallToolResult, LogMaintenanceOutput, error) {
|
// func (t *MaintenanceTool) LogWork(ctx context.Context, _ *mcp.CallToolRequest, in LogMaintenanceInput) (*mcp.CallToolResult, LogMaintenanceOutput, error) {
|
||||||
completedAt := time.Now()
|
// completedAt := time.Now()
|
||||||
if in.CompletedAt != nil {
|
// if in.CompletedAt != nil {
|
||||||
completedAt = *in.CompletedAt
|
// completedAt = *in.CompletedAt
|
||||||
}
|
// }
|
||||||
log, err := t.store.LogMaintenance(ctx, ext.MaintenanceLog{
|
// log, err := t.store.LogMaintenance(ctx, ext.MaintenanceLog{
|
||||||
TaskID: in.TaskID,
|
// TaskID: in.TaskID,
|
||||||
CompletedAt: completedAt,
|
// CompletedAt: completedAt,
|
||||||
PerformedBy: strings.TrimSpace(in.PerformedBy),
|
// PerformedBy: strings.TrimSpace(in.PerformedBy),
|
||||||
Cost: in.Cost,
|
// Cost: in.Cost,
|
||||||
Notes: strings.TrimSpace(in.Notes),
|
// Notes: strings.TrimSpace(in.Notes),
|
||||||
NextAction: strings.TrimSpace(in.NextAction),
|
// NextAction: strings.TrimSpace(in.NextAction),
|
||||||
})
|
// })
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, LogMaintenanceOutput{}, err
|
// return nil, LogMaintenanceOutput{}, err
|
||||||
}
|
// }
|
||||||
return nil, LogMaintenanceOutput{Log: log}, nil
|
// return nil, LogMaintenanceOutput{Log: log}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// get_upcoming_maintenance
|
// // get_upcoming_maintenance
|
||||||
|
|
||||||
type GetUpcomingMaintenanceInput struct {
|
// type GetUpcomingMaintenanceInput struct {
|
||||||
DaysAhead int `json:"days_ahead,omitempty" jsonschema:"how many days to look ahead (default: 30)"`
|
// DaysAhead int `json:"days_ahead,omitempty" jsonschema:"how many days to look ahead (default: 30)"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type GetUpcomingMaintenanceOutput struct {
|
// type GetUpcomingMaintenanceOutput struct {
|
||||||
Tasks []ext.MaintenanceTask `json:"tasks"`
|
// Tasks []ext.MaintenanceTask `json:"tasks"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *MaintenanceTool) GetUpcoming(ctx context.Context, _ *mcp.CallToolRequest, in GetUpcomingMaintenanceInput) (*mcp.CallToolResult, GetUpcomingMaintenanceOutput, error) {
|
// func (t *MaintenanceTool) GetUpcoming(ctx context.Context, _ *mcp.CallToolRequest, in GetUpcomingMaintenanceInput) (*mcp.CallToolResult, GetUpcomingMaintenanceOutput, error) {
|
||||||
tasks, err := t.store.GetUpcomingMaintenance(ctx, in.DaysAhead)
|
// tasks, err := t.store.GetUpcomingMaintenance(ctx, in.DaysAhead)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, GetUpcomingMaintenanceOutput{}, err
|
// return nil, GetUpcomingMaintenanceOutput{}, err
|
||||||
}
|
// }
|
||||||
if tasks == nil {
|
// if tasks == nil {
|
||||||
tasks = []ext.MaintenanceTask{}
|
// tasks = []ext.MaintenanceTask{}
|
||||||
}
|
// }
|
||||||
return nil, GetUpcomingMaintenanceOutput{Tasks: tasks}, nil
|
// return nil, GetUpcomingMaintenanceOutput{Tasks: tasks}, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
// search_maintenance_history
|
// // search_maintenance_history
|
||||||
|
|
||||||
type SearchMaintenanceHistoryInput struct {
|
// type SearchMaintenanceHistoryInput struct {
|
||||||
Query string `json:"query,omitempty" jsonschema:"search text matching task name or notes"`
|
// Query string `json:"query,omitempty" jsonschema:"search text matching task name or notes"`
|
||||||
Category string `json:"category,omitempty" jsonschema:"filter by task category"`
|
// Category string `json:"category,omitempty" jsonschema:"filter by task category"`
|
||||||
Start *time.Time `json:"start,omitempty" jsonschema:"filter logs completed on or after this date"`
|
// Start *time.Time `json:"start,omitempty" jsonschema:"filter logs completed on or after this date"`
|
||||||
End *time.Time `json:"end,omitempty" jsonschema:"filter logs completed on or before this date"`
|
// End *time.Time `json:"end,omitempty" jsonschema:"filter logs completed on or before this date"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type SearchMaintenanceHistoryOutput struct {
|
// type SearchMaintenanceHistoryOutput struct {
|
||||||
Logs []ext.MaintenanceLogWithTask `json:"logs"`
|
// Logs []ext.MaintenanceLogWithTask `json:"logs"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (t *MaintenanceTool) SearchHistory(ctx context.Context, _ *mcp.CallToolRequest, in SearchMaintenanceHistoryInput) (*mcp.CallToolResult, SearchMaintenanceHistoryOutput, error) {
|
// func (t *MaintenanceTool) SearchHistory(ctx context.Context, _ *mcp.CallToolRequest, in SearchMaintenanceHistoryInput) (*mcp.CallToolResult, SearchMaintenanceHistoryOutput, error) {
|
||||||
logs, err := t.store.SearchMaintenanceHistory(ctx, in.Query, in.Category, in.Start, in.End)
|
// logs, err := t.store.SearchMaintenanceHistory(ctx, in.Query, in.Category, in.Start, in.End)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, SearchMaintenanceHistoryOutput{}, err
|
// return nil, SearchMaintenanceHistoryOutput{}, err
|
||||||
}
|
// }
|
||||||
if logs == nil {
|
// if logs == nil {
|
||||||
logs = []ext.MaintenanceLogWithTask{}
|
// logs = []ext.MaintenanceLogWithTask{}
|
||||||
}
|
// }
|
||||||
return nil, SearchMaintenanceHistoryOutput{Logs: logs}, nil
|
// return nil, SearchMaintenanceHistoryOutput{Logs: logs}, nil
|
||||||
}
|
// }
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user