Compare commits
31 Commits
v0.0.3
...
bdc78cc2a3
| Author | SHA1 | Date | |
|---|---|---|---|
| bdc78cc2a3 | |||
| 6c5e3918dc | |||
| cd14be0666 | |||
| 20122a5f53 | |||
|
|
8e74dc9284 | ||
| 1c9741373e | |||
| 3e832eea98 | |||
| c4d260d971 | |||
| 27cd494f6d | |||
| 3dfed9c986 | |||
| 512b16f8fe | |||
| 9a9fa4f384 | |||
| 979afc909e | |||
| 55859811be | |||
| 7f9c6f122e | |||
| 14e218d784 | |||
| 532d1560a3 | |||
| 894fa3fc1d | |||
| a6165a0f2e | |||
| b6e156011f | |||
| 4d107cb87e | |||
| 1ed67881e6 | |||
| 1d4dbad33f | |||
| 02bcbdabd8 | |||
| 5f48a197e8 | |||
| 1958eaca01 | |||
| 4aed4105aa | |||
| 8af4956951 | |||
| 5457cbbd21 | |||
| d6488cd4d5 | |||
| a1bf5ceb38 |
@@ -31,6 +31,9 @@ jobs:
|
|||||||
- name: Download dependencies
|
- name: Download dependencies
|
||||||
run: go mod download
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Tidy modules
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: go test ./...
|
run: go test ./...
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,11 @@ on:
|
|||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
- 'v*.*.*'
|
- 'v*.*.*'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
tag:
|
||||||
|
description: 'Tag to release (e.g. v1.2.3)'
|
||||||
|
required: true
|
||||||
|
|
||||||
env:
|
env:
|
||||||
GITEA_SERVER: https://git.warky.dev
|
GITEA_SERVER: https://git.warky.dev
|
||||||
@@ -35,10 +40,25 @@ jobs:
|
|||||||
- name: Download dependencies
|
- name: Download dependencies
|
||||||
run: go mod download
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Set up Node
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 'lts/*'
|
||||||
|
|
||||||
|
- name: Install pnpm
|
||||||
|
run: npm install -g pnpm
|
||||||
|
|
||||||
|
- name: Build UI
|
||||||
|
run: |
|
||||||
|
cd ui
|
||||||
|
pnpm install --frozen-lockfile
|
||||||
|
pnpm run build
|
||||||
|
|
||||||
- name: Set build vars
|
- name: Set build vars
|
||||||
id: vars
|
id: vars
|
||||||
run: |
|
run: |
|
||||||
echo "VERSION=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT
|
TAG="${{ github.event.inputs.tag || github.ref_name }}"
|
||||||
|
echo "VERSION=${TAG}" >> $GITHUB_OUTPUT
|
||||||
echo "COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
echo "COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
||||||
echo "BUILD_DATE=$(date -u +%Y-%m-%dT%H:%M:%SZ)" >> $GITHUB_OUTPUT
|
echo "BUILD_DATE=$(date -u +%Y-%m-%dT%H:%M:%SZ)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
@@ -66,21 +86,21 @@ jobs:
|
|||||||
- name: Create Gitea Release
|
- name: Create Gitea Release
|
||||||
id: create_release
|
id: create_release
|
||||||
run: |
|
run: |
|
||||||
VERSION="${{ steps.vars.outputs.VERSION }}"
|
export VERSION="${{ steps.vars.outputs.VERSION }}"
|
||||||
BODY=$(python3 <<'PY'
|
BODY=$(python3 <<'PY'
|
||||||
import json, subprocess, os
|
import json, subprocess, os
|
||||||
version = os.environ['VERSION']
|
version = os.environ['VERSION']
|
||||||
commit = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], text=True).strip()
|
commit = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], text=True).strip()
|
||||||
body = f"## {version}\n\nBuilt from commit {commit}.\n\nSee `checksums.txt` to verify downloads."
|
body = f"## {version}\n\nBuilt from commit {commit}.\n\nSee `checksums.txt` to verify downloads."
|
||||||
print(json.dumps({
|
print(json.dumps({
|
||||||
'tag_name': version,
|
'tag_name': version,
|
||||||
'name': version,
|
'name': version,
|
||||||
'body': body,
|
'body': body,
|
||||||
'draft': False,
|
'draft': False,
|
||||||
'prerelease': False,
|
'prerelease': False,
|
||||||
}))
|
}))
|
||||||
PY
|
PY
|
||||||
)
|
)
|
||||||
RESPONSE=$(curl -fsS -X POST "${{ env.GITEA_SERVER }}/api/v1/repos/${{ env.GITEA_REPO }}/releases" \
|
RESPONSE=$(curl -fsS -X POST "${{ env.GITEA_SERVER }}/api/v1/repos/${{ env.GITEA_REPO }}/releases" \
|
||||||
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -34,3 +34,4 @@ OB1/
|
|||||||
ui/node_modules/
|
ui/node_modules/
|
||||||
ui/.svelte-kit/
|
ui/.svelte-kit/
|
||||||
internal/app/ui/dist/
|
internal/app/ui/dist/
|
||||||
|
.codex
|
||||||
|
|||||||
10
Dockerfile
10
Dockerfile
@@ -29,7 +29,14 @@ RUN set -eu; \
|
|||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
||||||
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
||||||
-o /out/amcs-server ./cmd/amcs-server
|
-o /out/amcs-server ./cmd/amcs-server; \
|
||||||
|
CGO_ENABLED=0 GOOS=linux go build -trimpath \
|
||||||
|
-ldflags="-s -w \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Version=${VERSION_TAG} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \
|
||||||
|
-X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \
|
||||||
|
-o /out/amcs-migrate-config ./cmd/amcs-migrate-config
|
||||||
|
|
||||||
FROM debian:bookworm-slim
|
FROM debian:bookworm-slim
|
||||||
|
|
||||||
@@ -41,6 +48,7 @@ RUN apt-get update \
|
|||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --from=builder /out/amcs-server /app/amcs-server
|
COPY --from=builder /out/amcs-server /app/amcs-server
|
||||||
|
COPY --from=builder /out/amcs-migrate-config /app/amcs-migrate-config
|
||||||
COPY --chown=appuser:appuser configs /app/configs
|
COPY --chown=appuser:appuser configs /app/configs
|
||||||
|
|
||||||
USER appuser
|
USER appuser
|
||||||
|
|||||||
1
Makefile
1
Makefile
@@ -61,6 +61,7 @@ release-version:
|
|||||||
exit 1; \
|
exit 1; \
|
||||||
fi; \
|
fi; \
|
||||||
git tag -a "$$next_tag" -m "Release $$next_tag"; \
|
git tag -a "$$next_tag" -m "Release $$next_tag"; \
|
||||||
|
git push origin "$$next_tag"; \
|
||||||
echo "$$next_tag"
|
echo "$$next_tag"
|
||||||
|
|
||||||
migrate:
|
migrate:
|
||||||
|
|||||||
168
README.md
168
README.md
@@ -1,24 +1,18 @@
|
|||||||
# Avalon Memory Crystal Server (amcs)
|
# AMCS Directory
|
||||||
|
|
||||||

|
This is the AMCS (Advanced Module Control System) directory.
|
||||||
|
|
||||||
A Go MCP server for capturing and retrieving thoughts, memory, and project context. Exposes tools over Streamable HTTP, backed by Postgres with pgvector for semantic search.
|
## Purpose
|
||||||
|
|
||||||
## What it does
|
The AMCS directory is used to store configuration and code for the Advanced Module Control System, which handles...
|
||||||
|
|
||||||
- **Capture** thoughts with automatic embedding and metadata extraction
|
## Structure
|
||||||
- **Search** thoughts semantically via vector similarity
|
|
||||||
- **Organise** thoughts into projects and retrieve full project context
|
|
||||||
- **Summarise** and recall memory across topics and time windows
|
|
||||||
- **Link** related thoughts and traverse relationships
|
|
||||||
|
|
||||||
## Stack
|
- `configs/` - Configuration files
|
||||||
|
- `scripts/` - Scripts for managing the system
|
||||||
|
- `assets/` - Asset files
|
||||||
|
|
||||||
- Go — MCP server over Streamable HTTP
|
## Next Steps
|
||||||
- Postgres + pgvector — storage and vector search
|
|
||||||
- LiteLLM — primary hosted AI provider (embeddings + metadata extraction)
|
|
||||||
- OpenRouter — default upstream behind LiteLLM
|
|
||||||
- Ollama — supported local or self-hosted OpenAI-compatible provider
|
|
||||||
|
|
||||||
## Tools
|
## Tools
|
||||||
|
|
||||||
@@ -46,21 +40,29 @@ A Go MCP server for capturing and retrieving thoughts, memory, and project conte
|
|||||||
| `load_file` | Retrieve a stored file by ID; returns metadata, base64 content, and an embedded MCP binary resource |
|
| `load_file` | Retrieve a stored file by ID; returns metadata, base64 content, and an embedded MCP binary resource |
|
||||||
| `list_files` | Browse stored files by thought, project, or kind |
|
| `list_files` | Browse stored files by thought, project, or kind |
|
||||||
| `backfill_embeddings` | Generate missing embeddings for stored thoughts |
|
| `backfill_embeddings` | Generate missing embeddings for stored thoughts |
|
||||||
| `reparse_thought_metadata` | Re-extract and normalize metadata for stored thoughts |
|
| `reparse_thought_metadata` | Re-extract metadata from thought content |
|
||||||
| `retry_failed_metadata` | Retry metadata extraction for thoughts still pending or failed |
|
| `retry_failed_metadata` | Retry pending/failed metadata extraction |
|
||||||
| `add_skill` | Store a reusable agent skill (behavioural instruction or capability prompt) |
|
| `add_maintenance_task` | Create a recurring or one-time home maintenance task |
|
||||||
|
| `log_maintenance` | Log completed maintenance; updates next due date |
|
||||||
|
| `get_upcoming_maintenance` | List maintenance tasks due within the next N days |
|
||||||
|
| `search_maintenance_history` | Search the maintenance log by task name, category, or date range |
|
||||||
|
| `save_chat_history` | Save chat messages with optional title, summary, channel, agent, and project |
|
||||||
|
| `get_chat_history` | Fetch chat history by UUID or session_id |
|
||||||
|
| `list_chat_histories` | List chat histories; filter by project, channel, agent_id, session_id, or days |
|
||||||
|
| `delete_chat_history` | Delete a chat history by id |
|
||||||
|
| `add_skill` | Store an agent skill (instruction or capability prompt) |
|
||||||
| `remove_skill` | Delete an agent skill by id |
|
| `remove_skill` | Delete an agent skill by id |
|
||||||
| `list_skills` | List all agent skills, optionally filtered by tag |
|
| `list_skills` | List all agent skills, optionally filtered by tag |
|
||||||
| `add_guardrail` | Store a reusable agent guardrail (constraint or safety rule) |
|
| `add_guardrail` | Store an agent guardrail (constraint or safety rule) |
|
||||||
| `remove_guardrail` | Delete an agent guardrail by id |
|
| `remove_guardrail` | Delete an agent guardrail by id |
|
||||||
| `list_guardrails` | List all agent guardrails, optionally filtered by tag or severity |
|
| `list_guardrails` | List all agent guardrails, optionally filtered by tag or severity |
|
||||||
| `add_project_skill` | Link an agent skill to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `add_project_skill` | Link a skill to a project; pass `project` if client is stateless |
|
||||||
| `remove_project_skill` | Unlink an agent skill from a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `remove_project_skill` | Unlink a skill from a project; pass `project` if client is stateless |
|
||||||
| `list_project_skills` | List all skills linked to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `list_project_skills` | Skills for a project; pass `project` if client is stateless |
|
||||||
| `add_project_guardrail` | Link an agent guardrail to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `add_project_guardrail` | Link a guardrail to a project; pass `project` if client is stateless |
|
||||||
| `remove_project_guardrail` | Unlink an agent guardrail from a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `remove_project_guardrail` | Unlink a guardrail from a project; pass `project` if client is stateless |
|
||||||
| `list_project_guardrails` | List all guardrails linked to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
| `list_project_guardrails` | Guardrails for a project; pass `project` if client is stateless |
|
||||||
| `get_version_info` | Return the server build version information, including version, tag name, commit, and build date |
|
| `get_version_info` | Build version, commit, and date |
|
||||||
| `describe_tools` | List all available MCP tools with names, descriptions, categories, and model-authored usage notes; call this at the start of a session to orient yourself |
|
| `describe_tools` | List all available MCP tools with names, descriptions, categories, and model-authored usage notes; call this at the start of a session to orient yourself |
|
||||||
| `annotate_tool` | Persist your own usage notes for a specific tool; notes are returned by `describe_tools` in future sessions |
|
| `annotate_tool` | Persist your own usage notes for a specific tool; notes are returned by `describe_tools` in future sessions |
|
||||||
|
|
||||||
@@ -74,7 +76,7 @@ AMCS includes a built-in tool directory that models can read and annotate.
|
|||||||
{ "category": "thoughts" }
|
{ "category": "thoughts" }
|
||||||
```
|
```
|
||||||
|
|
||||||
Available categories: `system`, `thoughts`, `projects`, `files`, `admin`, `household`, `maintenance`, `calendar`, `meals`, `crm`, `skills`, `chat`, `meta`.
|
Available categories: `system`, `thoughts`, `projects`, `files`, `admin`, `maintenance`, `skills`, `chat`, `meta`.
|
||||||
|
|
||||||
**`annotate_tool`** lets a model write persistent usage notes against a tool name. Notes survive across sessions and are returned by `describe_tools`:
|
**`annotate_tool`** lets a model write persistent usage notes against a tool name. Notes survive across sessions and are returned by `describe_tools`:
|
||||||
|
|
||||||
@@ -236,12 +238,25 @@ Link existing skills and guardrails to a project so they are automatically avail
|
|||||||
Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
||||||
|
|
||||||
- `database.url` — Postgres connection string
|
- `database.url` — Postgres connection string
|
||||||
- `auth.mode` — `api_keys` or `oauth_client_credentials`
|
- `auth.keys` — static API keys for MCP access via `x-brain-key` or `Authorization: Bearer <key>`
|
||||||
- `auth.keys` — API keys for MCP access via `x-brain-key` or `Authorization: Bearer <key>` when `auth.mode=api_keys`
|
- `auth.oauth.clients` — optional OAuth client credentials registry
|
||||||
- `auth.oauth.clients` — client registry when `auth.mode=oauth_client_credentials`
|
- `ai.providers` — named provider definitions (`litellm`, `ollama`, `openrouter`)
|
||||||
|
- `ai.embeddings.primary` / `ai.metadata.primary` — primary role targets (`provider` + `model`)
|
||||||
|
- `ai.embeddings.fallbacks` / `ai.metadata.fallbacks` — sequential fallback targets
|
||||||
- `mcp.version` is build-generated and should not be set in config
|
- `mcp.version` is build-generated and should not be set in config
|
||||||
|
|
||||||
**OAuth Client Credentials flow** (`auth.mode=oauth_client_credentials`):
|
Config schema is versioned. Current schema version is `2`.
|
||||||
|
|
||||||
|
Use the migration helper to rewrite legacy configs in-place:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
go run ./cmd/amcs-migrate-config --config ./configs/dev.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `--dry-run` to print migrated YAML without writing.
|
||||||
|
Server startup migrates older config formats in memory only and does not write files.
|
||||||
|
|
||||||
|
**OAuth Client Credentials flow**:
|
||||||
|
|
||||||
1. Obtain a token — `POST /oauth/token` (public, no auth required):
|
1. Obtain a token — `POST /oauth/token` (public, no auth required):
|
||||||
```
|
```
|
||||||
@@ -259,8 +274,9 @@ Config is YAML-driven. Copy `configs/config.example.yaml` and set:
|
|||||||
```
|
```
|
||||||
|
|
||||||
Alternatively, pass `client_id` and `client_secret` as body parameters instead of `Authorization: Basic`. Direct `Authorization: Basic` credential validation on the MCP endpoint is also supported as a fallback (no token required).
|
Alternatively, pass `client_id` and `client_secret` as body parameters instead of `Authorization: Basic`. Direct `Authorization: Basic` credential validation on the MCP endpoint is also supported as a fallback (no token required).
|
||||||
- `ai.litellm.base_url` and `ai.litellm.api_key` — LiteLLM proxy
|
- `AMCS_LITELLM_BASE_URL` / `AMCS_LITELLM_API_KEY` override all configured LiteLLM providers
|
||||||
- `ai.ollama.base_url` and `ai.ollama.api_key` — Ollama local or remote server
|
- `AMCS_OLLAMA_BASE_URL` / `AMCS_OLLAMA_API_KEY` override all configured Ollama providers
|
||||||
|
- `AMCS_OPENROUTER_API_KEY` overrides all configured OpenRouter providers
|
||||||
|
|
||||||
See `llm/plan.md` for an audited high-level status summary of the original implementation plan, and `llm/todo.md` for the audited backfill/fallback follow-up status.
|
See `llm/plan.md` for an audited high-level status summary of the original implementation plan, and `llm/todo.md` for the audited backfill/fallback follow-up status.
|
||||||
|
|
||||||
@@ -525,6 +541,53 @@ Recommended Apache settings:
|
|||||||
- `ProxyTimeout 600` and `ProxyPass ... timeout=600` give Apache enough time to wait for the Go backend.
|
- `ProxyTimeout 600` and `ProxyPass ... timeout=600` give Apache enough time to wait for the Go backend.
|
||||||
- If another proxy or load balancer sits in front of Apache, align its size and timeout settings too.
|
- If another proxy or load balancer sits in front of Apache, align its size and timeout settings too.
|
||||||
|
|
||||||
|
## CLI
|
||||||
|
|
||||||
|
`amcs-cli` is a pre-built CLI client for the AMCS MCP server. Download it from https://git.warky.dev/wdevs/amcs/releases
|
||||||
|
|
||||||
|
The primary purpose is to give agents and MCP clients a ready-made bridge to the AMCS server so they do not need to implement their own HTTP MCP client. Configure it once and any stdio-based MCP client can use AMCS immediately.
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
| Command | Purpose |
|
||||||
|
|---|---|
|
||||||
|
| `amcs-cli tools` | List all tools available on the remote server |
|
||||||
|
| `amcs-cli call <tool>` | Call a tool by name with `--arg key=value` flags |
|
||||||
|
| `amcs-cli stdio` | Start a stdio MCP bridge backed by the remote server |
|
||||||
|
|
||||||
|
`stdio` is the main integration point. It connects to the remote HTTP MCP server, discovers all its tools, and re-exposes them over stdio. Register it as a stdio MCP server in your agent config and it proxies every tool call through to AMCS.
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
Config file: `~/.config/amcs/config.yaml`
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
server: https://your-amcs-server
|
||||||
|
token: your-bearer-token
|
||||||
|
```
|
||||||
|
|
||||||
|
Env vars override the config file: `AMCS_SERVER` (preferred), `AMCS_URL` (legacy alias), and `AMCS_TOKEN`. Flags `--server` and `--token` override env vars.
|
||||||
|
|
||||||
|
### stdio MCP client setup
|
||||||
|
|
||||||
|
#### Claude Code
|
||||||
|
|
||||||
|
```bash
|
||||||
|
claude mcp add --transport stdio amcs amcs-cli stdio
|
||||||
|
```
|
||||||
|
|
||||||
|
With inline credentials (no config file):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
claude mcp add --transport stdio amcs amcs-cli stdio \
|
||||||
|
--env AMCS_SERVER=https://your-amcs-server \
|
||||||
|
--env AMCS_TOKEN=your-bearer-token
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Output format
|
||||||
|
|
||||||
|
`call` outputs JSON by default. Pass `--output yaml` for YAML.
|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run the SQL migrations against a local database with:
|
Run the SQL migrations against a local database with:
|
||||||
@@ -586,29 +649,50 @@ Notes:
|
|||||||
- Database migrations `001` through `005` run automatically when the Postgres volume is created for the first time.
|
- Database migrations `001` through `005` run automatically when the Postgres volume is created for the first time.
|
||||||
- `migrations/006_rls_and_grants.sql` is intentionally skipped during container bootstrap because it contains deployment-specific grants for a role named `amcs_user`.
|
- `migrations/006_rls_and_grants.sql` is intentionally skipped during container bootstrap because it contains deployment-specific grants for a role named `amcs_user`.
|
||||||
|
|
||||||
|
### Run config migration with Compose
|
||||||
|
|
||||||
|
The container image now includes `/app/amcs-migrate-config`.
|
||||||
|
|
||||||
|
Dry-run (prints migrated YAML, does not write files):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml --dry-run
|
||||||
|
```
|
||||||
|
|
||||||
|
Apply migration in-place (writes file + creates backup):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml
|
||||||
|
```
|
||||||
|
|
||||||
## Ollama
|
## Ollama
|
||||||
|
|
||||||
Set `ai.provider: "ollama"` to use a local or self-hosted Ollama server through its OpenAI-compatible API.
|
Set your role targets to an Ollama provider to use a local or self-hosted Ollama server through its OpenAI-compatible API.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
ai:
|
ai:
|
||||||
provider: "ollama"
|
providers:
|
||||||
|
local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
|
request_headers: {}
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "nomic-embed-text"
|
|
||||||
dimensions: 768
|
dimensions: 768
|
||||||
|
primary:
|
||||||
|
provider: "local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
metadata:
|
metadata:
|
||||||
model: "llama3.2"
|
|
||||||
temperature: 0.1
|
temperature: 0.1
|
||||||
ollama:
|
primary:
|
||||||
base_url: "http://localhost:11434/v1"
|
provider: "local"
|
||||||
api_key: "ollama"
|
model: "llama3.2"
|
||||||
request_headers: {}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
|
|
||||||
- For remote Ollama servers, point `ai.ollama.base_url` at the remote `/v1` endpoint.
|
- For remote Ollama servers, point `ai.providers.<name>.base_url` at the remote `/v1` endpoint.
|
||||||
- The client always sends Bearer auth; Ollama ignores it locally, so `api_key: "ollama"` is a safe default.
|
- The client always sends Bearer auth; Ollama ignores it locally, so `api_key: "ollama"` is a safe default.
|
||||||
- `ai.embeddings.dimensions` must match the embedding model you actually use, or startup will fail the database vector-dimension check.
|
- `ai.embeddings.dimensions` must match the embedding model you actually use, or startup will fail the database vector-dimension check.
|
||||||
|
|||||||
90
changelog.md
Normal file
90
changelog.md
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-04-21
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Config Schema v2 Introduced
|
||||||
|
|
||||||
|
- Refactored configuration to schema version `2` with named AI providers and role-based model chains.
|
||||||
|
- Added support for per-role primary and fallback targets for embeddings and metadata.
|
||||||
|
- Added optional background role overrides for backfill and metadata retry workers.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Automatic v1 -> v2 Migration
|
||||||
|
|
||||||
|
- Added config migration framework with explicit schema versioning.
|
||||||
|
- Implemented `v1 -> v2` migration to transform legacy provider blocks into named providers + role chains.
|
||||||
|
- Loader now auto-migrates older config files, rewrites migrated YAML, and creates timestamped backups.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - AI Registry and Role Runners
|
||||||
|
|
||||||
|
- Added `ai.Registry` to build provider clients from named provider config entries.
|
||||||
|
- Added `EmbeddingRunner` and `MetadataRunner` with sequential fallback execution.
|
||||||
|
- Added target health tracking with cooldowns for transient/permanent/empty-response failures.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - App and Tool Wiring Updates
|
||||||
|
|
||||||
|
- Rewired app startup to use provider registry + role runners for foreground and background flows.
|
||||||
|
- Updated capture, search, summarize, context, recall, backfill, metadata retry, and reparse paths to use new runners.
|
||||||
|
- Preserved environment override behavior for provider credentials/endpoints across matching provider types.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Migrate Config CLI Added
|
||||||
|
|
||||||
|
- Added `cmd/amcs-migrate-config` CLI to migrate config files to the current schema version.
|
||||||
|
- Supports dry-run output and in-place write mode with automatic backup file creation.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Tests and Documentation Updated
|
||||||
|
|
||||||
|
- Added focused tests for config migration, AI registry behavior, and runner fallback behavior.
|
||||||
|
- Updated `configs/config.example.yaml` to the new v2 schema.
|
||||||
|
- Updated README configuration sections and migration guidance to reflect v2 and `amcs-migrate-config` usage.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Uncommitted File Change List
|
||||||
|
|
||||||
|
- Modified: `.gitignore`
|
||||||
|
- Modified: `README.md`
|
||||||
|
- Modified: `configs/config.example.yaml`
|
||||||
|
- Modified: `internal/ai/compat/client.go`
|
||||||
|
- Modified: `internal/ai/compat/client_test.go`
|
||||||
|
- Modified: `internal/app/app.go`
|
||||||
|
- Modified: `internal/config/config.go`
|
||||||
|
- Modified: `internal/config/loader.go`
|
||||||
|
- Modified: `internal/config/loader_test.go`
|
||||||
|
- Modified: `internal/config/validate.go`
|
||||||
|
- Modified: `internal/config/validate_test.go`
|
||||||
|
- Modified: `internal/mcpserver/server.go`
|
||||||
|
- Modified: `internal/mcpserver/streamable_integration_test.go`
|
||||||
|
- Modified: `internal/tools/backfill.go`
|
||||||
|
- Modified: `internal/tools/capture.go`
|
||||||
|
- Modified: `internal/tools/context.go`
|
||||||
|
- Modified: `internal/tools/enrichment_retry.go`
|
||||||
|
- Modified: `internal/tools/links.go`
|
||||||
|
- Modified: `internal/tools/metadata_retry.go`
|
||||||
|
- Modified: `internal/tools/recall.go`
|
||||||
|
- Modified: `internal/tools/reparse_metadata.go`
|
||||||
|
- Modified: `internal/tools/retrieval.go`
|
||||||
|
- Modified: `internal/tools/search.go`
|
||||||
|
- Modified: `internal/tools/summarize.go`
|
||||||
|
- Modified: `internal/tools/update.go`
|
||||||
|
- Deleted: `internal/ai/factory.go`
|
||||||
|
- Deleted: `internal/ai/factory_test.go`
|
||||||
|
- Deleted: `internal/ai/litellm/client.go`
|
||||||
|
- Deleted: `internal/ai/ollama/client.go`
|
||||||
|
- Deleted: `internal/ai/openrouter/client.go`
|
||||||
|
- Deleted: `internal/ai/provider.go`
|
||||||
|
- New: `changelog.md`
|
||||||
|
- New: `cmd/amcs-migrate-config/main.go`
|
||||||
|
- New: `internal/ai/registry.go`
|
||||||
|
- New: `internal/ai/registry_test.go`
|
||||||
|
- New: `internal/ai/runner.go`
|
||||||
|
- New: `internal/ai/runner_test.go`
|
||||||
|
- New: `internal/config/migrate.go`
|
||||||
|
- New: `internal/config/migrate_test.go`
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Docker Support for Config Migration CLI
|
||||||
|
- Added `amcs-migrate-config` binary to the Docker image build output.
|
||||||
|
- Added `migrate-config` service in `docker-compose.yml` under the `tools` profile.
|
||||||
|
- Documented compose-based migration commands (dry-run and in-place apply) in the README.
|
||||||
|
|
||||||
|
### 2026-04-21 21h - Startup Migration Write Disabled
|
||||||
|
- Changed config loading to migrate legacy schemas in memory only during startup.
|
||||||
|
- Removed automatic file rewrite and backup creation from the startup config loader.
|
||||||
|
- Added loader log hint to use `amcs-migrate-config` when persistent conversion is needed.
|
||||||
@@ -6,7 +6,6 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
@@ -17,9 +16,12 @@ var (
|
|||||||
serverFlag string
|
serverFlag string
|
||||||
tokenFlag string
|
tokenFlag string
|
||||||
outputFlag string
|
outputFlag string
|
||||||
|
verbose bool
|
||||||
cfg Config
|
cfg Config
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const cliUserAgent = "amcs-cli/0.0.1"
|
||||||
|
|
||||||
var rootCmd = &cobra.Command{
|
var rootCmd = &cobra.Command{
|
||||||
Use: "amcs-cli",
|
Use: "amcs-cli",
|
||||||
Short: "CLI for connecting to a remote AMCS MCP server",
|
Short: "CLI for connecting to a remote AMCS MCP server",
|
||||||
@@ -42,6 +44,7 @@ func init() {
|
|||||||
rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL")
|
rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL")
|
||||||
rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token")
|
rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token")
|
||||||
rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml")
|
rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml")
|
||||||
|
rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "Enable verbose logging to stderr")
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadConfig() error {
|
func loadConfig() error {
|
||||||
@@ -54,6 +57,9 @@ func loadConfig() error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
cfg = loaded
|
cfg = loaded
|
||||||
|
if v := strings.TrimSpace(os.Getenv("AMCS_SERVER")); v != "" {
|
||||||
|
cfg.Server = v
|
||||||
|
}
|
||||||
if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" {
|
if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" {
|
||||||
cfg.Server = v
|
cfg.Server = v
|
||||||
}
|
}
|
||||||
@@ -75,7 +81,7 @@ func loadConfig() error {
|
|||||||
|
|
||||||
func requireServer() error {
|
func requireServer() error {
|
||||||
if strings.TrimSpace(cfg.Server) == "" {
|
if strings.TrimSpace(cfg.Server) == "" {
|
||||||
return fmt.Errorf("server URL is required; set --server, AMCS_URL, or config server")
|
return fmt.Errorf("server URL is required; set --server, AMCS_SERVER, AMCS_URL, or config server")
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -109,6 +115,9 @@ func (t *bearerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
|||||||
base = http.DefaultTransport
|
base = http.DefaultTransport
|
||||||
}
|
}
|
||||||
clone := req.Clone(req.Context())
|
clone := req.Clone(req.Context())
|
||||||
|
if strings.TrimSpace(clone.Header.Get("User-Agent")) == "" {
|
||||||
|
clone.Header.Set("User-Agent", cliUserAgent)
|
||||||
|
}
|
||||||
if strings.TrimSpace(t.token) != "" {
|
if strings.TrimSpace(t.token) != "" {
|
||||||
clone.Header.Set("Authorization", "Bearer "+t.token)
|
clone.Header.Set("Authorization", "Bearer "+t.token)
|
||||||
}
|
}
|
||||||
@@ -119,16 +128,24 @@ func connectRemote(ctx context.Context) (*mcp.ClientSession, error) {
|
|||||||
if err := requireServer(); err != nil {
|
if err := requireServer(); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
verboseLogf("connecting to %s", endpointURL())
|
||||||
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
||||||
transport := &mcp.StreamableClientTransport{
|
transport := &mcp.StreamableClientTransport{
|
||||||
Endpoint: endpointURL(),
|
Endpoint: endpointURL(),
|
||||||
HTTPClient: newHTTPClient(),
|
HTTPClient: newHTTPClient(),
|
||||||
|
DisableStandaloneSSE: true,
|
||||||
}
|
}
|
||||||
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
|
||||||
defer cancel()
|
|
||||||
session, err := client.Connect(ctx, transport, nil)
|
session, err := client.Connect(ctx, transport, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("connect to AMCS server: %w", err)
|
return nil, fmt.Errorf("connect to AMCS server: %w", err)
|
||||||
}
|
}
|
||||||
|
verboseLogf("connected to %s", endpointURL())
|
||||||
return session, nil
|
return session, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func verboseLogf(format string, args ...any) {
|
||||||
|
if !verbose {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_, _ = fmt.Fprintf(os.Stderr, "[amcs-cli] "+format+"\n", args...)
|
||||||
|
}
|
||||||
|
|||||||
35
cmd/amcs-cli/cmd/root_test.go
Normal file
35
cmd/amcs-cli/cmd/root_test.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBearerTransportFormatsBearerToken(t *testing.T) {
|
||||||
|
const want = "Bearer X"
|
||||||
|
const wantUA = "amcs-cli/0.0.1"
|
||||||
|
|
||||||
|
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if got := r.Header.Get("Authorization"); got != want {
|
||||||
|
t.Fatalf("Authorization header = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
if got := r.Header.Get("User-Agent"); got != wantUA {
|
||||||
|
t.Fatalf("User-Agent header = %q, want %q", got, wantUA)
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
defer ts.Close()
|
||||||
|
|
||||||
|
client := &http.Client{Transport: &bearerTransport{token: "X"}}
|
||||||
|
req, err := http.NewRequest(http.MethodGet, ts.URL, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRequest() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("client.Do() error = %v", err)
|
||||||
|
}
|
||||||
|
_ = res.Body.Close()
|
||||||
|
}
|
||||||
89
cmd/amcs-cli/cmd/sse.go
Normal file
89
cmd/amcs-cli/cmd/sse.go
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var sseCmd = &cobra.Command{
|
||||||
|
Use: "sse",
|
||||||
|
Short: "Run a stdio MCP bridge backed by a remote AMCS server using SSE transport (widely supported by hosted MCP clients)",
|
||||||
|
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||||
|
ctx := cmd.Context()
|
||||||
|
|
||||||
|
if err := requireServer(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
||||||
|
transport := &mcp.SSEClientTransport{
|
||||||
|
Endpoint: sseEndpointURL(),
|
||||||
|
HTTPClient: newHTTPClient(),
|
||||||
|
}
|
||||||
|
|
||||||
|
verboseLogf("connecting to SSE endpoint %s", sseEndpointURL())
|
||||||
|
remote, err := client.Connect(ctx, transport, nil)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("connect to AMCS SSE endpoint: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = remote.Close() }()
|
||||||
|
verboseLogf("connected to SSE endpoint %s", sseEndpointURL())
|
||||||
|
|
||||||
|
tools, err := remote.ListTools(ctx, nil)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("load remote tools: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
server := mcp.NewServer(&mcp.Implementation{
|
||||||
|
Name: "amcs-cli",
|
||||||
|
Title: "AMCS CLI Bridge (SSE)",
|
||||||
|
Version: "0.0.1",
|
||||||
|
}, nil)
|
||||||
|
|
||||||
|
for _, tool := range tools.Tools {
|
||||||
|
remoteTool := tool
|
||||||
|
server.AddTool(&mcp.Tool{
|
||||||
|
Name: remoteTool.Name,
|
||||||
|
Description: remoteTool.Description,
|
||||||
|
InputSchema: remoteTool.InputSchema,
|
||||||
|
OutputSchema: remoteTool.OutputSchema,
|
||||||
|
Annotations: remoteTool.Annotations,
|
||||||
|
}, func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) {
|
||||||
|
return remote.CallTool(ctx, &mcp.CallToolParams{
|
||||||
|
Name: req.Params.Name,
|
||||||
|
Arguments: req.Params.Arguments,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
session, err := server.Connect(ctx, &mcp.StdioTransport{}, nil)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("start stdio bridge: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = session.Close() }()
|
||||||
|
verboseLogf("sse stdio bridge ready")
|
||||||
|
verboseLogf("waiting for MCP commands on stdin")
|
||||||
|
|
||||||
|
<-ctx.Done()
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func sseEndpointURL() string {
|
||||||
|
base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/")
|
||||||
|
if strings.HasSuffix(base, "/mcp") {
|
||||||
|
base = strings.TrimSuffix(base, "/mcp")
|
||||||
|
}
|
||||||
|
if strings.HasSuffix(base, "/sse") {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
return base + "/sse"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddCommand(sseCmd)
|
||||||
|
}
|
||||||
@@ -51,6 +51,8 @@ var stdioCmd = &cobra.Command{
|
|||||||
return fmt.Errorf("start stdio bridge: %w", err)
|
return fmt.Errorf("start stdio bridge: %w", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = session.Close() }()
|
defer func() { _ = session.Close() }()
|
||||||
|
verboseLogf("stdio bridge connected to remote AMCS and ready")
|
||||||
|
verboseLogf("waiting for MCP commands on stdin")
|
||||||
|
|
||||||
<-ctx.Done()
|
<-ctx.Done()
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
105
cmd/amcs-migrate-config/main.go
Normal file
105
cmd/amcs-migrate-config/main.go
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
var (
|
||||||
|
configPath string
|
||||||
|
dryRun bool
|
||||||
|
toVersion int
|
||||||
|
)
|
||||||
|
flag.StringVar(&configPath, "config", "", "Path to the YAML config file (default: $AMCS_CONFIG or ./configs/dev.yaml)")
|
||||||
|
flag.BoolVar(&dryRun, "dry-run", false, "Print the migrated config to stdout instead of writing it back")
|
||||||
|
flag.IntVar(&toVersion, "to-version", config.CurrentConfigVersion, "Stop migrating after reaching this version")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if toVersion <= 0 || toVersion > config.CurrentConfigVersion {
|
||||||
|
log.Fatalf("invalid -to-version %d (must be between 1 and %d)", toVersion, config.CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
path := config.ResolvePath(configPath)
|
||||||
|
original, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("read config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := map[string]any{}
|
||||||
|
if err := yaml.Unmarshal(original, &raw); err != nil {
|
||||||
|
log.Fatalf("decode config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
if raw == nil {
|
||||||
|
raw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := migrateUpTo(raw, toVersion)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("migrate: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(applied) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s already at version %d; nothing to do\n", path, currentVersion(raw))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
out, err := yaml.Marshal(raw)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("marshal migrated config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, step := range applied {
|
||||||
|
fmt.Fprintf(os.Stderr, "applied migration v%d -> v%d: %s\n", step.From, step.To, step.Describe)
|
||||||
|
}
|
||||||
|
|
||||||
|
if dryRun {
|
||||||
|
_, _ = os.Stdout.Write(out)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
backup := fmt.Sprintf("%s.bak.%d", path, time.Now().Unix())
|
||||||
|
if err := os.WriteFile(backup, original, 0o600); err != nil {
|
||||||
|
log.Fatalf("write backup %q: %v", backup, err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(path, out, 0o600); err != nil {
|
||||||
|
log.Fatalf("write migrated config %q: %v", path, err)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(os.Stderr, "wrote migrated config to %s (backup: %s)\n", path, backup)
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateUpTo runs the migration ladder but stops at the requested version.
|
||||||
|
func migrateUpTo(raw map[string]any, target int) ([]config.ConfigMigration, error) {
|
||||||
|
if currentVersion(raw) >= target {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if target == config.CurrentConfigVersion {
|
||||||
|
return config.Migrate(raw)
|
||||||
|
}
|
||||||
|
// Partial migrations are rare; for now reject anything other than the
|
||||||
|
// current version target since the migration ladder is short.
|
||||||
|
return nil, fmt.Errorf("partial migration to v%d is not supported (use -to-version=%d)", target, config.CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
func currentVersion(raw map[string]any) int {
|
||||||
|
v, ok := raw["version"]
|
||||||
|
if !ok {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
switch n := v.(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 1
|
||||||
|
}
|
||||||
@@ -1,3 +1,5 @@
|
|||||||
|
version: 2
|
||||||
|
|
||||||
server:
|
server:
|
||||||
host: "0.0.0.0"
|
host: "0.0.0.0"
|
||||||
port: 8080
|
port: 8080
|
||||||
@@ -9,6 +11,7 @@ server:
|
|||||||
|
|
||||||
mcp:
|
mcp:
|
||||||
path: "/mcp"
|
path: "/mcp"
|
||||||
|
sse_path: "/sse"
|
||||||
server_name: "amcs"
|
server_name: "amcs"
|
||||||
transport: "streamable_http"
|
transport: "streamable_http"
|
||||||
session_timeout: "10m"
|
session_timeout: "10m"
|
||||||
@@ -26,7 +29,7 @@ auth:
|
|||||||
- id: "oauth-client"
|
- id: "oauth-client"
|
||||||
client_id: ""
|
client_id: ""
|
||||||
client_secret: ""
|
client_secret: ""
|
||||||
description: "used when auth.mode=oauth_client_credentials"
|
description: "optional OAuth client credentials"
|
||||||
|
|
||||||
database:
|
database:
|
||||||
url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable"
|
url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable"
|
||||||
@@ -36,33 +39,58 @@ database:
|
|||||||
max_conn_idle_time: "10m"
|
max_conn_idle_time: "10m"
|
||||||
|
|
||||||
ai:
|
ai:
|
||||||
provider: "litellm"
|
providers:
|
||||||
|
default:
|
||||||
|
type: "litellm"
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "replace-me"
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
|
ollama_local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
|
openrouter:
|
||||||
|
type: "openrouter"
|
||||||
|
base_url: "https://openrouter.ai/api/v1"
|
||||||
|
api_key: "replace-me"
|
||||||
|
app_name: "amcs"
|
||||||
|
site_url: ""
|
||||||
|
request_headers: {}
|
||||||
|
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "openai/text-embedding-3-small"
|
|
||||||
dimensions: 1536
|
dimensions: 1536
|
||||||
|
primary:
|
||||||
|
provider: "default"
|
||||||
|
model: "openai/text-embedding-3-small"
|
||||||
|
fallbacks:
|
||||||
|
- provider: "ollama_local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
|
|
||||||
metadata:
|
metadata:
|
||||||
model: "gpt-4o-mini"
|
|
||||||
fallback_models: []
|
|
||||||
temperature: 0.1
|
temperature: 0.1
|
||||||
log_conversations: false
|
log_conversations: false
|
||||||
litellm:
|
timeout: "10s"
|
||||||
base_url: "http://localhost:4000/v1"
|
primary:
|
||||||
api_key: "replace-me"
|
provider: "default"
|
||||||
use_responses_api: false
|
model: "gpt-4o-mini"
|
||||||
request_headers: {}
|
fallbacks:
|
||||||
embedding_model: "openrouter/openai/text-embedding-3-small"
|
- provider: "openrouter"
|
||||||
metadata_model: "gpt-4o-mini"
|
model: "openai/gpt-4.1-mini"
|
||||||
fallback_metadata_models: []
|
|
||||||
ollama:
|
# Optional overrides for background jobs (backfill_embeddings,
|
||||||
base_url: "http://localhost:11434/v1"
|
# retry_failed_metadata, reparse_thought_metadata).
|
||||||
api_key: "ollama"
|
background:
|
||||||
request_headers: {}
|
embeddings:
|
||||||
openrouter:
|
primary:
|
||||||
base_url: "https://openrouter.ai/api/v1"
|
provider: "default"
|
||||||
api_key: ""
|
model: "openai/text-embedding-3-small"
|
||||||
app_name: "amcs"
|
metadata:
|
||||||
site_url: ""
|
primary:
|
||||||
extra_headers: {}
|
provider: "default"
|
||||||
|
model: "gpt-4o-mini"
|
||||||
|
|
||||||
capture:
|
capture:
|
||||||
source: "mcp"
|
source: "mcp"
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ server:
|
|||||||
|
|
||||||
mcp:
|
mcp:
|
||||||
path: "/mcp"
|
path: "/mcp"
|
||||||
|
sse_path: "/sse"
|
||||||
server_name: "amcs"
|
server_name: "amcs"
|
||||||
transport: "streamable_http"
|
transport: "streamable_http"
|
||||||
session_timeout: "10m"
|
session_timeout: "10m"
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ server:
|
|||||||
|
|
||||||
mcp:
|
mcp:
|
||||||
path: "/mcp"
|
path: "/mcp"
|
||||||
|
sse_path: "/sse"
|
||||||
server_name: "amcs"
|
server_name: "amcs"
|
||||||
transport: "streamable_http"
|
transport: "streamable_http"
|
||||||
session_timeout: "10m"
|
session_timeout: "10m"
|
||||||
|
|||||||
@@ -36,6 +36,18 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "8080:8080"
|
- "8080:8080"
|
||||||
|
|
||||||
|
migrate-config:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
profiles: ["tools"]
|
||||||
|
restart: "no"
|
||||||
|
volumes:
|
||||||
|
- ./configs:/app/configs
|
||||||
|
environment:
|
||||||
|
AMCS_CONFIG: /app/configs/docker.yaml
|
||||||
|
entrypoint: ["/app/amcs-migrate-config"]
|
||||||
|
command: ["--config", "/app/configs/docker.yaml", "--dry-run"]
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
postgres_data:
|
postgres_data:
|
||||||
|
|
||||||
|
|||||||
@@ -14,7 +14,6 @@ import (
|
|||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
@@ -36,36 +35,39 @@ Rules:
|
|||||||
- If unsure, prefer "observation".
|
- If unsure, prefer "observation".
|
||||||
- Do not include any text outside the JSON object.`
|
- Do not include any text outside the JSON object.`
|
||||||
|
|
||||||
|
// Client is a low-level OpenAI-compatible HTTP client. It knows nothing about
|
||||||
|
// role chains, fallbacks, or health — those concerns belong to ai.Runner. Each
|
||||||
|
// method takes the model name per-call so a single Client instance can service
|
||||||
|
// many different models on the same base URL.
|
||||||
type Client struct {
|
type Client struct {
|
||||||
name string
|
name string
|
||||||
baseURL string
|
baseURL string
|
||||||
apiKey string
|
apiKey string
|
||||||
embeddingModel string
|
headers map[string]string
|
||||||
metadataModel string
|
httpClient *http.Client
|
||||||
fallbackMetadataModels []string
|
log *slog.Logger
|
||||||
temperature float64
|
|
||||||
headers map[string]string
|
|
||||||
httpClient *http.Client
|
|
||||||
log *slog.Logger
|
|
||||||
dimensions int
|
|
||||||
logConversations bool
|
|
||||||
modelHealthMu sync.Mutex
|
|
||||||
modelHealth map[string]modelHealthState
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Name string
|
Name string
|
||||||
BaseURL string
|
BaseURL string
|
||||||
APIKey string
|
APIKey string
|
||||||
EmbeddingModel string
|
Headers map[string]string
|
||||||
MetadataModel string
|
HTTPClient *http.Client
|
||||||
FallbackMetadataModels []string
|
Log *slog.Logger
|
||||||
Temperature float64
|
}
|
||||||
Headers map[string]string
|
|
||||||
HTTPClient *http.Client
|
// MetadataOptions control a single ExtractMetadataWith call.
|
||||||
Log *slog.Logger
|
type MetadataOptions struct {
|
||||||
Dimensions int
|
Model string
|
||||||
LogConversations bool
|
Temperature float64
|
||||||
|
LogConversations bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// SummarizeOptions control a single SummarizeWith call.
|
||||||
|
type SummarizeOptions struct {
|
||||||
|
Model string
|
||||||
|
Temperature float64
|
||||||
}
|
}
|
||||||
|
|
||||||
type embeddingsRequest struct {
|
type embeddingsRequest struct {
|
||||||
@@ -127,65 +129,38 @@ type providerError struct {
|
|||||||
|
|
||||||
const maxMetadataAttempts = 3
|
const maxMetadataAttempts = 3
|
||||||
|
|
||||||
const (
|
// ErrEmptyResponse and ErrNoJSONObject are sentinel errors callers can inspect
|
||||||
emptyResponseCircuitThreshold = 3
|
// to classify metadata failures (e.g. bump empty-response health counters).
|
||||||
emptyResponseCircuitTTL = 5 * time.Minute
|
|
||||||
permanentModelFailureTTL = 24 * time.Hour
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
errMetadataEmptyResponse = errors.New("metadata empty response")
|
ErrEmptyResponse = errors.New("metadata empty response")
|
||||||
errMetadataNoJSONObject = errors.New("metadata response contains no JSON object")
|
ErrNoJSONObject = errors.New("metadata response contains no JSON object")
|
||||||
)
|
)
|
||||||
|
|
||||||
type modelHealthState struct {
|
|
||||||
consecutiveEmpty int
|
|
||||||
unhealthyUntil time.Time
|
|
||||||
}
|
|
||||||
|
|
||||||
func New(cfg Config) *Client {
|
func New(cfg Config) *Client {
|
||||||
fallbacks := make([]string, 0, len(cfg.FallbackMetadataModels))
|
|
||||||
seen := make(map[string]struct{}, len(cfg.FallbackMetadataModels))
|
|
||||||
for _, model := range cfg.FallbackMetadataModels {
|
|
||||||
model = strings.TrimSpace(model)
|
|
||||||
if model == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := seen[model]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[model] = struct{}{}
|
|
||||||
fallbacks = append(fallbacks, model)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &Client{
|
return &Client{
|
||||||
name: cfg.Name,
|
name: cfg.Name,
|
||||||
baseURL: cfg.BaseURL,
|
baseURL: cfg.BaseURL,
|
||||||
apiKey: cfg.APIKey,
|
apiKey: cfg.APIKey,
|
||||||
embeddingModel: cfg.EmbeddingModel,
|
headers: cfg.Headers,
|
||||||
metadataModel: cfg.MetadataModel,
|
httpClient: cfg.HTTPClient,
|
||||||
fallbackMetadataModels: fallbacks,
|
log: cfg.Log,
|
||||||
temperature: cfg.Temperature,
|
|
||||||
headers: cfg.Headers,
|
|
||||||
httpClient: cfg.HTTPClient,
|
|
||||||
log: cfg.Log,
|
|
||||||
dimensions: cfg.Dimensions,
|
|
||||||
logConversations: cfg.LogConversations,
|
|
||||||
modelHealth: make(map[string]modelHealthState),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) {
|
func (c *Client) Name() string { return c.name }
|
||||||
|
|
||||||
|
// EmbedWith generates an embedding for the given input using model.
|
||||||
|
func (c *Client) EmbedWith(ctx context.Context, model, input string) ([]float32, error) {
|
||||||
input = strings.TrimSpace(input)
|
input = strings.TrimSpace(input)
|
||||||
if input == "" {
|
if input == "" {
|
||||||
return nil, fmt.Errorf("%s embed: input must not be empty", c.name)
|
return nil, fmt.Errorf("%s embed: input must not be empty", c.name)
|
||||||
}
|
}
|
||||||
|
if strings.TrimSpace(model) == "" {
|
||||||
|
return nil, fmt.Errorf("%s embed: model is required", c.name)
|
||||||
|
}
|
||||||
|
|
||||||
var resp embeddingsResponse
|
var resp embeddingsResponse
|
||||||
err := c.doJSON(ctx, "/embeddings", embeddingsRequest{
|
err := c.doJSON(ctx, "/embeddings", embeddingsRequest{Input: input, Model: model}, &resp)
|
||||||
Input: input,
|
|
||||||
Model: c.embeddingModel,
|
|
||||||
}, &resp)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -195,141 +170,34 @@ func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) {
|
|||||||
if len(resp.Data) == 0 {
|
if len(resp.Data) == 0 {
|
||||||
return nil, fmt.Errorf("%s embed: no embedding returned", c.name)
|
return nil, fmt.Errorf("%s embed: no embedding returned", c.name)
|
||||||
}
|
}
|
||||||
if c.dimensions > 0 && len(resp.Data[0].Embedding) != c.dimensions {
|
|
||||||
return nil, fmt.Errorf("%s embed: expected %d dimensions, got %d", c.name, c.dimensions, len(resp.Data[0].Embedding))
|
|
||||||
}
|
|
||||||
|
|
||||||
return resp.Data[0].Embedding, nil
|
return resp.Data[0].Embedding, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) {
|
// ExtractMetadataWith extracts structured metadata for input using opts.Model.
|
||||||
|
// Returns compat.ErrEmptyResponse / ErrNoJSONObject wrapped when the model
|
||||||
|
// produces unusable output so callers can classify the failure.
|
||||||
|
func (c *Client) ExtractMetadataWith(ctx context.Context, opts MetadataOptions, input string) (thoughttypes.ThoughtMetadata, error) {
|
||||||
input = strings.TrimSpace(input)
|
input = strings.TrimSpace(input)
|
||||||
if input == "" {
|
if input == "" {
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: input must not be empty", c.name)
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: input must not be empty", c.name)
|
||||||
}
|
}
|
||||||
|
if strings.TrimSpace(opts.Model) == "" {
|
||||||
start := time.Now()
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: model is required", c.name)
|
||||||
if c.log != nil {
|
|
||||||
c.log.Info("metadata client started",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", c.metadataModel),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
logCompletion := func(model string, err error) {
|
|
||||||
if c.log == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
attrs := []any{
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.String("duration", formatLogDuration(time.Since(start))),
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
attrs = append(attrs, slog.String("error", err.Error()))
|
|
||||||
c.log.Error("metadata client completed", attrs...)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.log.Info("metadata client completed", attrs...)
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := c.extractMetadataWithModel(ctx, input, c.metadataModel)
|
|
||||||
if errors.Is(err, errMetadataEmptyResponse) {
|
|
||||||
c.noteEmptyResponse(c.metadataModel)
|
|
||||||
}
|
|
||||||
if isPermanentModelError(err) {
|
|
||||||
c.notePermanentModelFailure(c.metadataModel, err)
|
|
||||||
}
|
|
||||||
if err == nil {
|
|
||||||
c.noteModelSuccess(c.metadataModel)
|
|
||||||
logCompletion(c.metadataModel, nil)
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, fallbackModel := range c.fallbackMetadataModels {
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if fallbackModel == "" || fallbackModel == c.metadataModel {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if c.shouldBypassModel(fallbackModel) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata extraction failed, trying fallback model",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("primary_model", c.metadataModel),
|
|
||||||
slog.String("fallback_model", fallbackModel),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
fallbackResult, fallbackErr := c.extractMetadataWithModel(ctx, input, fallbackModel)
|
|
||||||
if errors.Is(fallbackErr, errMetadataEmptyResponse) {
|
|
||||||
c.noteEmptyResponse(fallbackModel)
|
|
||||||
}
|
|
||||||
if isPermanentModelError(fallbackErr) {
|
|
||||||
c.notePermanentModelFailure(fallbackModel, fallbackErr)
|
|
||||||
}
|
|
||||||
if fallbackErr == nil {
|
|
||||||
c.noteModelSuccess(fallbackModel)
|
|
||||||
logCompletion(fallbackModel, nil)
|
|
||||||
return fallbackResult, nil
|
|
||||||
}
|
|
||||||
err = fallbackErr
|
|
||||||
}
|
|
||||||
|
|
||||||
if ctx.Err() != nil {
|
|
||||||
err = fmt.Errorf("%s metadata: %w", c.name, ctx.Err())
|
|
||||||
logCompletion(c.metadataModel, err)
|
|
||||||
return thoughttypes.ThoughtMetadata{}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
heuristic := heuristicMetadataFromInput(input)
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata extraction failed for all models, using heuristic fallback",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
logCompletion(c.metadataModel, nil)
|
|
||||||
return heuristic, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func formatLogDuration(d time.Duration) string {
|
|
||||||
if d < 0 {
|
|
||||||
d = -d
|
|
||||||
}
|
|
||||||
|
|
||||||
totalMilliseconds := d.Milliseconds()
|
|
||||||
minutes := totalMilliseconds / 60000
|
|
||||||
seconds := (totalMilliseconds / 1000) % 60
|
|
||||||
milliseconds := totalMilliseconds % 1000
|
|
||||||
return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) extractMetadataWithModel(ctx context.Context, input, model string) (thoughttypes.ThoughtMetadata, error) {
|
|
||||||
if c.shouldBypassModel(model) {
|
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: model %q temporarily bypassed after repeated empty responses", c.name, model)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
stream := true
|
stream := true
|
||||||
req := chatCompletionsRequest{
|
req := chatCompletionsRequest{
|
||||||
Model: model,
|
Model: opts.Model,
|
||||||
Temperature: c.temperature,
|
Temperature: opts.Temperature,
|
||||||
ResponseFormat: &responseType{
|
ResponseFormat: &responseType{Type: "json_object"},
|
||||||
Type: "json_object",
|
Stream: &stream,
|
||||||
},
|
|
||||||
Stream: &stream,
|
|
||||||
Messages: []chatMessage{
|
Messages: []chatMessage{
|
||||||
{Role: "system", Content: metadataSystemPrompt},
|
{Role: "system", Content: metadataSystemPrompt},
|
||||||
{Role: "user", Content: input},
|
{Role: "user", Content: input},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
metadata, err := c.extractMetadataWithRequest(ctx, req, input, model)
|
metadata, err := c.extractMetadataWithRequest(ctx, req, input, opts)
|
||||||
if err == nil || !shouldRetryWithoutJSONMode(err) {
|
if err == nil || !shouldRetryWithoutJSONMode(err) {
|
||||||
return metadata, err
|
return metadata, err
|
||||||
}
|
}
|
||||||
@@ -337,23 +205,22 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
|
|||||||
if c.log != nil {
|
if c.log != nil {
|
||||||
c.log.Warn("metadata json mode failed, retrying without response_format",
|
c.log.Warn("metadata json mode failed, retrying without response_format",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.String("error", err.Error()),
|
slog.String("error", err.Error()),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
req.ResponseFormat = nil
|
req.ResponseFormat = nil
|
||||||
return c.extractMetadataWithRequest(ctx, req, input, model)
|
return c.extractMetadataWithRequest(ctx, req, input, opts)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input, model string) (thoughttypes.ThoughtMetadata, error) {
|
func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input string, opts MetadataOptions) (thoughttypes.ThoughtMetadata, error) {
|
||||||
|
|
||||||
var lastErr error
|
var lastErr error
|
||||||
for attempt := 1; attempt <= maxMetadataAttempts; attempt++ {
|
for attempt := 1; attempt <= maxMetadataAttempts; attempt++ {
|
||||||
if c.logConversations && c.log != nil {
|
if opts.LogConversations && c.log != nil {
|
||||||
c.log.Info("metadata conversation request",
|
c.log.Info("metadata conversation request",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt),
|
slog.Int("attempt", attempt),
|
||||||
slog.String("system", metadataSystemPrompt),
|
slog.String("system", metadataSystemPrompt),
|
||||||
slog.String("input", input),
|
slog.String("input", input),
|
||||||
@@ -373,10 +240,10 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
|
|
||||||
rawResponse := extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text)
|
rawResponse := extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text)
|
||||||
|
|
||||||
if c.logConversations && c.log != nil {
|
if opts.LogConversations && c.log != nil {
|
||||||
c.log.Info("metadata conversation response",
|
c.log.Info("metadata conversation response",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt),
|
slog.Int("attempt", attempt),
|
||||||
slog.String("response", rawResponse),
|
slog.String("response", rawResponse),
|
||||||
)
|
)
|
||||||
@@ -387,13 +254,13 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
metadataText = stripCodeFence(metadataText)
|
metadataText = stripCodeFence(metadataText)
|
||||||
metadataText = extractJSONObject(metadataText)
|
metadataText = extractJSONObject(metadataText)
|
||||||
if metadataText == "" {
|
if metadataText == "" {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject)
|
||||||
if strings.TrimSpace(rawResponse) == "" && attempt < maxMetadataAttempts && ctx.Err() == nil {
|
if strings.TrimSpace(rawResponse) == "" && attempt < maxMetadataAttempts && ctx.Err() == nil {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse)
|
||||||
if c.log != nil {
|
if c.log != nil {
|
||||||
c.log.Warn("metadata response empty, waiting and retrying",
|
c.log.Warn("metadata response empty, waiting and retrying",
|
||||||
slog.String("provider", c.name),
|
slog.String("provider", c.name),
|
||||||
slog.String("model", model),
|
slog.String("model", opts.Model),
|
||||||
slog.Int("attempt", attempt+1),
|
slog.Int("attempt", attempt+1),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -403,7 +270,7 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if strings.TrimSpace(rawResponse) == "" {
|
if strings.TrimSpace(rawResponse) == "" {
|
||||||
lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse)
|
lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse)
|
||||||
}
|
}
|
||||||
return thoughttypes.ThoughtMetadata{}, lastErr
|
return thoughttypes.ThoughtMetadata{}, lastErr
|
||||||
}
|
}
|
||||||
@@ -420,13 +287,17 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet
|
|||||||
if lastErr != nil {
|
if lastErr != nil {
|
||||||
return thoughttypes.ThoughtMetadata{}, lastErr
|
return thoughttypes.ThoughtMetadata{}, lastErr
|
||||||
}
|
}
|
||||||
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject)
|
return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) {
|
// SummarizeWith runs a chat-completion summarisation using opts.Model.
|
||||||
|
func (c *Client) SummarizeWith(ctx context.Context, opts SummarizeOptions, systemPrompt, userPrompt string) (string, error) {
|
||||||
|
if strings.TrimSpace(opts.Model) == "" {
|
||||||
|
return "", fmt.Errorf("%s summarize: model is required", c.name)
|
||||||
|
}
|
||||||
req := chatCompletionsRequest{
|
req := chatCompletionsRequest{
|
||||||
Model: c.metadataModel,
|
Model: opts.Model,
|
||||||
Temperature: 0.2,
|
Temperature: opts.Temperature,
|
||||||
Messages: []chatMessage{
|
Messages: []chatMessage{
|
||||||
{Role: "system", Content: systemPrompt},
|
{Role: "system", Content: systemPrompt},
|
||||||
{Role: "user", Content: userPrompt},
|
{Role: "user", Content: userPrompt},
|
||||||
@@ -447,12 +318,49 @@ func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string)
|
|||||||
return extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text), nil
|
return extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Name() string {
|
// IsPermanentModelError reports whether err indicates the model itself is
|
||||||
return c.name
|
// invalid or missing (vs. a transient outage). Runners use this to mark a
|
||||||
|
// target unhealthy for longer.
|
||||||
|
func IsPermanentModelError(err error) bool {
|
||||||
|
if err == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
lower := strings.ToLower(err.Error())
|
||||||
|
for _, marker := range []string{
|
||||||
|
"invalid model name",
|
||||||
|
"model_not_found",
|
||||||
|
"model not found",
|
||||||
|
"unknown model",
|
||||||
|
"no such model",
|
||||||
|
"does not exist",
|
||||||
|
} {
|
||||||
|
if strings.Contains(lower, marker) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) EmbeddingModel() string {
|
// HeuristicMetadataFromInput produces best-effort metadata from the note text
|
||||||
return c.embeddingModel
|
// when every model in the chain has failed. Exported so ai.Runner can use it.
|
||||||
|
func HeuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata {
|
||||||
|
text := strings.TrimSpace(input)
|
||||||
|
lower := strings.ToLower(text)
|
||||||
|
|
||||||
|
metadata := thoughttypes.ThoughtMetadata{
|
||||||
|
People: heuristicPeople(text),
|
||||||
|
ActionItems: heuristicActionItems(text),
|
||||||
|
DatesMentioned: heuristicDates(text),
|
||||||
|
Topics: heuristicTopics(lower),
|
||||||
|
Type: heuristicType(lower),
|
||||||
|
}
|
||||||
|
if len(metadata.Topics) == 0 {
|
||||||
|
metadata.Topics = []string{"uncategorized"}
|
||||||
|
}
|
||||||
|
if metadata.Type == "" {
|
||||||
|
metadata.Type = "observation"
|
||||||
|
}
|
||||||
|
return metadata
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) doJSON(ctx context.Context, path string, requestBody any, dest any) error {
|
func (c *Client) doJSON(ctx context.Context, path string, requestBody any, dest any) error {
|
||||||
@@ -724,8 +632,6 @@ func isRetryableChatResponseError(err error) bool {
|
|||||||
return strings.Contains(lower, "read response") || strings.Contains(lower, "read stream response")
|
return strings.Contains(lower, "read response") || strings.Contains(lower, "read stream response")
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractJSONObject finds the first complete {...} block in s.
|
|
||||||
// It handles models that prepend prose to a JSON response despite json_object mode.
|
|
||||||
func extractJSONObject(s string) string {
|
func extractJSONObject(s string) string {
|
||||||
for start := 0; start < len(s); start++ {
|
for start := 0; start < len(s); start++ {
|
||||||
if s[start] != '{' {
|
if s[start] != '{' {
|
||||||
@@ -768,10 +674,6 @@ func extractJSONObject(s string) string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// stripThinkingBlocks removes <think>...</think> and <thinking>...</thinking>
|
|
||||||
// blocks produced by reasoning models (DeepSeek R1, QwQ, etc.) so that the
|
|
||||||
// remaining text can be parsed as JSON without interference from thinking content
|
|
||||||
// that may itself contain braces.
|
|
||||||
func stripThinkingBlocks(s string) string {
|
func stripThinkingBlocks(s string) string {
|
||||||
for _, tag := range []string{"think", "thinking"} {
|
for _, tag := range []string{"think", "thinking"} {
|
||||||
open := "<" + tag + ">"
|
open := "<" + tag + ">"
|
||||||
@@ -857,7 +759,6 @@ func extractTextFromAny(value any) string {
|
|||||||
}
|
}
|
||||||
return strings.Join(parts, "\n")
|
return strings.Join(parts, "\n")
|
||||||
case map[string]any:
|
case map[string]any:
|
||||||
// Common provider shapes for chat content parts.
|
|
||||||
for _, key := range []string{"text", "output_text", "content", "value"} {
|
for _, key := range []string{"text", "output_text", "content", "value"} {
|
||||||
if nested, ok := typed[key]; ok {
|
if nested, ok := typed[key]; ok {
|
||||||
if text := strings.TrimSpace(extractTextFromAny(nested)); text != "" {
|
if text := strings.TrimSpace(extractTextFromAny(nested)); text != "" {
|
||||||
@@ -875,28 +776,6 @@ var (
|
|||||||
wordPattern = regexp.MustCompile(`[a-zA-Z][a-zA-Z0-9_/-]{2,}`)
|
wordPattern = regexp.MustCompile(`[a-zA-Z][a-zA-Z0-9_/-]{2,}`)
|
||||||
)
|
)
|
||||||
|
|
||||||
func heuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata {
|
|
||||||
text := strings.TrimSpace(input)
|
|
||||||
lower := strings.ToLower(text)
|
|
||||||
|
|
||||||
metadata := thoughttypes.ThoughtMetadata{
|
|
||||||
People: heuristicPeople(text),
|
|
||||||
ActionItems: heuristicActionItems(text),
|
|
||||||
DatesMentioned: heuristicDates(text),
|
|
||||||
Topics: heuristicTopics(lower),
|
|
||||||
Type: heuristicType(lower),
|
|
||||||
Source: "",
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(metadata.Topics) == 0 {
|
|
||||||
metadata.Topics = []string{"uncategorized"}
|
|
||||||
}
|
|
||||||
if metadata.Type == "" {
|
|
||||||
metadata.Type = "observation"
|
|
||||||
}
|
|
||||||
return metadata
|
|
||||||
}
|
|
||||||
|
|
||||||
func heuristicType(lower string) string {
|
func heuristicType(lower string) string {
|
||||||
switch {
|
switch {
|
||||||
case strings.Contains(lower, "preferred name"), strings.Contains(lower, "personal profile"), strings.Contains(lower, "wife:"), strings.Contains(lower, "daughter:"), strings.Contains(lower, "born:"):
|
case strings.Contains(lower, "preferred name"), strings.Contains(lower, "personal profile"), strings.Contains(lower, "wife:"), strings.Contains(lower, "daughter:"), strings.Contains(lower, "born:"):
|
||||||
@@ -1055,7 +934,7 @@ func shouldRetryWithoutJSONMode(err error) bool {
|
|||||||
if err == nil {
|
if err == nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if errors.Is(err, errMetadataEmptyResponse) || errors.Is(err, errMetadataNoJSONObject) {
|
if errors.Is(err, ErrEmptyResponse) || errors.Is(err, ErrNoJSONObject) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1063,27 +942,6 @@ func shouldRetryWithoutJSONMode(err error) bool {
|
|||||||
return strings.Contains(lower, "parse json")
|
return strings.Contains(lower, "parse json")
|
||||||
}
|
}
|
||||||
|
|
||||||
func isPermanentModelError(err error) bool {
|
|
||||||
if err == nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
lower := strings.ToLower(err.Error())
|
|
||||||
for _, marker := range []string{
|
|
||||||
"invalid model name",
|
|
||||||
"model_not_found",
|
|
||||||
"model not found",
|
|
||||||
"unknown model",
|
|
||||||
"no such model",
|
|
||||||
"does not exist",
|
|
||||||
} {
|
|
||||||
if strings.Contains(lower, marker) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func sleepRetry(ctx context.Context, attempt int, log *slog.Logger, provider string) error {
|
func sleepRetry(ctx context.Context, attempt int, log *slog.Logger, provider string) error {
|
||||||
delay := time.Duration(attempt*attempt) * 200 * time.Millisecond
|
delay := time.Duration(attempt*attempt) * 200 * time.Millisecond
|
||||||
if log != nil {
|
if log != nil {
|
||||||
@@ -1110,59 +968,3 @@ func sleepMetadataRetry(ctx context.Context, attempt int) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) shouldBypassModel(model string) bool {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state, ok := c.modelHealth[model]
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return !state.unhealthyUntil.IsZero() && time.Now().Before(state.unhealthyUntil)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) noteEmptyResponse(model string) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state := c.modelHealth[model]
|
|
||||||
state.consecutiveEmpty++
|
|
||||||
if state.consecutiveEmpty >= emptyResponseCircuitThreshold {
|
|
||||||
state.unhealthyUntil = time.Now().Add(emptyResponseCircuitTTL)
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata model marked temporarily unhealthy after repeated empty responses",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.Time("until", state.unhealthyUntil),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
c.modelHealth[model] = state
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) noteModelSuccess(model string) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
delete(c.modelHealth, model)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Client) notePermanentModelFailure(model string, err error) {
|
|
||||||
c.modelHealthMu.Lock()
|
|
||||||
defer c.modelHealthMu.Unlock()
|
|
||||||
|
|
||||||
state := c.modelHealth[model]
|
|
||||||
state.consecutiveEmpty = emptyResponseCircuitThreshold
|
|
||||||
state.unhealthyUntil = time.Now().Add(permanentModelFailureTTL)
|
|
||||||
c.modelHealth[model] = state
|
|
||||||
|
|
||||||
if c.log != nil {
|
|
||||||
c.log.Warn("metadata model marked unhealthy after permanent failure",
|
|
||||||
slog.String("provider", c.name),
|
|
||||||
slog.String("model", model),
|
|
||||||
slog.String("error", err.Error()),
|
|
||||||
slog.Time("until", state.unhealthyUntil),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -11,6 +11,17 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func newTestClient(t *testing.T, url string) *Client {
|
||||||
|
t.Helper()
|
||||||
|
return New(Config{
|
||||||
|
Name: "litellm",
|
||||||
|
BaseURL: url,
|
||||||
|
APIKey: "test-key",
|
||||||
|
HTTPClient: http.DefaultClient,
|
||||||
|
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
@@ -26,6 +37,9 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
|||||||
if req.Stream == nil || !*req.Stream {
|
if req.Stream == nil || !*req.Stream {
|
||||||
t.Fatalf("stream flag = %v, want true", req.Stream)
|
t.Fatalf("stream flag = %v, want true", req.Stream)
|
||||||
}
|
}
|
||||||
|
if req.Model != "qwen3.5:latest" {
|
||||||
|
t.Fatalf("model = %q, want qwen3.5:latest", req.Model)
|
||||||
|
}
|
||||||
|
|
||||||
w.Header().Set("Content-Type", "text/event-stream")
|
w.Header().Set("Content-Type", "text/event-stream")
|
||||||
_, _ = io.WriteString(w, "data: {\"choices\":[{\"delta\":{\"content\":\"{\\\"people\\\":[],\"}}]}\n\n")
|
_, _ = io.WriteString(w, "data: {\"choices\":[{\"delta\":{\"content\":\"{\\\"people\\\":[],\"}}]}\n\n")
|
||||||
@@ -35,20 +49,13 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) {
|
|||||||
}))
|
}))
|
||||||
defer server.Close()
|
defer server.Close()
|
||||||
|
|
||||||
client := New(Config{
|
client := newTestClient(t, server.URL)
|
||||||
Name: "litellm",
|
metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{
|
||||||
BaseURL: server.URL,
|
Model: "qwen3.5:latest",
|
||||||
APIKey: "test-key",
|
Temperature: 0.1,
|
||||||
MetadataModel: "qwen3.5:latest",
|
}, "Project idea: Build an Android companion app.")
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an Android companion app.")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
t.Fatalf("ExtractMetadataWith() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if metadata.Type != "idea" {
|
if metadata.Type != "idea" {
|
||||||
@@ -94,20 +101,13 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) {
|
|||||||
}))
|
}))
|
||||||
defer server.Close()
|
defer server.Close()
|
||||||
|
|
||||||
client := New(Config{
|
client := newTestClient(t, server.URL)
|
||||||
Name: "litellm",
|
metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{
|
||||||
BaseURL: server.URL,
|
Model: "qwen3.5:latest",
|
||||||
APIKey: "test-key",
|
Temperature: 0.1,
|
||||||
MetadataModel: "qwen3.5:latest",
|
}, "Project idea: Build an Android companion app.")
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an Android companion app.")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
t.Fatalf("ExtractMetadataWith() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if metadata.Type != "idea" {
|
if metadata.Type != "idea" {
|
||||||
@@ -127,71 +127,33 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExtractMetadataBypassesInvalidFallbackModelAfterFirstFailure(t *testing.T) {
|
func TestIsPermanentModelError(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
var mu sync.Mutex
|
cases := []struct {
|
||||||
primaryCalls := 0
|
name string
|
||||||
invalidFallbackCalls := 0
|
err error
|
||||||
|
want bool
|
||||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
}{
|
||||||
defer func() {
|
{"nil", nil, false},
|
||||||
_ = r.Body.Close()
|
{"invalid model", errMsg("Invalid model name passed in model=qwen3"), true},
|
||||||
}()
|
{"model not found", errMsg("model_not_found"), true},
|
||||||
|
{"no such model", errMsg("no such model"), true},
|
||||||
var req chatCompletionsRequest
|
{"transient", errMsg("connection refused"), false},
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
||||||
t.Fatalf("decode request: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
switch req.Model {
|
|
||||||
case "empty-primary":
|
|
||||||
_, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":""}}]}`)
|
|
||||||
case "qwen3.5:latest":
|
|
||||||
mu.Lock()
|
|
||||||
primaryCalls++
|
|
||||||
mu.Unlock()
|
|
||||||
_, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":"{\"people\":[],\"action_items\":[],\"dates_mentioned\":[],\"topics\":[\"metadata\"],\"type\":\"observation\",\"source\":\"primary\"}"}}]}`)
|
|
||||||
case "qwen3":
|
|
||||||
mu.Lock()
|
|
||||||
invalidFallbackCalls++
|
|
||||||
mu.Unlock()
|
|
||||||
w.WriteHeader(http.StatusBadRequest)
|
|
||||||
_, _ = io.WriteString(w, "{\"error\":{\"message\":\"{'error': '/chat/completions: Invalid model name passed in model=qwen3. Call `/v1/models` to view available models for your key.'}\"}}")
|
|
||||||
default:
|
|
||||||
t.Fatalf("unexpected model %q", req.Model)
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
defer server.Close()
|
|
||||||
|
|
||||||
client := New(Config{
|
|
||||||
Name: "litellm",
|
|
||||||
BaseURL: server.URL,
|
|
||||||
APIKey: "test-key",
|
|
||||||
MetadataModel: "empty-primary",
|
|
||||||
FallbackMetadataModels: []string{"qwen3", "qwen3.5:latest"},
|
|
||||||
Temperature: 0.1,
|
|
||||||
HTTPClient: server.Client(),
|
|
||||||
Log: slog.New(slog.NewTextHandler(io.Discard, nil)),
|
|
||||||
EmbeddingModel: "unused",
|
|
||||||
})
|
|
||||||
|
|
||||||
for i := 0; i < 2; i++ {
|
|
||||||
metadata, err := client.ExtractMetadata(context.Background(), "A short note about metadata.")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("ExtractMetadata() error = %v", err)
|
|
||||||
}
|
|
||||||
if metadata.Source != "primary" {
|
|
||||||
t.Fatalf("metadata source = %q, want primary", metadata.Source)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mu.Lock()
|
for _, tc := range cases {
|
||||||
defer mu.Unlock()
|
tc := tc
|
||||||
if invalidFallbackCalls != 1 {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
t.Fatalf("invalid fallback calls = %d, want 1", invalidFallbackCalls)
|
if got := IsPermanentModelError(tc.err); got != tc.want {
|
||||||
}
|
t.Fatalf("IsPermanentModelError(%v) = %v, want %v", tc.err, got, tc.want)
|
||||||
if primaryCalls != 2 {
|
}
|
||||||
t.Fatalf("valid fallback calls = %d, want 2", primaryCalls)
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type stringError string
|
||||||
|
|
||||||
|
func (s stringError) Error() string { return string(s) }
|
||||||
|
|
||||||
|
func errMsg(s string) error { return stringError(s) }
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/litellm"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/ollama"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/openrouter"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (Provider, error) {
|
|
||||||
switch cfg.Provider {
|
|
||||||
case "litellm":
|
|
||||||
return litellm.New(cfg, httpClient, log)
|
|
||||||
case "ollama":
|
|
||||||
return ollama.New(cfg, httpClient, log)
|
|
||||||
case "openrouter":
|
|
||||||
return openrouter.New(cfg, httpClient, log)
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("unsupported ai.provider: %s", cfg.Provider)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestNewProviderSupportsOllama(t *testing.T) {
|
|
||||||
provider, err := NewProvider(config.AIConfig{
|
|
||||||
Provider: "ollama",
|
|
||||||
Embeddings: config.AIEmbeddingConfig{
|
|
||||||
Model: "nomic-embed-text",
|
|
||||||
Dimensions: 768,
|
|
||||||
},
|
|
||||||
Metadata: config.AIMetadataConfig{
|
|
||||||
Model: "llama3.2",
|
|
||||||
},
|
|
||||||
Ollama: config.OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
}, &http.Client{}, slog.New(slog.NewTextHandler(io.Discard, nil)))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("NewProvider() error = %v", err)
|
|
||||||
}
|
|
||||||
if provider.Name() != "ollama" {
|
|
||||||
t.Fatalf("provider name = %q, want ollama", provider.Name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
package litellm
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
fallbacks := cfg.LiteLLM.EffectiveFallbackMetadataModels()
|
|
||||||
if len(fallbacks) == 0 {
|
|
||||||
fallbacks = cfg.Metadata.EffectiveFallbackModels()
|
|
||||||
}
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "litellm",
|
|
||||||
BaseURL: cfg.LiteLLM.BaseURL,
|
|
||||||
APIKey: cfg.LiteLLM.APIKey,
|
|
||||||
EmbeddingModel: cfg.LiteLLM.EmbeddingModel,
|
|
||||||
MetadataModel: cfg.LiteLLM.MetadataModel,
|
|
||||||
FallbackMetadataModels: fallbacks,
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: cfg.LiteLLM.RequestHeaders,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
package ollama
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "ollama",
|
|
||||||
BaseURL: cfg.Ollama.BaseURL,
|
|
||||||
APIKey: cfg.Ollama.APIKey,
|
|
||||||
EmbeddingModel: cfg.Embeddings.Model,
|
|
||||||
MetadataModel: cfg.Metadata.Model,
|
|
||||||
FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(),
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: cfg.Ollama.RequestHeaders,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
package openrouter
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log/slog"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
|
||||||
headers := make(map[string]string, len(cfg.OpenRouter.ExtraHeaders)+2)
|
|
||||||
for key, value := range cfg.OpenRouter.ExtraHeaders {
|
|
||||||
headers[key] = value
|
|
||||||
}
|
|
||||||
if cfg.OpenRouter.SiteURL != "" {
|
|
||||||
headers["HTTP-Referer"] = cfg.OpenRouter.SiteURL
|
|
||||||
}
|
|
||||||
if cfg.OpenRouter.AppName != "" {
|
|
||||||
headers["X-Title"] = cfg.OpenRouter.AppName
|
|
||||||
}
|
|
||||||
|
|
||||||
return compat.New(compat.Config{
|
|
||||||
Name: "openrouter",
|
|
||||||
BaseURL: cfg.OpenRouter.BaseURL,
|
|
||||||
APIKey: cfg.OpenRouter.APIKey,
|
|
||||||
EmbeddingModel: cfg.Embeddings.Model,
|
|
||||||
MetadataModel: cfg.Metadata.Model,
|
|
||||||
FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(),
|
|
||||||
Temperature: cfg.Metadata.Temperature,
|
|
||||||
Headers: headers,
|
|
||||||
HTTPClient: httpClient,
|
|
||||||
Log: log,
|
|
||||||
Dimensions: cfg.Embeddings.Dimensions,
|
|
||||||
LogConversations: cfg.Metadata.LogConversations,
|
|
||||||
}), nil
|
|
||||||
}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
package ai
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
|
|
||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Provider interface {
|
|
||||||
Embed(ctx context.Context, input string) ([]float32, error)
|
|
||||||
ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error)
|
|
||||||
Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error)
|
|
||||||
Name() string
|
|
||||||
EmbeddingModel() string
|
|
||||||
}
|
|
||||||
96
internal/ai/registry.go
Normal file
96
internal/ai/registry.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Registry holds one compat.Client per named provider. Runners look up clients
|
||||||
|
// by provider name when walking a role chain.
|
||||||
|
type Registry struct {
|
||||||
|
clients map[string]*compat.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRegistry builds a Registry from the configured providers. Each provider
|
||||||
|
// type maps onto a compat.Client with type-specific header plumbing (e.g.
|
||||||
|
// openrouter's HTTP-Referer / X-Title).
|
||||||
|
func NewRegistry(providers map[string]config.ProviderConfig, httpClient *http.Client, log *slog.Logger) (*Registry, error) {
|
||||||
|
if httpClient == nil {
|
||||||
|
return nil, fmt.Errorf("ai registry: http client is required")
|
||||||
|
}
|
||||||
|
if len(providers) == 0 {
|
||||||
|
return nil, fmt.Errorf("ai registry: no providers configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
clients := make(map[string]*compat.Client, len(providers))
|
||||||
|
for name, p := range providers {
|
||||||
|
headers, err := providerHeaders(p)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("ai registry: provider %q: %w", name, err)
|
||||||
|
}
|
||||||
|
clients[name] = compat.New(compat.Config{
|
||||||
|
Name: name,
|
||||||
|
BaseURL: p.BaseURL,
|
||||||
|
APIKey: p.APIKey,
|
||||||
|
Headers: headers,
|
||||||
|
HTTPClient: httpClient,
|
||||||
|
Log: log,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return &Registry{clients: clients}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Client returns the compat.Client registered under name.
|
||||||
|
func (r *Registry) Client(name string) (*compat.Client, error) {
|
||||||
|
c, ok := r.clients[name]
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("ai registry: provider %q is not configured", name)
|
||||||
|
}
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Names returns the registered provider names.
|
||||||
|
func (r *Registry) Names() []string {
|
||||||
|
names := make([]string, 0, len(r.clients))
|
||||||
|
for name := range r.clients {
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
|
func providerHeaders(p config.ProviderConfig) (map[string]string, error) {
|
||||||
|
switch p.Type {
|
||||||
|
case "litellm", "ollama":
|
||||||
|
return cloneHeaders(p.RequestHeaders), nil
|
||||||
|
case "openrouter":
|
||||||
|
headers := cloneHeaders(p.RequestHeaders)
|
||||||
|
if headers == nil {
|
||||||
|
headers = map[string]string{}
|
||||||
|
}
|
||||||
|
if s := strings.TrimSpace(p.SiteURL); s != "" {
|
||||||
|
headers["HTTP-Referer"] = s
|
||||||
|
}
|
||||||
|
if s := strings.TrimSpace(p.AppName); s != "" {
|
||||||
|
headers["X-Title"] = s
|
||||||
|
}
|
||||||
|
return headers, nil
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported provider type %q", p.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func cloneHeaders(in map[string]string) map[string]string {
|
||||||
|
if len(in) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := make(map[string]string, len(in))
|
||||||
|
for k, v := range in {
|
||||||
|
out[k] = v
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
80
internal/ai/registry_test.go
Normal file
80
internal/ai/registry_test.go
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewRegistryOpenRouterHeaders(t *testing.T) {
|
||||||
|
var (
|
||||||
|
gotReferer string
|
||||||
|
gotTitle string
|
||||||
|
gotCustom string
|
||||||
|
)
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
gotReferer = r.Header.Get("HTTP-Referer")
|
||||||
|
gotTitle = r.Header.Get("X-Title")
|
||||||
|
gotCustom = r.Header.Get("X-Custom")
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"choices": []map[string]any{{"message": map[string]any{"role": "assistant", "content": "ok"}}},
|
||||||
|
})
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
providers := map[string]config.ProviderConfig{
|
||||||
|
"router": {
|
||||||
|
Type: "openrouter",
|
||||||
|
BaseURL: srv.URL,
|
||||||
|
APIKey: "secret",
|
||||||
|
RequestHeaders: map[string]string{
|
||||||
|
"X-Custom": "value",
|
||||||
|
},
|
||||||
|
AppName: "amcs",
|
||||||
|
SiteURL: "https://example.com",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
reg, err := NewRegistry(providers, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
client, err := reg.Client("router")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Client(router) error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := client.SummarizeWith(context.Background(), compat.SummarizeOptions{Model: "gpt-4.1-mini"}, "system", "user"); err != nil {
|
||||||
|
t.Fatalf("SummarizeWith() error = %v", err)
|
||||||
|
}
|
||||||
|
if gotReferer != "https://example.com" {
|
||||||
|
t.Fatalf("HTTP-Referer = %q, want https://example.com", gotReferer)
|
||||||
|
}
|
||||||
|
if gotTitle != "amcs" {
|
||||||
|
t.Fatalf("X-Title = %q, want amcs", gotTitle)
|
||||||
|
}
|
||||||
|
if gotCustom != "value" {
|
||||||
|
t.Fatalf("X-Custom = %q, want value", gotCustom)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewRegistryRejectsUnsupportedProviderType(t *testing.T) {
|
||||||
|
providers := map[string]config.ProviderConfig{
|
||||||
|
"bad": {
|
||||||
|
Type: "unknown",
|
||||||
|
BaseURL: "http://localhost:4000/v1",
|
||||||
|
APIKey: "secret",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := NewRegistry(providers, &http.Client{}, nil)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("NewRegistry() error = nil, want unsupported provider type error")
|
||||||
|
}
|
||||||
|
}
|
||||||
367
internal/ai/runner.go
Normal file
367
internal/ai/runner.go
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Health TTLs per failure class. These are short enough that a healed target
|
||||||
|
// gets retried without manual intervention, but long enough to avoid hammering
|
||||||
|
// a broken provider every call.
|
||||||
|
const (
|
||||||
|
transientCooldown = 30 * time.Second
|
||||||
|
permanentCooldown = 10 * time.Minute
|
||||||
|
emptyResponseThreshold = 3
|
||||||
|
emptyResponseCooldown = 2 * time.Minute
|
||||||
|
dimensionMismatchWarning = "embedding dimension mismatch"
|
||||||
|
)
|
||||||
|
|
||||||
|
// EmbedResult carries the vector plus the (provider, model) that produced it —
|
||||||
|
// callers store the actual model so later searches against that row use the
|
||||||
|
// matching query embedding.
|
||||||
|
type EmbedResult struct {
|
||||||
|
Vector []float32
|
||||||
|
Provider string
|
||||||
|
Model string
|
||||||
|
}
|
||||||
|
|
||||||
|
// EmbeddingRunner executes the embeddings role chain with sequential fallback.
|
||||||
|
type EmbeddingRunner struct {
|
||||||
|
registry *Registry
|
||||||
|
chain []config.RoleTarget
|
||||||
|
dimensions int
|
||||||
|
health *healthTracker
|
||||||
|
log *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// MetadataRunner executes the metadata role chain with sequential fallback and
|
||||||
|
// a heuristic fallthrough when every target is unhealthy or fails.
|
||||||
|
type MetadataRunner struct {
|
||||||
|
registry *Registry
|
||||||
|
chain []config.RoleTarget
|
||||||
|
opts metadataRunOpts
|
||||||
|
health *healthTracker
|
||||||
|
log *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
type metadataRunOpts struct {
|
||||||
|
temperature float64
|
||||||
|
logConversations bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEmbeddingRunner builds a runner for the embeddings role. chain must be
|
||||||
|
// non-empty and every target must be registered.
|
||||||
|
func NewEmbeddingRunner(registry *Registry, chain []config.RoleTarget, dimensions int, log *slog.Logger) (*EmbeddingRunner, error) {
|
||||||
|
if registry == nil {
|
||||||
|
return nil, fmt.Errorf("embedding runner: registry is required")
|
||||||
|
}
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return nil, fmt.Errorf("embedding runner: chain is empty")
|
||||||
|
}
|
||||||
|
if dimensions <= 0 {
|
||||||
|
return nil, fmt.Errorf("embedding runner: dimensions must be > 0")
|
||||||
|
}
|
||||||
|
for i, t := range chain {
|
||||||
|
if _, err := registry.Client(t.Provider); err != nil {
|
||||||
|
return nil, fmt.Errorf("embedding runner: chain[%d]: %w", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &EmbeddingRunner{
|
||||||
|
registry: registry,
|
||||||
|
chain: chain,
|
||||||
|
dimensions: dimensions,
|
||||||
|
health: newHealthTracker(),
|
||||||
|
log: log,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMetadataRunner builds a runner for the metadata role.
|
||||||
|
func NewMetadataRunner(registry *Registry, chain []config.RoleTarget, temperature float64, logConversations bool, log *slog.Logger) (*MetadataRunner, error) {
|
||||||
|
if registry == nil {
|
||||||
|
return nil, fmt.Errorf("metadata runner: registry is required")
|
||||||
|
}
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return nil, fmt.Errorf("metadata runner: chain is empty")
|
||||||
|
}
|
||||||
|
for i, t := range chain {
|
||||||
|
if _, err := registry.Client(t.Provider); err != nil {
|
||||||
|
return nil, fmt.Errorf("metadata runner: chain[%d]: %w", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &MetadataRunner{
|
||||||
|
registry: registry,
|
||||||
|
chain: chain,
|
||||||
|
opts: metadataRunOpts{
|
||||||
|
temperature: temperature,
|
||||||
|
logConversations: logConversations,
|
||||||
|
},
|
||||||
|
health: newHealthTracker(),
|
||||||
|
log: log,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrimaryProvider returns the first provider in the chain.
|
||||||
|
func (r *EmbeddingRunner) PrimaryProvider() string { return r.chain[0].Provider }
|
||||||
|
|
||||||
|
// PrimaryModel returns the first model in the chain — the one used as the
|
||||||
|
// storage key for search matching.
|
||||||
|
func (r *EmbeddingRunner) PrimaryModel() string { return r.chain[0].Model }
|
||||||
|
|
||||||
|
// Dimensions returns the required vector dimension.
|
||||||
|
func (r *EmbeddingRunner) Dimensions() int { return r.dimensions }
|
||||||
|
|
||||||
|
// Embed walks the chain and returns the first successful embedding. The
|
||||||
|
// returned EmbedResult names the actual (provider, model) that produced the
|
||||||
|
// vector — callers use that when recording the row.
|
||||||
|
func (r *EmbeddingRunner) Embed(ctx context.Context, input string) (EmbedResult, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
vec, err := client.EmbedWith(ctx, target.Model, input)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return EmbedResult{}, ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("embed", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(vec) != r.dimensions {
|
||||||
|
dimErr := fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec))
|
||||||
|
r.health.markTransient(target)
|
||||||
|
r.logFailure("embed", target, dimErr)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, dimErr))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return EmbedResult{Vector: vec, Provider: target.Provider, Model: target.Model}, nil
|
||||||
|
}
|
||||||
|
return EmbedResult{}, fmt.Errorf("all embedding targets failed: %w", errors.Join(errs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// EmbedPrimary embeds using only the primary target — used for search queries
|
||||||
|
// so the query vector matches rows stored under the primary model. Falls back
|
||||||
|
// to returning the error without walking the chain.
|
||||||
|
func (r *EmbeddingRunner) EmbedPrimary(ctx context.Context, input string) ([]float32, error) {
|
||||||
|
target := r.chain[0]
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
vec, err := client.EmbedWith(ctx, target.Model, input)
|
||||||
|
if err != nil {
|
||||||
|
r.classify(target, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(vec) != r.dimensions {
|
||||||
|
return nil, fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec))
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return vec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrimaryProvider / PrimaryModel for metadata mirror the embedding runner.
|
||||||
|
func (r *MetadataRunner) PrimaryProvider() string { return r.chain[0].Provider }
|
||||||
|
func (r *MetadataRunner) PrimaryModel() string { return r.chain[0].Model }
|
||||||
|
|
||||||
|
// ExtractMetadata walks the chain sequentially. If every target fails or is
|
||||||
|
// unhealthy, it returns a heuristic metadata so capture never hard-fails.
|
||||||
|
func (r *MetadataRunner) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
md, err := client.ExtractMetadataWith(ctx, compat.MetadataOptions{
|
||||||
|
Model: target.Model,
|
||||||
|
Temperature: r.opts.temperature,
|
||||||
|
LogConversations: r.opts.logConversations,
|
||||||
|
}, input)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return thoughttypes.ThoughtMetadata{}, ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("metadata", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return md, nil
|
||||||
|
}
|
||||||
|
if r.log != nil {
|
||||||
|
r.log.Warn("metadata chain exhausted, using heuristic fallback",
|
||||||
|
slog.Int("targets", len(r.chain)),
|
||||||
|
slog.String("error", errors.Join(errs...).Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return compat.HeuristicMetadataFromInput(input), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summarize walks the chain; unlike metadata, there is no heuristic fallback —
|
||||||
|
// returns the joined error when everything fails.
|
||||||
|
func (r *MetadataRunner) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) {
|
||||||
|
var errs []error
|
||||||
|
for _, target := range r.chain {
|
||||||
|
if r.health.skip(target) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
client, err := r.registry.Client(target.Provider)
|
||||||
|
if err != nil {
|
||||||
|
errs = append(errs, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
out, err := client.SummarizeWith(ctx, compat.SummarizeOptions{
|
||||||
|
Model: target.Model,
|
||||||
|
Temperature: r.opts.temperature,
|
||||||
|
}, systemPrompt, userPrompt)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return "", ctx.Err()
|
||||||
|
}
|
||||||
|
r.classify(target, err)
|
||||||
|
r.logFailure("summarize", target, err)
|
||||||
|
errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.health.markHealthy(target)
|
||||||
|
return out, nil
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("all summarize targets failed: %w", errors.Join(errs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EmbeddingRunner) classify(target config.RoleTarget, err error) {
|
||||||
|
switch {
|
||||||
|
case compat.IsPermanentModelError(err):
|
||||||
|
r.health.markPermanent(target)
|
||||||
|
default:
|
||||||
|
r.health.markTransient(target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *MetadataRunner) classify(target config.RoleTarget, err error) {
|
||||||
|
switch {
|
||||||
|
case compat.IsPermanentModelError(err):
|
||||||
|
r.health.markPermanent(target)
|
||||||
|
case errors.Is(err, compat.ErrEmptyResponse):
|
||||||
|
r.health.markEmpty(target)
|
||||||
|
default:
|
||||||
|
r.health.markTransient(target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EmbeddingRunner) logFailure(role string, target config.RoleTarget, err error) {
|
||||||
|
if r.log == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.log.Warn("ai target failed",
|
||||||
|
slog.String("role", role),
|
||||||
|
slog.String("provider", target.Provider),
|
||||||
|
slog.String("model", target.Model),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *MetadataRunner) logFailure(role string, target config.RoleTarget, err error) {
|
||||||
|
if r.log == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.log.Warn("ai target failed",
|
||||||
|
slog.String("role", role),
|
||||||
|
slog.String("provider", target.Provider),
|
||||||
|
slog.String("model", target.Model),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// healthTracker records per-(provider, model) failure state. skip returns true
|
||||||
|
// when a target is still inside its cooldown window; the caller then tries the
|
||||||
|
// next target in the chain.
|
||||||
|
type healthTracker struct {
|
||||||
|
mu sync.Mutex
|
||||||
|
states map[config.RoleTarget]*healthState
|
||||||
|
}
|
||||||
|
|
||||||
|
type healthState struct {
|
||||||
|
unhealthyUntil time.Time
|
||||||
|
emptyCount int
|
||||||
|
}
|
||||||
|
|
||||||
|
func newHealthTracker() *healthTracker {
|
||||||
|
return &healthTracker{states: map[config.RoleTarget]*healthState{}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) skip(target config.RoleTarget) bool {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s, ok := h.states[target]
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return time.Now().Before(s.unhealthyUntil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markTransient(target config.RoleTarget) {
|
||||||
|
h.setCooldown(target, transientCooldown)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markPermanent(target config.RoleTarget) {
|
||||||
|
h.setCooldown(target, permanentCooldown)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markEmpty(target config.RoleTarget) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s := h.states[target]
|
||||||
|
if s == nil {
|
||||||
|
s = &healthState{}
|
||||||
|
h.states[target] = s
|
||||||
|
}
|
||||||
|
s.emptyCount++
|
||||||
|
if s.emptyCount >= emptyResponseThreshold {
|
||||||
|
s.unhealthyUntil = time.Now().Add(emptyResponseCooldown)
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) markHealthy(target config.RoleTarget) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
if s, ok := h.states[target]; ok {
|
||||||
|
s.unhealthyUntil = time.Time{}
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *healthTracker) setCooldown(target config.RoleTarget, d time.Duration) {
|
||||||
|
h.mu.Lock()
|
||||||
|
defer h.mu.Unlock()
|
||||||
|
s := h.states[target]
|
||||||
|
if s == nil {
|
||||||
|
s = &healthState{}
|
||||||
|
h.states[target] = s
|
||||||
|
}
|
||||||
|
s.unhealthyUntil = time.Now().Add(d)
|
||||||
|
s.emptyCount = 0
|
||||||
|
}
|
||||||
139
internal/ai/runner_test.go
Normal file
139
internal/ai/runner_test.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package ai
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"sync"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestEmbeddingRunnerFallsBackAndSkipsUnhealthyPrimary(t *testing.T) {
|
||||||
|
var (
|
||||||
|
mu sync.Mutex
|
||||||
|
primaryCalls int
|
||||||
|
)
|
||||||
|
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/embeddings" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var req struct {
|
||||||
|
Model string `json:"model"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch req.Model {
|
||||||
|
case "embed-primary":
|
||||||
|
mu.Lock()
|
||||||
|
primaryCalls++
|
||||||
|
mu.Unlock()
|
||||||
|
http.Error(w, "upstream down", http.StatusBadGateway)
|
||||||
|
case "embed-fallback":
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"data": []map[string]any{{"embedding": []float32{0.1, 0.2, 0.3}}},
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
http.Error(w, "unknown model", http.StatusBadRequest)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
reg, err := NewRegistry(map[string]config.ProviderConfig{
|
||||||
|
"p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"},
|
||||||
|
"p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"},
|
||||||
|
}, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
runner, err := NewEmbeddingRunner(reg, []config.RoleTarget{
|
||||||
|
{Provider: "p1", Model: "embed-primary"},
|
||||||
|
{Provider: "p2", Model: "embed-fallback"},
|
||||||
|
}, 3, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewEmbeddingRunner() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := runner.Embed(context.Background(), "hello")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Embed() first call error = %v", err)
|
||||||
|
}
|
||||||
|
if res.Provider != "p2" || res.Model != "embed-fallback" {
|
||||||
|
t.Fatalf("Embed() first call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err = runner.Embed(context.Background(), "hello again")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Embed() second call error = %v", err)
|
||||||
|
}
|
||||||
|
if res.Provider != "p2" || res.Model != "embed-fallback" {
|
||||||
|
t.Fatalf("Embed() second call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model)
|
||||||
|
}
|
||||||
|
|
||||||
|
mu.Lock()
|
||||||
|
calls := primaryCalls
|
||||||
|
mu.Unlock()
|
||||||
|
if calls != 3 {
|
||||||
|
t.Fatalf("primary calls = %d, want 3 (first request retries 3x; second call should skip unhealthy primary)", calls)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMetadataRunnerSummarizeFallsBack(t *testing.T) {
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.URL.Path != "/chat/completions" {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var req struct {
|
||||||
|
Model string `json:"model"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch req.Model {
|
||||||
|
case "sum-primary":
|
||||||
|
http.Error(w, "provider error", http.StatusBadGateway)
|
||||||
|
case "sum-fallback":
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||||
|
"choices": []map[string]any{{
|
||||||
|
"message": map[string]any{"role": "assistant", "content": "fallback summary"},
|
||||||
|
}},
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
http.Error(w, "unknown model", http.StatusBadRequest)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
reg, err := NewRegistry(map[string]config.ProviderConfig{
|
||||||
|
"p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"},
|
||||||
|
"p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"},
|
||||||
|
}, srv.Client(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewRegistry() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
runner, err := NewMetadataRunner(reg, []config.RoleTarget{
|
||||||
|
{Provider: "p1", Model: "sum-primary"},
|
||||||
|
{Provider: "p2", Model: "sum-fallback"},
|
||||||
|
}, 0.1, false, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewMetadataRunner() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
summary, err := runner.Summarize(context.Background(), "system", "user")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Summarize() error = %v", err)
|
||||||
|
}
|
||||||
|
if summary != "fallback summary" {
|
||||||
|
t.Fatalf("summary = %q, want %q", summary, "fallback summary")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -34,7 +34,7 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
|
|
||||||
logger.Info("loaded configuration",
|
logger.Info("loaded configuration",
|
||||||
slog.String("path", loadedFrom),
|
slog.String("path", loadedFrom),
|
||||||
slog.String("provider", cfg.AI.Provider),
|
slog.Int("config_version", cfg.Version),
|
||||||
slog.String("version", info.Version),
|
slog.String("version", info.Version),
|
||||||
slog.String("tag_name", info.TagName),
|
slog.String("tag_name", info.TagName),
|
||||||
slog.String("build_date", info.BuildDate),
|
slog.String("build_date", info.BuildDate),
|
||||||
@@ -52,11 +52,37 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
httpClient := &http.Client{Timeout: 30 * time.Second}
|
httpClient := &http.Client{Timeout: 30 * time.Second}
|
||||||
provider, err := ai.NewProvider(cfg.AI, httpClient, logger)
|
registry, err := ai.NewRegistry(cfg.AI.Providers, httpClient, logger)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
foregroundEmbeddings, err := ai.NewEmbeddingRunner(registry, cfg.AI.Embeddings.Chain(), cfg.AI.Embeddings.Dimensions, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
foregroundMetadata, err := ai.NewMetadataRunner(registry, cfg.AI.Metadata.Chain(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
backgroundEmbeddings := foregroundEmbeddings
|
||||||
|
backgroundMetadata := foregroundMetadata
|
||||||
|
if cfg.AI.Background != nil {
|
||||||
|
if cfg.AI.Background.Embeddings != nil {
|
||||||
|
backgroundEmbeddings, err = ai.NewEmbeddingRunner(registry, cfg.AI.Background.Embeddings.AsTargets(), cfg.AI.Embeddings.Dimensions, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if cfg.AI.Background.Metadata != nil {
|
||||||
|
backgroundMetadata, err = ai.NewMetadataRunner(registry, cfg.AI.Background.Metadata.AsTargets(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
var keyring *auth.Keyring
|
var keyring *auth.Keyring
|
||||||
var oauthRegistry *auth.OAuthRegistry
|
var oauthRegistry *auth.OAuthRegistry
|
||||||
var tokenStore *auth.TokenStore
|
var tokenStore *auth.TokenStore
|
||||||
@@ -77,12 +103,13 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
dynClients := auth.NewDynamicClientStore()
|
dynClients := auth.NewDynamicClientStore()
|
||||||
activeProjects := session.NewActiveProjects()
|
activeProjects := session.NewActiveProjects()
|
||||||
|
|
||||||
logger.Info("database connection verified",
|
logger.Info("ai providers initialised",
|
||||||
slog.String("provider", provider.Name()),
|
slog.String("embedding_primary", foregroundEmbeddings.PrimaryProvider()+"/"+foregroundEmbeddings.PrimaryModel()),
|
||||||
|
slog.String("metadata_primary", foregroundMetadata.PrimaryProvider()+"/"+foregroundMetadata.PrimaryModel()),
|
||||||
)
|
)
|
||||||
|
|
||||||
if cfg.Backfill.Enabled && cfg.Backfill.RunOnStartup {
|
if cfg.Backfill.Enabled && cfg.Backfill.RunOnStartup {
|
||||||
go runBackfillPass(ctx, db, provider, cfg.Backfill, logger)
|
go runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.Backfill.Enabled && cfg.Backfill.Interval > 0 {
|
if cfg.Backfill.Enabled && cfg.Backfill.Interval > 0 {
|
||||||
@@ -94,14 +121,14 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return
|
return
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
runBackfillPass(ctx, db, provider, cfg.Backfill, logger)
|
runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.RunOnStartup {
|
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.RunOnStartup {
|
||||||
go runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger)
|
go runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.Interval > 0 {
|
if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.Interval > 0 {
|
||||||
@@ -113,13 +140,13 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return
|
return
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger)
|
runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
handler, err := routes(logger, cfg, info, db, provider, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects)
|
handler, err := routes(logger, cfg, info, db, foregroundEmbeddings, foregroundMetadata, backgroundEmbeddings, backgroundMetadata, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -156,48 +183,50 @@ func Run(ctx context.Context, configPath string) error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, provider ai.Provider, keyring *auth.Keyring, oauthRegistry *auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) {
|
func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, bgEmbeddings *ai.EmbeddingRunner, bgMetadata *ai.MetadataRunner, keyring *auth.Keyring, oauthRegistry *auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) {
|
||||||
mux := http.NewServeMux()
|
mux := http.NewServeMux()
|
||||||
accessTracker := auth.NewAccessTracker()
|
accessTracker := auth.NewAccessTracker()
|
||||||
oauthEnabled := oauthRegistry != nil && tokenStore != nil
|
oauthEnabled := oauthRegistry != nil && tokenStore != nil
|
||||||
authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger)
|
authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger)
|
||||||
filesTool := tools.NewFilesTool(db, activeProjects)
|
filesTool := tools.NewFilesTool(db, activeProjects)
|
||||||
metadataRetryer := tools.NewMetadataRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, bgMetadata, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||||
|
backfillTool := tools.NewBackfillTool(db, bgEmbeddings, activeProjects, logger)
|
||||||
|
|
||||||
toolSet := mcpserver.ToolSet{
|
toolSet := mcpserver.ToolSet{
|
||||||
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, metadataRetryer, logger),
|
Capture: tools.NewCaptureTool(db, embeddings, cfg.Capture, activeProjects, enrichmentRetryer, backfillTool),
|
||||||
Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
|
Search: tools.NewSearchTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
List: tools.NewListTool(db, cfg.Search, activeProjects),
|
List: tools.NewListTool(db, cfg.Search, activeProjects),
|
||||||
Stats: tools.NewStatsTool(db),
|
Stats: tools.NewStatsTool(db),
|
||||||
Get: tools.NewGetTool(db),
|
Get: tools.NewGetTool(db),
|
||||||
Update: tools.NewUpdateTool(db, provider, cfg.Capture, logger),
|
Update: tools.NewUpdateTool(db, embeddings, metadata, cfg.Capture, logger),
|
||||||
Delete: tools.NewDeleteTool(db),
|
Delete: tools.NewDeleteTool(db),
|
||||||
Archive: tools.NewArchiveTool(db),
|
Archive: tools.NewArchiveTool(db),
|
||||||
Projects: tools.NewProjectsTool(db, activeProjects),
|
Projects: tools.NewProjectsTool(db, activeProjects),
|
||||||
Version: tools.NewVersionTool(cfg.MCP.ServerName, info),
|
Version: tools.NewVersionTool(cfg.MCP.ServerName, info),
|
||||||
Context: tools.NewContextTool(db, provider, cfg.Search, activeProjects),
|
Learnings: tools.NewLearningsTool(db, activeProjects, cfg.Search),
|
||||||
Recall: tools.NewRecallTool(db, provider, cfg.Search, activeProjects),
|
Context: tools.NewContextTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
|
Recall: tools.NewRecallTool(db, embeddings, cfg.Search, activeProjects),
|
||||||
Links: tools.NewLinksTool(db, provider, cfg.Search),
|
Summarize: tools.NewSummarizeTool(db, embeddings, metadata, cfg.Search, activeProjects),
|
||||||
|
Links: tools.NewLinksTool(db, embeddings, cfg.Search),
|
||||||
Files: filesTool,
|
Files: filesTool,
|
||||||
Backfill: tools.NewBackfillTool(db, provider, activeProjects, logger),
|
Backfill: backfillTool,
|
||||||
Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger),
|
Reparse: tools.NewReparseMetadataTool(db, bgMetadata, cfg.Capture, activeProjects, logger),
|
||||||
RetryMetadata: tools.NewRetryMetadataTool(metadataRetryer),
|
RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer),
|
||||||
Household: tools.NewHouseholdTool(db),
|
|
||||||
Maintenance: tools.NewMaintenanceTool(db),
|
Maintenance: tools.NewMaintenanceTool(db),
|
||||||
Calendar: tools.NewCalendarTool(db),
|
|
||||||
Meals: tools.NewMealsTool(db),
|
|
||||||
CRM: tools.NewCRMTool(db),
|
|
||||||
Skills: tools.NewSkillsTool(db, activeProjects),
|
Skills: tools.NewSkillsTool(db, activeProjects),
|
||||||
ChatHistory: tools.NewChatHistoryTool(db, activeProjects),
|
ChatHistory: tools.NewChatHistoryTool(db, activeProjects),
|
||||||
Describe: tools.NewDescribeTool(db, mcpserver.BuildToolCatalog()),
|
Describe: tools.NewDescribeTool(db, mcpserver.BuildToolCatalog()),
|
||||||
}
|
}
|
||||||
|
|
||||||
mcpHandler, err := mcpserver.New(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
mcpHandlers, err := mcpserver.NewHandlers(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("build mcp handler: %w", err)
|
return nil, fmt.Errorf("build mcp handler: %w", err)
|
||||||
}
|
}
|
||||||
mux.Handle(cfg.MCP.Path, authMiddleware(mcpHandler))
|
mux.Handle(cfg.MCP.Path, authMiddleware(mcpHandlers.StreamableHTTP))
|
||||||
|
if mcpHandlers.SSE != nil {
|
||||||
|
mux.Handle(cfg.MCP.SSEPath, authMiddleware(mcpHandlers.SSE))
|
||||||
|
logger.Info("SSE transport enabled", slog.String("sse_path", cfg.MCP.SSEPath))
|
||||||
|
}
|
||||||
mux.Handle("/files", authMiddleware(fileHandler(filesTool)))
|
mux.Handle("/files", authMiddleware(fileHandler(filesTool)))
|
||||||
mux.Handle("/files/{id}", authMiddleware(fileHandler(filesTool)))
|
mux.Handle("/files/{id}", authMiddleware(fileHandler(filesTool)))
|
||||||
if oauthEnabled {
|
if oauthEnabled {
|
||||||
@@ -241,8 +270,8 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
|||||||
), nil
|
), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) {
|
func runMetadataRetryPass(ctx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) {
|
||||||
retryer := tools.NewMetadataRetryer(ctx, db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
retryer := tools.NewMetadataRetryer(ctx, db, metadataRunner, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||||
_, out, err := retryer.Handle(ctx, nil, tools.RetryMetadataInput{
|
_, out, err := retryer.Handle(ctx, nil, tools.RetryMetadataInput{
|
||||||
Limit: cfg.MetadataRetry.MaxPerRun,
|
Limit: cfg.MetadataRetry.MaxPerRun,
|
||||||
IncludeArchived: cfg.MetadataRetry.IncludeArchived,
|
IncludeArchived: cfg.MetadataRetry.IncludeArchived,
|
||||||
@@ -260,8 +289,8 @@ func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provide
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
func runBackfillPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg config.BackfillConfig, logger *slog.Logger) {
|
func runBackfillPass(ctx context.Context, db *store.DB, embeddings *ai.EmbeddingRunner, cfg config.BackfillConfig, logger *slog.Logger) {
|
||||||
backfiller := tools.NewBackfillTool(db, provider, nil, logger)
|
backfiller := tools.NewBackfillTool(db, embeddings, nil, logger)
|
||||||
_, out, err := backfiller.Handle(ctx, nil, tools.BackfillInput{
|
_, out, err := backfiller.Handle(ctx, nil, tools.BackfillInput{
|
||||||
Limit: cfg.MaxPerRun,
|
Limit: cfg.MaxPerRun,
|
||||||
IncludeArchived: cfg.IncludeArchived,
|
IncludeArchived: cfg.IncludeArchived,
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/auth"
|
"git.warky.dev/wdevs/amcs/internal/auth"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
// --- JSON types ---
|
// --- JSON types ---
|
||||||
@@ -261,7 +262,7 @@ func handleClientCredentials(w http.ResponseWriter, r *http.Request, oauthRegist
|
|||||||
}
|
}
|
||||||
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("oauth token: invalid client credentials", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth token: invalid client credentials", slog.String("remote_addr", requestip.FromRequest(r)))
|
||||||
w.Header().Set("WWW-Authenticate", `Basic realm="oauth"`)
|
w.Header().Set("WWW-Authenticate", `Basic realm="oauth"`)
|
||||||
writeTokenError(w, "invalid_client", http.StatusUnauthorized)
|
writeTokenError(w, "invalid_client", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
@@ -290,7 +291,7 @@ func handleAuthorizationCode(w http.ResponseWriter, r *http.Request, authCodes *
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !verifyPKCE(codeVerifier, entry.CodeChallenge, entry.CodeChallengeMethod) {
|
if !verifyPKCE(codeVerifier, entry.CodeChallenge, entry.CodeChallengeMethod) {
|
||||||
log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", requestip.FromRequest(r)))
|
||||||
writeTokenError(w, "invalid_grant", http.StatusBadRequest)
|
writeTokenError(w, "invalid_grant", http.StatusBadRequest)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,12 +7,12 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type AccessSnapshot struct {
|
type AccessSnapshot struct {
|
||||||
KeyID string
|
KeyID string `json:"key_id"`
|
||||||
LastPath string
|
LastPath string `json:"last_path"`
|
||||||
RemoteAddr string
|
RemoteAddr string `json:"remote_addr"`
|
||||||
UserAgent string
|
UserAgent string `json:"user_agent"`
|
||||||
RequestCount int
|
RequestCount int `json:"request_count"`
|
||||||
LastAccessedAt time.Time
|
LastAccessedAt time.Time `json:"last_accessed_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type AccessTracker struct {
|
type AccessTracker struct {
|
||||||
|
|||||||
@@ -157,3 +157,34 @@ func TestMiddlewareRejectsMissingOrInvalidKey(t *testing.T) {
|
|||||||
t.Fatalf("invalid key status = %d, want %d", rec.Code, http.StatusUnauthorized)
|
t.Fatalf("invalid key status = %d, want %d", rec.Code, http.StatusUnauthorized)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestMiddlewareRecordsForwardedRemoteAddr(t *testing.T) {
|
||||||
|
keyring, err := NewKeyring([]config.APIKey{{ID: "client-a", Value: "secret"}})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewKeyring() error = %v", err)
|
||||||
|
}
|
||||||
|
tracker := NewAccessTracker()
|
||||||
|
|
||||||
|
handler := Middleware(config.AuthConfig{HeaderName: "x-brain-key"}, keyring, nil, nil, tracker, testLogger())(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/mcp", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.5:2222"
|
||||||
|
req.Header.Set("x-brain-key", "secret")
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.99")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
snap := tracker.Snapshot()
|
||||||
|
if len(snap) != 1 {
|
||||||
|
t.Fatalf("len(snapshot) = %d, want 1", len(snap))
|
||||||
|
}
|
||||||
|
if snap[0].RemoteAddr != "203.0.113.99" {
|
||||||
|
t.Fatalf("snapshot remote_addr = %q, want %q", snap[0].RemoteAddr, "203.0.113.99")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
type contextKey string
|
type contextKey string
|
||||||
@@ -22,17 +23,18 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
}
|
}
|
||||||
recordAccess := func(r *http.Request, keyID string) {
|
recordAccess := func(r *http.Request, keyID string) {
|
||||||
if tracker != nil {
|
if tracker != nil {
|
||||||
tracker.Record(keyID, r.URL.Path, r.RemoteAddr, r.UserAgent(), time.Now())
|
tracker.Record(keyID, r.URL.Path, requestip.FromRequest(r), r.UserAgent(), time.Now())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
remoteAddr := requestip.FromRequest(r)
|
||||||
// 1. Custom header → keyring only.
|
// 1. Custom header → keyring only.
|
||||||
if keyring != nil {
|
if keyring != nil {
|
||||||
if token := strings.TrimSpace(r.Header.Get(headerName)); token != "" {
|
if token := strings.TrimSpace(r.Header.Get(headerName)); token != "" {
|
||||||
keyID, ok := keyring.Lookup(token)
|
keyID, ok := keyring.Lookup(token)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -58,7 +60,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
log.Warn("bearer token rejected", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("bearer token rejected", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid token or API key", http.StatusUnauthorized)
|
http.Error(w, "invalid token or API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -71,7 +73,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
}
|
}
|
||||||
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
keyID, ok := oauthRegistry.Lookup(clientID, clientSecret)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("oauth client authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("oauth client authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid OAuth client credentials", http.StatusUnauthorized)
|
http.Error(w, "invalid OAuth client credentials", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -85,7 +87,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg
|
|||||||
if token := strings.TrimSpace(r.URL.Query().Get(cfg.QueryParam)); token != "" {
|
if token := strings.TrimSpace(r.URL.Query().Get(cfg.QueryParam)); token != "" {
|
||||||
keyID, ok := keyring.Lookup(token)
|
keyID, ok := keyring.Lookup(token)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr))
|
log.Warn("authentication failed", slog.String("remote_addr", remoteAddr))
|
||||||
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
http.Error(w, "invalid API key", http.StatusUnauthorized)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
|
Version int `yaml:"version"`
|
||||||
Server ServerConfig `yaml:"server"`
|
Server ServerConfig `yaml:"server"`
|
||||||
MCP MCPConfig `yaml:"mcp"`
|
MCP MCPConfig `yaml:"mcp"`
|
||||||
Auth AuthConfig `yaml:"auth"`
|
Auth AuthConfig `yaml:"auth"`
|
||||||
@@ -32,15 +33,13 @@ type ServerConfig struct {
|
|||||||
|
|
||||||
type MCPConfig struct {
|
type MCPConfig struct {
|
||||||
Path string `yaml:"path"`
|
Path string `yaml:"path"`
|
||||||
|
SSEPath string `yaml:"sse_path"`
|
||||||
ServerName string `yaml:"server_name"`
|
ServerName string `yaml:"server_name"`
|
||||||
Version string `yaml:"version"`
|
Version string `yaml:"version"`
|
||||||
Transport string `yaml:"transport"`
|
Transport string `yaml:"transport"`
|
||||||
SessionTimeout time.Duration `yaml:"session_timeout"`
|
SessionTimeout time.Duration `yaml:"session_timeout"`
|
||||||
// PublicURL is the externally reachable base URL of this server (e.g. https://amcs.example.com).
|
PublicURL string `yaml:"public_url"`
|
||||||
// When set, it is used to build absolute icon URLs in the MCP server identity.
|
Instructions string `yaml:"-"`
|
||||||
PublicURL string `yaml:"public_url"`
|
|
||||||
// Instructions is set at startup from the embedded memory.md and sent to MCP clients on initialise.
|
|
||||||
Instructions string `yaml:"-"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type AuthConfig struct {
|
type AuthConfig struct {
|
||||||
@@ -76,52 +75,82 @@ type DatabaseConfig struct {
|
|||||||
MaxConnIdleTime time.Duration `yaml:"max_conn_idle_time"`
|
MaxConnIdleTime time.Duration `yaml:"max_conn_idle_time"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AIConfig (v2): named providers + per-role chains.
|
||||||
type AIConfig struct {
|
type AIConfig struct {
|
||||||
Provider string `yaml:"provider"`
|
Providers map[string]ProviderConfig `yaml:"providers"`
|
||||||
Embeddings AIEmbeddingConfig `yaml:"embeddings"`
|
Embeddings EmbeddingsRoleConfig `yaml:"embeddings"`
|
||||||
Metadata AIMetadataConfig `yaml:"metadata"`
|
Metadata MetadataRoleConfig `yaml:"metadata"`
|
||||||
LiteLLM LiteLLMConfig `yaml:"litellm"`
|
Background *BackgroundRolesConfig `yaml:"background,omitempty"`
|
||||||
Ollama OllamaConfig `yaml:"ollama"`
|
|
||||||
OpenRouter OpenRouterAIConfig `yaml:"openrouter"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type AIEmbeddingConfig struct {
|
type ProviderConfig struct {
|
||||||
Model string `yaml:"model"`
|
Type string `yaml:"type"`
|
||||||
Dimensions int `yaml:"dimensions"`
|
BaseURL string `yaml:"base_url"`
|
||||||
|
APIKey string `yaml:"api_key"`
|
||||||
|
RequestHeaders map[string]string `yaml:"request_headers,omitempty"`
|
||||||
|
AppName string `yaml:"app_name,omitempty"`
|
||||||
|
SiteURL string `yaml:"site_url,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type AIMetadataConfig struct {
|
type RoleTarget struct {
|
||||||
Model string `yaml:"model"`
|
Provider string `yaml:"provider"`
|
||||||
FallbackModels []string `yaml:"fallback_models"`
|
Model string `yaml:"model"`
|
||||||
FallbackModel string `yaml:"fallback_model"` // legacy single fallback
|
}
|
||||||
|
|
||||||
|
type RoleChain struct {
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type EmbeddingsRoleConfig struct {
|
||||||
|
Dimensions int `yaml:"dimensions"`
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type MetadataRoleConfig struct {
|
||||||
Temperature float64 `yaml:"temperature"`
|
Temperature float64 `yaml:"temperature"`
|
||||||
LogConversations bool `yaml:"log_conversations"`
|
LogConversations bool `yaml:"log_conversations"`
|
||||||
Timeout time.Duration `yaml:"timeout"`
|
Timeout time.Duration `yaml:"timeout"`
|
||||||
|
Primary RoleTarget `yaml:"primary"`
|
||||||
|
Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type LiteLLMConfig struct {
|
// BackgroundRolesConfig overrides the foreground chains for background workers
|
||||||
BaseURL string `yaml:"base_url"`
|
// (backfill_embeddings, metadata_retry, reparse_metadata). Either field may be
|
||||||
APIKey string `yaml:"api_key"`
|
// nil to inherit the foreground role unchanged.
|
||||||
UseResponsesAPI bool `yaml:"use_responses_api"`
|
type BackgroundRolesConfig struct {
|
||||||
RequestHeaders map[string]string `yaml:"request_headers"`
|
Embeddings *RoleChain `yaml:"embeddings,omitempty"`
|
||||||
EmbeddingModel string `yaml:"embedding_model"`
|
Metadata *RoleChain `yaml:"metadata,omitempty"`
|
||||||
MetadataModel string `yaml:"metadata_model"`
|
|
||||||
FallbackMetadataModels []string `yaml:"fallback_metadata_models"`
|
|
||||||
FallbackMetadataModel string `yaml:"fallback_metadata_model"` // legacy single fallback
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type OllamaConfig struct {
|
// Chain returns primary followed by fallbacks (deduped, blanks dropped).
|
||||||
BaseURL string `yaml:"base_url"`
|
func (e EmbeddingsRoleConfig) Chain() []RoleTarget {
|
||||||
APIKey string `yaml:"api_key"`
|
return dedupeTargets(append([]RoleTarget{e.Primary}, e.Fallbacks...))
|
||||||
RequestHeaders map[string]string `yaml:"request_headers"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type OpenRouterAIConfig struct {
|
func (m MetadataRoleConfig) Chain() []RoleTarget {
|
||||||
BaseURL string `yaml:"base_url"`
|
return dedupeTargets(append([]RoleTarget{m.Primary}, m.Fallbacks...))
|
||||||
APIKey string `yaml:"api_key"`
|
}
|
||||||
AppName string `yaml:"app_name"`
|
|
||||||
SiteURL string `yaml:"site_url"`
|
func (c RoleChain) AsTargets() []RoleTarget {
|
||||||
ExtraHeaders map[string]string `yaml:"extra_headers"`
|
return dedupeTargets(append([]RoleTarget{c.Primary}, c.Fallbacks...))
|
||||||
|
}
|
||||||
|
|
||||||
|
func dedupeTargets(in []RoleTarget) []RoleTarget {
|
||||||
|
out := make([]RoleTarget, 0, len(in))
|
||||||
|
seen := make(map[RoleTarget]struct{}, len(in))
|
||||||
|
for _, t := range in {
|
||||||
|
if t.Provider == "" || t.Model == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := seen[t]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[t] = struct{}{}
|
||||||
|
out = append(out, t)
|
||||||
|
}
|
||||||
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
type CaptureConfig struct {
|
type CaptureConfig struct {
|
||||||
@@ -166,45 +195,3 @@ type MetadataRetryConfig struct {
|
|||||||
MaxPerRun int `yaml:"max_per_run"`
|
MaxPerRun int `yaml:"max_per_run"`
|
||||||
IncludeArchived bool `yaml:"include_archived"`
|
IncludeArchived bool `yaml:"include_archived"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c AIMetadataConfig) EffectiveFallbackModels() []string {
|
|
||||||
models := make([]string, 0, len(c.FallbackModels)+1)
|
|
||||||
for _, model := range c.FallbackModels {
|
|
||||||
if model != "" {
|
|
||||||
models = append(models, model)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c.FallbackModel != "" {
|
|
||||||
models = append(models, c.FallbackModel)
|
|
||||||
}
|
|
||||||
return dedupeNonEmpty(models)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c LiteLLMConfig) EffectiveFallbackMetadataModels() []string {
|
|
||||||
models := make([]string, 0, len(c.FallbackMetadataModels)+1)
|
|
||||||
for _, model := range c.FallbackMetadataModels {
|
|
||||||
if model != "" {
|
|
||||||
models = append(models, model)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c.FallbackMetadataModel != "" {
|
|
||||||
models = append(models, c.FallbackMetadataModel)
|
|
||||||
}
|
|
||||||
return dedupeNonEmpty(models)
|
|
||||||
}
|
|
||||||
|
|
||||||
func dedupeNonEmpty(values []string) []string {
|
|
||||||
seen := make(map[string]struct{}, len(values))
|
|
||||||
out := make([]string, 0, len(values))
|
|
||||||
for _, value := range values {
|
|
||||||
if value == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := seen[value]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[value] = struct{}{}
|
|
||||||
out = append(out, value)
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package config
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
@@ -12,6 +13,12 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func Load(explicitPath string) (*Config, string, error) {
|
func Load(explicitPath string) (*Config, string, error) {
|
||||||
|
return LoadWithLogger(explicitPath, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadWithLogger is Load with a logger surface for migration notices. Passing
|
||||||
|
// nil is fine — migration events will simply not be logged.
|
||||||
|
func LoadWithLogger(explicitPath string, log *slog.Logger) (*Config, string, error) {
|
||||||
path := ResolvePath(explicitPath)
|
path := ResolvePath(explicitPath)
|
||||||
|
|
||||||
data, err := os.ReadFile(path)
|
data, err := os.ReadFile(path)
|
||||||
@@ -19,10 +26,38 @@ func Load(explicitPath string) (*Config, string, error) {
|
|||||||
return nil, path, fmt.Errorf("read config %q: %w", path, err)
|
return nil, path, fmt.Errorf("read config %q: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cfg := defaultConfig()
|
raw := map[string]any{}
|
||||||
if err := yaml.Unmarshal(data, &cfg); err != nil {
|
if err := yaml.Unmarshal(data, &raw); err != nil {
|
||||||
return nil, path, fmt.Errorf("decode config %q: %w", path, err)
|
return nil, path, fmt.Errorf("decode config %q: %w", path, err)
|
||||||
}
|
}
|
||||||
|
if raw == nil {
|
||||||
|
raw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := Migrate(raw)
|
||||||
|
if err != nil {
|
||||||
|
return nil, path, fmt.Errorf("migrate config %q: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(applied) > 0 {
|
||||||
|
if log != nil {
|
||||||
|
for _, step := range applied {
|
||||||
|
log.Warn("config migrated in memory",
|
||||||
|
slog.String("path", path),
|
||||||
|
slog.Int("from_version", step.From),
|
||||||
|
slog.Int("to_version", step.To),
|
||||||
|
slog.String("describe", step.Describe),
|
||||||
|
slog.String("hint", "persist with amcs-migrate-config"),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, err := decodeTyped(raw)
|
||||||
|
if err != nil {
|
||||||
|
return nil, path, fmt.Errorf("decode migrated config %q: %w", path, err)
|
||||||
|
}
|
||||||
|
cfg.Version = CurrentConfigVersion
|
||||||
|
|
||||||
applyEnvOverrides(&cfg)
|
applyEnvOverrides(&cfg)
|
||||||
if err := cfg.Validate(); err != nil {
|
if err := cfg.Validate(); err != nil {
|
||||||
@@ -32,6 +67,18 @@ func Load(explicitPath string) (*Config, string, error) {
|
|||||||
return &cfg, path, nil
|
return &cfg, path, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func decodeTyped(raw map[string]any) (Config, error) {
|
||||||
|
out, err := yaml.Marshal(raw)
|
||||||
|
if err != nil {
|
||||||
|
return Config{}, fmt.Errorf("re-marshal migrated config: %w", err)
|
||||||
|
}
|
||||||
|
cfg := defaultConfig()
|
||||||
|
if err := yaml.Unmarshal(out, &cfg); err != nil {
|
||||||
|
return Config{}, err
|
||||||
|
}
|
||||||
|
return cfg, nil
|
||||||
|
}
|
||||||
|
|
||||||
func ResolvePath(explicitPath string) string {
|
func ResolvePath(explicitPath string) string {
|
||||||
if path := strings.TrimSpace(explicitPath); path != "" {
|
if path := strings.TrimSpace(explicitPath); path != "" {
|
||||||
if path != ".yaml" && path != ".yml" {
|
if path != ".yaml" && path != ".yml" {
|
||||||
@@ -49,6 +96,7 @@ func ResolvePath(explicitPath string) string {
|
|||||||
func defaultConfig() Config {
|
func defaultConfig() Config {
|
||||||
info := buildinfo.Current()
|
info := buildinfo.Current()
|
||||||
return Config{
|
return Config{
|
||||||
|
Version: CurrentConfigVersion,
|
||||||
Server: ServerConfig{
|
Server: ServerConfig{
|
||||||
Host: "0.0.0.0",
|
Host: "0.0.0.0",
|
||||||
Port: 8080,
|
Port: 8080,
|
||||||
@@ -58,6 +106,7 @@ func defaultConfig() Config {
|
|||||||
},
|
},
|
||||||
MCP: MCPConfig{
|
MCP: MCPConfig{
|
||||||
Path: "/mcp",
|
Path: "/mcp",
|
||||||
|
SSEPath: "/sse",
|
||||||
ServerName: "amcs",
|
ServerName: "amcs",
|
||||||
Version: info.Version,
|
Version: info.Version,
|
||||||
Transport: "streamable_http",
|
Transport: "streamable_http",
|
||||||
@@ -68,20 +117,14 @@ func defaultConfig() Config {
|
|||||||
QueryParam: "key",
|
QueryParam: "key",
|
||||||
},
|
},
|
||||||
AI: AIConfig{
|
AI: AIConfig{
|
||||||
Provider: "litellm",
|
Providers: map[string]ProviderConfig{},
|
||||||
Embeddings: AIEmbeddingConfig{
|
Embeddings: EmbeddingsRoleConfig{
|
||||||
Model: "openai/text-embedding-3-small",
|
|
||||||
Dimensions: 1536,
|
Dimensions: 1536,
|
||||||
},
|
},
|
||||||
Metadata: AIMetadataConfig{
|
Metadata: MetadataRoleConfig{
|
||||||
Model: "gpt-4o-mini",
|
|
||||||
Temperature: 0.1,
|
Temperature: 0.1,
|
||||||
Timeout: 10 * time.Second,
|
Timeout: 10 * time.Second,
|
||||||
},
|
},
|
||||||
Ollama: OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
Capture: CaptureConfig{
|
Capture: CaptureConfig{
|
||||||
Source: DefaultSource,
|
Source: DefaultSource,
|
||||||
@@ -118,11 +161,12 @@ func defaultConfig() Config {
|
|||||||
func applyEnvOverrides(cfg *Config) {
|
func applyEnvOverrides(cfg *Config) {
|
||||||
overrideString(&cfg.Database.URL, "AMCS_DATABASE_URL")
|
overrideString(&cfg.Database.URL, "AMCS_DATABASE_URL")
|
||||||
overrideString(&cfg.MCP.PublicURL, "AMCS_PUBLIC_URL")
|
overrideString(&cfg.MCP.PublicURL, "AMCS_PUBLIC_URL")
|
||||||
overrideString(&cfg.AI.LiteLLM.BaseURL, "AMCS_LITELLM_BASE_URL")
|
|
||||||
overrideString(&cfg.AI.LiteLLM.APIKey, "AMCS_LITELLM_API_KEY")
|
overrideProviderField(cfg, "AMCS_LITELLM_BASE_URL", "litellm", func(p *ProviderConfig, v string) { p.BaseURL = v })
|
||||||
overrideString(&cfg.AI.Ollama.BaseURL, "AMCS_OLLAMA_BASE_URL")
|
overrideProviderField(cfg, "AMCS_LITELLM_API_KEY", "litellm", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
overrideString(&cfg.AI.Ollama.APIKey, "AMCS_OLLAMA_API_KEY")
|
overrideProviderField(cfg, "AMCS_OLLAMA_BASE_URL", "ollama", func(p *ProviderConfig, v string) { p.BaseURL = v })
|
||||||
overrideString(&cfg.AI.OpenRouter.APIKey, "AMCS_OPENROUTER_API_KEY")
|
overrideProviderField(cfg, "AMCS_OLLAMA_API_KEY", "ollama", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
|
overrideProviderField(cfg, "AMCS_OPENROUTER_API_KEY", "openrouter", func(p *ProviderConfig, v string) { p.APIKey = v })
|
||||||
|
|
||||||
if value, ok := os.LookupEnv("AMCS_SERVER_PORT"); ok {
|
if value, ok := os.LookupEnv("AMCS_SERVER_PORT"); ok {
|
||||||
if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil {
|
if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil {
|
||||||
@@ -131,6 +175,24 @@ func applyEnvOverrides(cfg *Config) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// overrideProviderField applies an env var to every configured provider of the
|
||||||
|
// given type. This preserves the v1 behaviour where e.g. AMCS_LITELLM_API_KEY
|
||||||
|
// rewrote the single litellm block — in v2 it rewrites every litellm provider.
|
||||||
|
func overrideProviderField(cfg *Config, envKey, providerType string, apply func(*ProviderConfig, string)) {
|
||||||
|
value, ok := os.LookupEnv(envKey)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
value = strings.TrimSpace(value)
|
||||||
|
for name, p := range cfg.AI.Providers {
|
||||||
|
if p.Type != providerType {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
apply(&p, value)
|
||||||
|
cfg.AI.Providers[name] = p
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func overrideString(target *string, envKey string) {
|
func overrideString(target *string, envKey string) {
|
||||||
if value, ok := os.LookupEnv(envKey); ok {
|
if value, ok := os.LookupEnv(envKey); ok {
|
||||||
*target = strings.TrimSpace(value)
|
*target = strings.TrimSpace(value)
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package config
|
|||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
@@ -31,9 +32,8 @@ func TestResolvePathIgnoresBareYAMLExtension(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadAppliesEnvOverrides(t *testing.T) {
|
const v2ConfigYAML = `
|
||||||
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
version: 2
|
||||||
if err := os.WriteFile(configPath, []byte(`
|
|
||||||
server:
|
server:
|
||||||
port: 8080
|
port: 8080
|
||||||
mcp:
|
mcp:
|
||||||
@@ -46,18 +46,30 @@ auth:
|
|||||||
database:
|
database:
|
||||||
url: "postgres://from-file"
|
url: "postgres://from-file"
|
||||||
ai:
|
ai:
|
||||||
provider: "litellm"
|
providers:
|
||||||
|
default:
|
||||||
|
type: "litellm"
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "file-key"
|
||||||
embeddings:
|
embeddings:
|
||||||
dimensions: 1536
|
dimensions: 1536
|
||||||
litellm:
|
primary:
|
||||||
base_url: "http://localhost:4000/v1"
|
provider: "default"
|
||||||
api_key: "file-key"
|
model: "text-embed"
|
||||||
|
metadata:
|
||||||
|
primary:
|
||||||
|
provider: "default"
|
||||||
|
model: "gpt-4"
|
||||||
search:
|
search:
|
||||||
default_limit: 10
|
default_limit: 10
|
||||||
max_limit: 50
|
max_limit: 50
|
||||||
logging:
|
logging:
|
||||||
level: "info"
|
level: "info"
|
||||||
`), 0o600); err != nil {
|
`
|
||||||
|
|
||||||
|
func TestLoadAppliesEnvOverrides(t *testing.T) {
|
||||||
|
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
||||||
|
if err := os.WriteFile(configPath, []byte(v2ConfigYAML), 0o600); err != nil {
|
||||||
t.Fatalf("write config: %v", err)
|
t.Fatalf("write config: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -76,8 +88,8 @@ logging:
|
|||||||
if cfg.Database.URL != "postgres://from-env" {
|
if cfg.Database.URL != "postgres://from-env" {
|
||||||
t.Fatalf("database url = %q, want env override", cfg.Database.URL)
|
t.Fatalf("database url = %q, want env override", cfg.Database.URL)
|
||||||
}
|
}
|
||||||
if cfg.AI.LiteLLM.APIKey != "env-key" {
|
if cfg.AI.Providers["default"].APIKey != "env-key" {
|
||||||
t.Fatalf("litellm api key = %q, want env override", cfg.AI.LiteLLM.APIKey)
|
t.Fatalf("litellm api key = %q, want env override", cfg.AI.Providers["default"].APIKey)
|
||||||
}
|
}
|
||||||
if cfg.Server.Port != 9090 {
|
if cfg.Server.Port != 9090 {
|
||||||
t.Fatalf("server port = %d, want 9090", cfg.Server.Port)
|
t.Fatalf("server port = %d, want 9090", cfg.Server.Port)
|
||||||
@@ -90,10 +102,12 @@ logging:
|
|||||||
func TestLoadAppliesOllamaEnvOverrides(t *testing.T) {
|
func TestLoadAppliesOllamaEnvOverrides(t *testing.T) {
|
||||||
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
configPath := filepath.Join(t.TempDir(), "test.yaml")
|
||||||
if err := os.WriteFile(configPath, []byte(`
|
if err := os.WriteFile(configPath, []byte(`
|
||||||
|
version: 2
|
||||||
server:
|
server:
|
||||||
port: 8080
|
port: 8080
|
||||||
mcp:
|
mcp:
|
||||||
path: "/mcp"
|
path: "/mcp"
|
||||||
|
session_timeout: "10m"
|
||||||
auth:
|
auth:
|
||||||
keys:
|
keys:
|
||||||
- id: "test"
|
- id: "test"
|
||||||
@@ -101,15 +115,20 @@ auth:
|
|||||||
database:
|
database:
|
||||||
url: "postgres://from-file"
|
url: "postgres://from-file"
|
||||||
ai:
|
ai:
|
||||||
provider: "ollama"
|
providers:
|
||||||
|
local:
|
||||||
|
type: "ollama"
|
||||||
|
base_url: "http://localhost:11434/v1"
|
||||||
|
api_key: "ollama"
|
||||||
embeddings:
|
embeddings:
|
||||||
model: "nomic-embed-text"
|
|
||||||
dimensions: 768
|
dimensions: 768
|
||||||
|
primary:
|
||||||
|
provider: "local"
|
||||||
|
model: "nomic-embed-text"
|
||||||
metadata:
|
metadata:
|
||||||
model: "llama3.2"
|
primary:
|
||||||
ollama:
|
provider: "local"
|
||||||
base_url: "http://localhost:11434/v1"
|
model: "llama3.2"
|
||||||
api_key: "ollama"
|
|
||||||
search:
|
search:
|
||||||
default_limit: 10
|
default_limit: 10
|
||||||
max_limit: 50
|
max_limit: 50
|
||||||
@@ -127,10 +146,85 @@ logging:
|
|||||||
t.Fatalf("Load() error = %v", err)
|
t.Fatalf("Load() error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.AI.Ollama.BaseURL != "https://ollama.example.com/v1" {
|
p := cfg.AI.Providers["local"]
|
||||||
t.Fatalf("ollama base url = %q, want env override", cfg.AI.Ollama.BaseURL)
|
if p.BaseURL != "https://ollama.example.com/v1" {
|
||||||
|
t.Fatalf("ollama base url = %q, want env override", p.BaseURL)
|
||||||
}
|
}
|
||||||
if cfg.AI.Ollama.APIKey != "remote-key" {
|
if p.APIKey != "remote-key" {
|
||||||
t.Fatalf("ollama api key = %q, want env override", cfg.AI.Ollama.APIKey)
|
t.Fatalf("ollama api key = %q, want env override", p.APIKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadMigratesV1Config(t *testing.T) {
|
||||||
|
configPath := filepath.Join(t.TempDir(), "v1.yaml")
|
||||||
|
v1 := `
|
||||||
|
server:
|
||||||
|
port: 8080
|
||||||
|
mcp:
|
||||||
|
path: "/mcp"
|
||||||
|
session_timeout: "10m"
|
||||||
|
auth:
|
||||||
|
keys:
|
||||||
|
- id: "test"
|
||||||
|
value: "secret"
|
||||||
|
database:
|
||||||
|
url: "postgres://from-file"
|
||||||
|
ai:
|
||||||
|
provider: "litellm"
|
||||||
|
embeddings:
|
||||||
|
model: "text-embed"
|
||||||
|
dimensions: 1536
|
||||||
|
metadata:
|
||||||
|
model: "gpt-4"
|
||||||
|
temperature: 0.2
|
||||||
|
fallback_models: ["gpt-3.5"]
|
||||||
|
litellm:
|
||||||
|
base_url: "http://localhost:4000/v1"
|
||||||
|
api_key: "file-key"
|
||||||
|
search:
|
||||||
|
default_limit: 10
|
||||||
|
max_limit: 50
|
||||||
|
logging:
|
||||||
|
level: "info"
|
||||||
|
`
|
||||||
|
if err := os.WriteFile(configPath, []byte(v1), 0o600); err != nil {
|
||||||
|
t.Fatalf("write config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, _, err := Load(configPath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Load() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.Version != CurrentConfigVersion {
|
||||||
|
t.Fatalf("version = %d, want %d", cfg.Version, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
if p, ok := cfg.AI.Providers["default"]; !ok || p.Type != "litellm" || p.APIKey != "file-key" {
|
||||||
|
t.Fatalf("providers[default] = %+v, want litellm/file-key", p)
|
||||||
|
}
|
||||||
|
if cfg.AI.Embeddings.Primary.Model != "text-embed" || cfg.AI.Embeddings.Primary.Provider != "default" {
|
||||||
|
t.Fatalf("embeddings.primary = %+v, want default/text-embed", cfg.AI.Embeddings.Primary)
|
||||||
|
}
|
||||||
|
if cfg.AI.Metadata.Primary.Model != "gpt-4" || cfg.AI.Metadata.Primary.Provider != "default" {
|
||||||
|
t.Fatalf("metadata.primary = %+v, want default/gpt-4", cfg.AI.Metadata.Primary)
|
||||||
|
}
|
||||||
|
if len(cfg.AI.Metadata.Fallbacks) != 1 || cfg.AI.Metadata.Fallbacks[0].Model != "gpt-3.5" {
|
||||||
|
t.Fatalf("metadata.fallbacks = %+v, want [default/gpt-3.5]", cfg.AI.Metadata.Fallbacks)
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := filepath.Glob(configPath + ".bak.*")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("glob backups: %v", err)
|
||||||
|
}
|
||||||
|
if len(entries) != 0 {
|
||||||
|
t.Fatalf("backup files = %d, want 0 (load should not rewrite config)", len(entries))
|
||||||
|
}
|
||||||
|
|
||||||
|
originalOnDisk, err := os.ReadFile(configPath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("read original config: %v", err)
|
||||||
|
}
|
||||||
|
if !strings.Contains(string(originalOnDisk), "provider: \"litellm\"") {
|
||||||
|
t.Fatalf("expected source config to remain unchanged on disk")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
341
internal/config/migrate.go
Normal file
341
internal/config/migrate.go
Normal file
@@ -0,0 +1,341 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CurrentConfigVersion is the schema version this binary expects. Files at a
|
||||||
|
// lower version are migrated automatically when loaded.
|
||||||
|
const CurrentConfigVersion = 2
|
||||||
|
|
||||||
|
// ConfigMigration upgrades a raw YAML map by one version.
|
||||||
|
type ConfigMigration struct {
|
||||||
|
From, To int
|
||||||
|
Describe string
|
||||||
|
Apply func(map[string]any) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrations is the ordered ladder of upgrades. Add new entries at the end.
|
||||||
|
var migrations = []ConfigMigration{
|
||||||
|
{From: 1, To: 2, Describe: "named providers + role chains", Apply: migrateV1toV2},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate brings raw up to CurrentConfigVersion in place. Returns the list of
|
||||||
|
// migrations that were applied (may be empty if already current).
|
||||||
|
func Migrate(raw map[string]any) ([]ConfigMigration, error) {
|
||||||
|
if raw == nil {
|
||||||
|
return nil, fmt.Errorf("migrate: raw config is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
version := readVersion(raw)
|
||||||
|
if version > CurrentConfigVersion {
|
||||||
|
return nil, fmt.Errorf("migrate: config version %d is newer than supported version %d", version, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
applied := make([]ConfigMigration, 0)
|
||||||
|
for {
|
||||||
|
if version >= CurrentConfigVersion {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
step, ok := findMigration(version)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("migrate: no migration registered from version %d", version)
|
||||||
|
}
|
||||||
|
if err := step.Apply(raw); err != nil {
|
||||||
|
return nil, fmt.Errorf("migrate v%d->v%d: %w", step.From, step.To, err)
|
||||||
|
}
|
||||||
|
raw["version"] = step.To
|
||||||
|
version = step.To
|
||||||
|
applied = append(applied, step)
|
||||||
|
}
|
||||||
|
return applied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findMigration(from int) (ConfigMigration, bool) {
|
||||||
|
for _, m := range migrations {
|
||||||
|
if m.From == from {
|
||||||
|
return m, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ConfigMigration{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// readVersion returns the version from raw. Files without a version field are
|
||||||
|
// treated as version 1 (the original schema).
|
||||||
|
func readVersion(raw map[string]any) int {
|
||||||
|
v, ok := raw["version"]
|
||||||
|
if !ok {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
switch n := v.(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateV1toV2 lifts the single-provider config into the named-providers +
|
||||||
|
// role-chains layout. The pre-v2 config implicitly used one provider for both
|
||||||
|
// embeddings and metadata; we materialise that as a provider named "default".
|
||||||
|
func migrateV1toV2(raw map[string]any) error {
|
||||||
|
aiRaw := mapValue(raw, "ai")
|
||||||
|
if aiRaw == nil {
|
||||||
|
aiRaw = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
providerType := stringValue(aiRaw, "provider")
|
||||||
|
if providerType == "" {
|
||||||
|
providerType = "litellm"
|
||||||
|
}
|
||||||
|
|
||||||
|
providers, embeddingModel, metadataModel, fallbackModels := buildV1Provider(aiRaw, providerType)
|
||||||
|
|
||||||
|
embeddingsOld := mapValue(aiRaw, "embeddings")
|
||||||
|
dimensions := intValue(embeddingsOld, "dimensions")
|
||||||
|
if dimensions <= 0 {
|
||||||
|
dimensions = 1536
|
||||||
|
}
|
||||||
|
if embeddingModel == "" {
|
||||||
|
embeddingModel = stringValue(embeddingsOld, "model")
|
||||||
|
}
|
||||||
|
|
||||||
|
metadataOld := mapValue(aiRaw, "metadata")
|
||||||
|
if metadataModel == "" {
|
||||||
|
metadataModel = stringValue(metadataOld, "model")
|
||||||
|
}
|
||||||
|
temperature := floatValue(metadataOld, "temperature")
|
||||||
|
logConversations := boolValue(metadataOld, "log_conversations")
|
||||||
|
timeoutStr := stringValue(metadataOld, "timeout")
|
||||||
|
|
||||||
|
if list := stringListValue(metadataOld, "fallback_models"); len(list) > 0 {
|
||||||
|
fallbackModels = append(fallbackModels, list...)
|
||||||
|
}
|
||||||
|
if v := stringValue(metadataOld, "fallback_model"); v != "" {
|
||||||
|
fallbackModels = append(fallbackModels, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
embeddings := map[string]any{
|
||||||
|
"dimensions": dimensions,
|
||||||
|
"primary": map[string]any{"provider": "default", "model": embeddingModel},
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata := map[string]any{
|
||||||
|
"temperature": temperature,
|
||||||
|
"log_conversations": logConversations,
|
||||||
|
"primary": map[string]any{"provider": "default", "model": metadataModel},
|
||||||
|
}
|
||||||
|
if timeoutStr != "" {
|
||||||
|
metadata["timeout"] = timeoutStr
|
||||||
|
}
|
||||||
|
if fallbacks := chainTargets("default", fallbackModels); len(fallbacks) > 0 {
|
||||||
|
metadata["fallbacks"] = fallbacks
|
||||||
|
}
|
||||||
|
|
||||||
|
raw["ai"] = map[string]any{
|
||||||
|
"providers": providers,
|
||||||
|
"embeddings": embeddings,
|
||||||
|
"metadata": metadata,
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildV1Provider(aiRaw map[string]any, providerType string) (map[string]any, string, string, []string) {
|
||||||
|
providers := map[string]any{}
|
||||||
|
defaultEntry := map[string]any{"type": providerType}
|
||||||
|
embedModel := ""
|
||||||
|
metaModel := ""
|
||||||
|
var fallbacks []string
|
||||||
|
|
||||||
|
switch providerType {
|
||||||
|
case "litellm":
|
||||||
|
block := mapValue(aiRaw, "litellm")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key")
|
||||||
|
copyHeaders(defaultEntry, block, "request_headers")
|
||||||
|
embedModel = stringValue(block, "embedding_model")
|
||||||
|
metaModel = stringValue(block, "metadata_model")
|
||||||
|
if list := stringListValue(block, "fallback_metadata_models"); len(list) > 0 {
|
||||||
|
fallbacks = append(fallbacks, list...)
|
||||||
|
}
|
||||||
|
if v := stringValue(block, "fallback_metadata_model"); v != "" {
|
||||||
|
fallbacks = append(fallbacks, v)
|
||||||
|
}
|
||||||
|
case "ollama":
|
||||||
|
block := mapValue(aiRaw, "ollama")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key")
|
||||||
|
copyHeaders(defaultEntry, block, "request_headers")
|
||||||
|
case "openrouter":
|
||||||
|
block := mapValue(aiRaw, "openrouter")
|
||||||
|
copyKeys(defaultEntry, block, "base_url", "api_key", "app_name", "site_url")
|
||||||
|
copyHeaders(defaultEntry, block, "extra_headers")
|
||||||
|
// rename: extra_headers → request_headers
|
||||||
|
if hdr, ok := defaultEntry["extra_headers"]; ok {
|
||||||
|
defaultEntry["request_headers"] = hdr
|
||||||
|
delete(defaultEntry, "extra_headers")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
providers["default"] = defaultEntry
|
||||||
|
return providers, embedModel, metaModel, fallbacks
|
||||||
|
}
|
||||||
|
|
||||||
|
func chainTargets(provider string, models []string) []any {
|
||||||
|
out := make([]any, 0, len(models))
|
||||||
|
seen := map[string]struct{}{}
|
||||||
|
for _, m := range models {
|
||||||
|
if m == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
key := provider + "|" + m
|
||||||
|
if _, ok := seen[key]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[key] = struct{}{}
|
||||||
|
out = append(out, map[string]any{"provider": provider, "model": m})
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapValue(raw map[string]any, key string) map[string]any {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch m := v.(type) {
|
||||||
|
case map[string]any:
|
||||||
|
return m
|
||||||
|
case map[any]any:
|
||||||
|
return convertAnyMap(m)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func convertAnyMap(in map[any]any) map[string]any {
|
||||||
|
out := make(map[string]any, len(in))
|
||||||
|
keys := make([]string, 0, len(in))
|
||||||
|
for k, v := range in {
|
||||||
|
ks, ok := k.(string)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
keys = append(keys, ks)
|
||||||
|
out[ks] = v
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func stringValue(raw map[string]any, key string) string {
|
||||||
|
if raw == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
if s, ok := v.(string); ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func intValue(raw map[string]any, key string) int {
|
||||||
|
if raw == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
switch n := raw[key].(type) {
|
||||||
|
case int:
|
||||||
|
return n
|
||||||
|
case int64:
|
||||||
|
return int(n)
|
||||||
|
case float64:
|
||||||
|
return int(n)
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func floatValue(raw map[string]any, key string) float64 {
|
||||||
|
if raw == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
switch n := raw[key].(type) {
|
||||||
|
case float64:
|
||||||
|
return n
|
||||||
|
case int:
|
||||||
|
return float64(n)
|
||||||
|
case int64:
|
||||||
|
return float64(n)
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func boolValue(raw map[string]any, key string) bool {
|
||||||
|
if raw == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if b, ok := raw[key].(bool); ok {
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func stringListValue(raw map[string]any, key string) []string {
|
||||||
|
if raw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
v, ok := raw[key]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
list, ok := v.([]any)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := make([]string, 0, len(list))
|
||||||
|
for _, item := range list {
|
||||||
|
if s, ok := item.(string); ok && s != "" {
|
||||||
|
out = append(out, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyKeys(dst, src map[string]any, keys ...string) {
|
||||||
|
if src == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for _, k := range keys {
|
||||||
|
if v, ok := src[k]; ok {
|
||||||
|
dst[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyHeaders(dst, src map[string]any, key string) {
|
||||||
|
if src == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
v, ok := src[key]
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch headers := v.(type) {
|
||||||
|
case map[string]any:
|
||||||
|
if len(headers) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
dst[key] = headers
|
||||||
|
case map[any]any:
|
||||||
|
if len(headers) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
dst[key] = convertAnyMap(headers)
|
||||||
|
}
|
||||||
|
}
|
||||||
77
internal/config/migrate_test.go
Normal file
77
internal/config/migrate_test.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestMigrateV1ToV2Litellm(t *testing.T) {
|
||||||
|
raw := map[string]any{
|
||||||
|
"ai": map[string]any{
|
||||||
|
"provider": "litellm",
|
||||||
|
"embeddings": map[string]any{
|
||||||
|
"model": "text-embedding-3-small",
|
||||||
|
"dimensions": 1536,
|
||||||
|
},
|
||||||
|
"metadata": map[string]any{
|
||||||
|
"model": "gpt-4o-mini",
|
||||||
|
"temperature": 0.2,
|
||||||
|
"fallback_models": []any{"gpt-4.1-mini"},
|
||||||
|
},
|
||||||
|
"litellm": map[string]any{
|
||||||
|
"base_url": "http://localhost:4000/v1",
|
||||||
|
"api_key": "secret",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
applied, err := Migrate(raw)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Migrate() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(applied) != 1 || applied[0].From != 1 || applied[0].To != 2 {
|
||||||
|
t.Fatalf("applied = %+v, want [v1->v2]", applied)
|
||||||
|
}
|
||||||
|
if got := readVersion(raw); got != CurrentConfigVersion {
|
||||||
|
t.Fatalf("version = %d, want %d", got, CurrentConfigVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
ai := mapValue(raw, "ai")
|
||||||
|
providers := mapValue(ai, "providers")
|
||||||
|
def := mapValue(providers, "default")
|
||||||
|
if got := stringValue(def, "type"); got != "litellm" {
|
||||||
|
t.Fatalf("providers.default.type = %q, want litellm", got)
|
||||||
|
}
|
||||||
|
if got := stringValue(def, "base_url"); got != "http://localhost:4000/v1" {
|
||||||
|
t.Fatalf("providers.default.base_url = %q", got)
|
||||||
|
}
|
||||||
|
|
||||||
|
emb := mapValue(ai, "embeddings")
|
||||||
|
embPrimary := mapValue(emb, "primary")
|
||||||
|
if stringValue(embPrimary, "provider") != "default" || stringValue(embPrimary, "model") != "text-embedding-3-small" {
|
||||||
|
t.Fatalf("embeddings.primary = %+v, want default/text-embedding-3-small", embPrimary)
|
||||||
|
}
|
||||||
|
|
||||||
|
meta := mapValue(ai, "metadata")
|
||||||
|
metaPrimary := mapValue(meta, "primary")
|
||||||
|
if stringValue(metaPrimary, "provider") != "default" || stringValue(metaPrimary, "model") != "gpt-4o-mini" {
|
||||||
|
t.Fatalf("metadata.primary = %+v, want default/gpt-4o-mini", metaPrimary)
|
||||||
|
}
|
||||||
|
fallbacks, ok := meta["fallbacks"].([]any)
|
||||||
|
if !ok || len(fallbacks) != 1 {
|
||||||
|
t.Fatalf("metadata.fallbacks = %#v, want len=1", meta["fallbacks"])
|
||||||
|
}
|
||||||
|
firstFallback, ok := fallbacks[0].(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
t.Fatalf("metadata.fallbacks[0] type = %T, want map[string]any", fallbacks[0])
|
||||||
|
}
|
||||||
|
if stringValue(firstFallback, "provider") != "default" || stringValue(firstFallback, "model") != "gpt-4.1-mini" {
|
||||||
|
t.Fatalf("metadata fallback = %+v, want default/gpt-4.1-mini", firstFallback)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMigrateRejectsNewerVersion(t *testing.T) {
|
||||||
|
raw := map[string]any{"version": CurrentConfigVersion + 1}
|
||||||
|
|
||||||
|
_, err := Migrate(raw)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Migrate() error = nil, want error for newer config version")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -33,42 +33,20 @@ func (c Config) Validate() error {
|
|||||||
if strings.TrimSpace(c.MCP.Path) == "" {
|
if strings.TrimSpace(c.MCP.Path) == "" {
|
||||||
return fmt.Errorf("invalid config: mcp.path is required")
|
return fmt.Errorf("invalid config: mcp.path is required")
|
||||||
}
|
}
|
||||||
|
if c.MCP.SSEPath != "" {
|
||||||
|
if strings.TrimSpace(c.MCP.SSEPath) == "" {
|
||||||
|
return fmt.Errorf("invalid config: mcp.sse_path must not be blank whitespace")
|
||||||
|
}
|
||||||
|
if c.MCP.SSEPath == c.MCP.Path {
|
||||||
|
return fmt.Errorf("invalid config: mcp.sse_path %q must differ from mcp.path", c.MCP.SSEPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
if c.MCP.SessionTimeout <= 0 {
|
if c.MCP.SessionTimeout <= 0 {
|
||||||
return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero")
|
return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero")
|
||||||
}
|
}
|
||||||
|
|
||||||
switch c.AI.Provider {
|
if err := c.AI.validate(); err != nil {
|
||||||
case "litellm", "ollama", "openrouter":
|
return err
|
||||||
default:
|
|
||||||
return fmt.Errorf("invalid config: unsupported ai.provider %q", c.AI.Provider)
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.AI.Embeddings.Dimensions <= 0 {
|
|
||||||
return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero")
|
|
||||||
}
|
|
||||||
|
|
||||||
switch c.AI.Provider {
|
|
||||||
case "litellm":
|
|
||||||
if strings.TrimSpace(c.AI.LiteLLM.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.litellm.base_url is required when ai.provider=litellm")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.LiteLLM.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.litellm.api_key is required when ai.provider=litellm")
|
|
||||||
}
|
|
||||||
case "ollama":
|
|
||||||
if strings.TrimSpace(c.AI.Ollama.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.ollama.base_url is required when ai.provider=ollama")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.Ollama.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.ollama.api_key is required when ai.provider=ollama")
|
|
||||||
}
|
|
||||||
case "openrouter":
|
|
||||||
if strings.TrimSpace(c.AI.OpenRouter.BaseURL) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.openrouter.base_url is required when ai.provider=openrouter")
|
|
||||||
}
|
|
||||||
if strings.TrimSpace(c.AI.OpenRouter.APIKey) == "" {
|
|
||||||
return fmt.Errorf("invalid config: ai.openrouter.api_key is required when ai.provider=openrouter")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if c.Server.Port <= 0 {
|
if c.Server.Port <= 0 {
|
||||||
@@ -100,3 +78,61 @@ func (c Config) Validate() error {
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a AIConfig) validate() error {
|
||||||
|
if len(a.Providers) == 0 {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers must contain at least one entry")
|
||||||
|
}
|
||||||
|
for name, p := range a.Providers {
|
||||||
|
if strings.TrimSpace(name) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers contains an entry with an empty name")
|
||||||
|
}
|
||||||
|
switch p.Type {
|
||||||
|
case "litellm", "ollama", "openrouter":
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.type %q is not supported", name, p.Type)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(p.BaseURL) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.base_url is required", name)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(p.APIKey) == "" {
|
||||||
|
return fmt.Errorf("invalid config: ai.providers.%s.api_key is required", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.Embeddings.Dimensions <= 0 {
|
||||||
|
return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := a.validateChain("ai.embeddings", a.Embeddings.Chain()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := a.validateChain("ai.metadata", a.Metadata.Chain()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if a.Background != nil {
|
||||||
|
if a.Background.Embeddings != nil {
|
||||||
|
if err := a.validateChain("ai.background.embeddings", a.Background.Embeddings.AsTargets()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if a.Background.Metadata != nil {
|
||||||
|
if err := a.validateChain("ai.background.metadata", a.Background.Metadata.AsTargets()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a AIConfig) validateChain(prefix string, chain []RoleTarget) error {
|
||||||
|
if len(chain) == 0 {
|
||||||
|
return fmt.Errorf("invalid config: %s.primary must reference a configured provider and model", prefix)
|
||||||
|
}
|
||||||
|
for i, target := range chain {
|
||||||
|
if _, ok := a.Providers[target.Provider]; !ok {
|
||||||
|
return fmt.Errorf("invalid config: %s[%d] references unknown provider %q", prefix, i, target.Provider)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -7,28 +7,23 @@ import (
|
|||||||
|
|
||||||
func validConfig() Config {
|
func validConfig() Config {
|
||||||
return Config{
|
return Config{
|
||||||
Server: ServerConfig{Port: 8080},
|
Version: CurrentConfigVersion,
|
||||||
MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute},
|
Server: ServerConfig{Port: 8080},
|
||||||
|
MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute},
|
||||||
Auth: AuthConfig{
|
Auth: AuthConfig{
|
||||||
Keys: []APIKey{{ID: "test", Value: "secret"}},
|
Keys: []APIKey{{ID: "test", Value: "secret"}},
|
||||||
},
|
},
|
||||||
Database: DatabaseConfig{URL: "postgres://example"},
|
Database: DatabaseConfig{URL: "postgres://example"},
|
||||||
AI: AIConfig{
|
AI: AIConfig{
|
||||||
Provider: "litellm",
|
Providers: map[string]ProviderConfig{
|
||||||
Embeddings: AIEmbeddingConfig{
|
"default": {Type: "litellm", BaseURL: "http://localhost:4000/v1", APIKey: "key"},
|
||||||
|
},
|
||||||
|
Embeddings: EmbeddingsRoleConfig{
|
||||||
Dimensions: 1536,
|
Dimensions: 1536,
|
||||||
|
Primary: RoleTarget{Provider: "default", Model: "text-embed"},
|
||||||
},
|
},
|
||||||
LiteLLM: LiteLLMConfig{
|
Metadata: MetadataRoleConfig{
|
||||||
BaseURL: "http://localhost:4000/v1",
|
Primary: RoleTarget{Provider: "default", Model: "gpt-4"},
|
||||||
APIKey: "key",
|
|
||||||
},
|
|
||||||
Ollama: OllamaConfig{
|
|
||||||
BaseURL: "http://localhost:11434/v1",
|
|
||||||
APIKey: "ollama",
|
|
||||||
},
|
|
||||||
OpenRouter: OpenRouterAIConfig{
|
|
||||||
BaseURL: "https://openrouter.ai/api/v1",
|
|
||||||
APIKey: "key",
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Search: SearchConfig{DefaultLimit: 10, MaxLimit: 50},
|
Search: SearchConfig{DefaultLimit: 10, MaxLimit: 50},
|
||||||
@@ -36,29 +31,44 @@ func validConfig() Config {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateAcceptsSupportedProviders(t *testing.T) {
|
func TestValidateAcceptsSupportedProviderTypes(t *testing.T) {
|
||||||
cfg := validConfig()
|
for _, providerType := range []string{"litellm", "ollama", "openrouter"} {
|
||||||
if err := cfg.Validate(); err != nil {
|
cfg := validConfig()
|
||||||
t.Fatalf("Validate litellm error = %v", err)
|
p := cfg.AI.Providers["default"]
|
||||||
}
|
p.Type = providerType
|
||||||
|
cfg.AI.Providers["default"] = p
|
||||||
cfg.AI.Provider = "ollama"
|
if err := cfg.Validate(); err != nil {
|
||||||
if err := cfg.Validate(); err != nil {
|
t.Fatalf("Validate %s error = %v", providerType, err)
|
||||||
t.Fatalf("Validate ollama error = %v", err)
|
}
|
||||||
}
|
|
||||||
|
|
||||||
cfg.AI.Provider = "openrouter"
|
|
||||||
if err := cfg.Validate(); err != nil {
|
|
||||||
t.Fatalf("Validate openrouter error = %v", err)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateRejectsInvalidProvider(t *testing.T) {
|
func TestValidateRejectsInvalidProviderType(t *testing.T) {
|
||||||
cfg := validConfig()
|
cfg := validConfig()
|
||||||
cfg.AI.Provider = "unknown"
|
p := cfg.AI.Providers["default"]
|
||||||
|
p.Type = "unknown"
|
||||||
|
cfg.AI.Providers["default"] = p
|
||||||
|
|
||||||
if err := cfg.Validate(); err == nil {
|
if err := cfg.Validate(); err == nil {
|
||||||
t.Fatal("Validate() error = nil, want error for unsupported provider")
|
t.Fatal("Validate() error = nil, want error for unsupported provider type")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateRejectsChainWithUnknownProvider(t *testing.T) {
|
||||||
|
cfg := validConfig()
|
||||||
|
cfg.AI.Metadata.Primary = RoleTarget{Provider: "does-not-exist", Model: "x"}
|
||||||
|
|
||||||
|
if err := cfg.Validate(); err == nil {
|
||||||
|
t.Fatal("Validate() error = nil, want error for chain referencing unknown provider")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateRejectsEmptyProviders(t *testing.T) {
|
||||||
|
cfg := validConfig()
|
||||||
|
cfg.AI.Providers = map[string]ProviderConfig{}
|
||||||
|
|
||||||
|
if err := cfg.Validate(); err == nil {
|
||||||
|
t.Fatal("Validate() error = nil, want error for empty providers")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -221,12 +221,19 @@ func formatLogDuration(d time.Duration) string {
|
|||||||
return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds)
|
return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func normalizeObjectSchema(schema *jsonschema.Schema) {
|
||||||
|
if schema != nil && schema.Type == "object" && schema.Properties == nil {
|
||||||
|
schema.Properties = map[string]*jsonschema.Schema{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func setToolSchemas[In any, Out any](tool *mcp.Tool) error {
|
func setToolSchemas[In any, Out any](tool *mcp.Tool) error {
|
||||||
if tool.InputSchema == nil {
|
if tool.InputSchema == nil {
|
||||||
inputSchema, err := jsonschema.For[In](toolSchemaOptions)
|
inputSchema, err := jsonschema.For[In](toolSchemaOptions)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("infer input schema: %w", err)
|
return fmt.Errorf("infer input schema: %w", err)
|
||||||
}
|
}
|
||||||
|
normalizeObjectSchema(inputSchema)
|
||||||
tool.InputSchema = inputSchema
|
tool.InputSchema = inputSchema
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -13,6 +13,24 @@ import (
|
|||||||
"git.warky.dev/wdevs/amcs/internal/tools"
|
"git.warky.dev/wdevs/amcs/internal/tools"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func TestSetToolSchemasAddsEmptyPropertiesForNoArgInput(t *testing.T) {
|
||||||
|
type noArgInput struct{}
|
||||||
|
type anyOutput struct{}
|
||||||
|
|
||||||
|
tool := &mcp.Tool{Name: "no_args"}
|
||||||
|
if err := setToolSchemas[noArgInput, anyOutput](tool); err != nil {
|
||||||
|
t.Fatalf("set tool schemas: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schema, ok := tool.InputSchema.(*jsonschema.Schema)
|
||||||
|
if !ok {
|
||||||
|
t.Fatalf("input schema type = %T, want *jsonschema.Schema", tool.InputSchema)
|
||||||
|
}
|
||||||
|
if schema.Properties == nil {
|
||||||
|
t.Fatal("input schema missing properties: strict MCP clients require properties:{} on object schemas")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestSetToolSchemasUsesStringUUIDsInListOutput(t *testing.T) {
|
func TestSetToolSchemasUsesStringUUIDsInListOutput(t *testing.T) {
|
||||||
tool := &mcp.Tool{Name: "list_thoughts"}
|
tool := &mcp.Tool{Name: "list_thoughts"}
|
||||||
|
|
||||||
|
|||||||
@@ -35,18 +35,36 @@ type ToolSet struct {
|
|||||||
Files *tools.FilesTool
|
Files *tools.FilesTool
|
||||||
Backfill *tools.BackfillTool
|
Backfill *tools.BackfillTool
|
||||||
Reparse *tools.ReparseMetadataTool
|
Reparse *tools.ReparseMetadataTool
|
||||||
RetryMetadata *tools.RetryMetadataTool
|
RetryMetadata *tools.RetryEnrichmentTool
|
||||||
Household *tools.HouseholdTool
|
|
||||||
Maintenance *tools.MaintenanceTool
|
Maintenance *tools.MaintenanceTool
|
||||||
Calendar *tools.CalendarTool
|
|
||||||
Meals *tools.MealsTool
|
|
||||||
CRM *tools.CRMTool
|
|
||||||
Skills *tools.SkillsTool
|
Skills *tools.SkillsTool
|
||||||
ChatHistory *tools.ChatHistoryTool
|
ChatHistory *tools.ChatHistoryTool
|
||||||
Describe *tools.DescribeTool
|
Describe *tools.DescribeTool
|
||||||
|
Learnings *tools.LearningsTool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handlers groups the HTTP handlers produced for an MCP server instance.
|
||||||
|
type Handlers struct {
|
||||||
|
// StreamableHTTP is the primary MCP handler (always non-nil).
|
||||||
|
StreamableHTTP http.Handler
|
||||||
|
// SSE is the SSE transport handler; nil when SSEPath is empty.
|
||||||
|
// SSE is the de facto transport for MCP over the internet and is required by most hosted MCP clients.
|
||||||
|
SSE http.Handler
|
||||||
|
}
|
||||||
|
|
||||||
|
// New builds the StreamableHTTP MCP handler. It is a convenience wrapper
|
||||||
|
// around NewHandlers for callers that only need the primary transport.
|
||||||
func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionClosed func(string)) (http.Handler, error) {
|
func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionClosed func(string)) (http.Handler, error) {
|
||||||
|
h, err := NewHandlers(cfg, logger, toolSet, onSessionClosed)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return h.StreamableHTTP, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewHandlers builds MCP HTTP handlers for both transports.
|
||||||
|
// SSE is nil when cfg.SSEPath is empty.
|
||||||
|
func NewHandlers(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionClosed func(string)) (Handlers, error) {
|
||||||
instructions := cfg.Instructions
|
instructions := cfg.Instructions
|
||||||
if instructions == "" {
|
if instructions == "" {
|
||||||
instructions = string(amcsllm.MemoryInstructions)
|
instructions = string(amcsllm.MemoryInstructions)
|
||||||
@@ -66,18 +84,15 @@ func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionCl
|
|||||||
registerSystemTools,
|
registerSystemTools,
|
||||||
registerThoughtTools,
|
registerThoughtTools,
|
||||||
registerProjectTools,
|
registerProjectTools,
|
||||||
|
registerLearningTools,
|
||||||
registerFileTools,
|
registerFileTools,
|
||||||
registerMaintenanceTools,
|
registerMaintenanceTools,
|
||||||
registerHouseholdTools,
|
|
||||||
registerCalendarTools,
|
|
||||||
registerMealTools,
|
|
||||||
registerCRMTools,
|
|
||||||
registerSkillTools,
|
registerSkillTools,
|
||||||
registerChatHistoryTools,
|
registerChatHistoryTools,
|
||||||
registerDescribeTools,
|
registerDescribeTools,
|
||||||
} {
|
} {
|
||||||
if err := register(server, logger, toolSet); err != nil {
|
if err := register(server, logger, toolSet); err != nil {
|
||||||
return nil, err
|
return Handlers{}, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -89,9 +104,19 @@ func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionCl
|
|||||||
opts.EventStore = newCleanupEventStore(mcp.NewMemoryEventStore(nil), onSessionClosed)
|
opts.EventStore = newCleanupEventStore(mcp.NewMemoryEventStore(nil), onSessionClosed)
|
||||||
}
|
}
|
||||||
|
|
||||||
return mcp.NewStreamableHTTPHandler(func(*http.Request) *mcp.Server {
|
h := Handlers{
|
||||||
return server
|
StreamableHTTP: mcp.NewStreamableHTTPHandler(func(*http.Request) *mcp.Server {
|
||||||
}, opts), nil
|
return server
|
||||||
|
}, opts),
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.TrimSpace(cfg.SSEPath) != "" {
|
||||||
|
h.SSE = mcp.NewSSEHandler(func(*http.Request) *mcp.Server {
|
||||||
|
return server
|
||||||
|
}, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
return h, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildServerIcons returns icon definitions referencing the server's own /images/icon.png endpoint.
|
// buildServerIcons returns icon definitions referencing the server's own /images/icon.png endpoint.
|
||||||
@@ -109,7 +134,7 @@ func buildServerIcons(publicURL string) []mcp.Icon {
|
|||||||
func registerSystemTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerSystemTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "get_version_info",
|
Name: "get_version_info",
|
||||||
Description: "Return the server build version information, including version, tag name, commit, and build date.",
|
Description: "Build version, commit, and date.",
|
||||||
}, toolSet.Version.GetInfo); err != nil {
|
}, toolSet.Version.GetInfo); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -119,13 +144,13 @@ func registerSystemTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSe
|
|||||||
func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "capture_thought",
|
Name: "capture_thought",
|
||||||
Description: "Store a thought with generated embeddings and extracted metadata. The thought is saved immediately even if metadata extraction times out; pending thoughts are retried in the background.",
|
Description: "Store a thought; embeddings and metadata extracted async.",
|
||||||
}, toolSet.Capture.Handle); err != nil {
|
}, toolSet.Capture.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "search_thoughts",
|
Name: "search_thoughts",
|
||||||
Description: "Search stored thoughts by semantic similarity. Falls back to Postgres full-text search automatically when no embeddings exist for the active model.",
|
Description: "Semantic search; falls back to full-text if no embeddings.",
|
||||||
}, toolSet.Search.Handle); err != nil {
|
}, toolSet.Search.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -137,7 +162,7 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "thought_stats",
|
Name: "thought_stats",
|
||||||
Description: "Get counts and top metadata buckets across stored thoughts.",
|
Description: "Counts and top metadata buckets for stored thoughts.",
|
||||||
}, toolSet.Stats.Handle); err != nil {
|
}, toolSet.Stats.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -161,19 +186,19 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "archive_thought",
|
Name: "archive_thought",
|
||||||
Description: "Archive a thought so it is hidden from default search and listing.",
|
Description: "Hide a thought from default search and listing.",
|
||||||
}, toolSet.Archive.Handle); err != nil {
|
}, toolSet.Archive.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "summarize_thoughts",
|
Name: "summarize_thoughts",
|
||||||
Description: "Produce an LLM prose summary of a filtered or searched set of thoughts.",
|
Description: "LLM summary of a filtered set of thoughts.",
|
||||||
}, toolSet.Summarize.Handle); err != nil {
|
}, toolSet.Summarize.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "recall_context",
|
Name: "recall_context",
|
||||||
Description: "Recall semantically relevant and recent context for prompt injection. Combines vector similarity with recency. Falls back to full-text search when no embeddings exist.",
|
Description: "Semantic + recency context for prompt injection; falls back to full-text.",
|
||||||
}, toolSet.Recall.Handle); err != nil {
|
}, toolSet.Recall.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -185,7 +210,7 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "related_thoughts",
|
Name: "related_thoughts",
|
||||||
Description: "Retrieve explicit links and semantic neighbours for a thought. Falls back to full-text search when no embeddings exist.",
|
Description: "Explicit links and semantic neighbours; falls back to full-text.",
|
||||||
}, toolSet.Links.Related); err != nil {
|
}, toolSet.Links.Related); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -207,25 +232,47 @@ func registerProjectTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "set_active_project",
|
Name: "set_active_project",
|
||||||
Description: "Set the active project for the current MCP session. Requires a stateful MCP client that reuses the same session across calls. If your client does not preserve sessions, pass project explicitly to each tool instead.",
|
Description: "Set session's active project. Pass project per call if client is stateless.",
|
||||||
}, toolSet.Projects.SetActive); err != nil {
|
}, toolSet.Projects.SetActive); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "get_active_project",
|
Name: "get_active_project",
|
||||||
Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.",
|
Description: "Return session's active project. Pass project per call if client is stateless.",
|
||||||
}, toolSet.Projects.GetActive); err != nil {
|
}, toolSet.Projects.GetActive); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "get_project_context",
|
Name: "get_project_context",
|
||||||
Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.",
|
Description: "Recent and semantic context for a project; falls back to full-text.",
|
||||||
}, toolSet.Context.Handle); err != nil {
|
}, toolSet.Context.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func registerLearningTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "add_learning",
|
||||||
|
Description: "Create a curated learning record distinct from raw thoughts.",
|
||||||
|
}, toolSet.Learnings.Add); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "get_learning",
|
||||||
|
Description: "Retrieve a structured learning by id.",
|
||||||
|
}, toolSet.Learnings.Get); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
|
Name: "list_learnings",
|
||||||
|
Description: "List structured learnings with optional project, status, priority, tag, and text filters.",
|
||||||
|
}, toolSet.Learnings.List); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
server.AddResourceTemplate(&mcp.ResourceTemplate{
|
server.AddResourceTemplate(&mcp.ResourceTemplate{
|
||||||
Name: "stored_file",
|
Name: "stored_file",
|
||||||
@@ -235,19 +282,19 @@ func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet)
|
|||||||
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "upload_file",
|
Name: "upload_file",
|
||||||
Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.",
|
Description: "Stage a file; returns amcs://files/{id}. content_path for large/binary, content_base64 for ≤10 MB. Link now or pass URI to save_file.",
|
||||||
}, toolSet.Files.Upload); err != nil {
|
}, toolSet.Files.Upload); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "save_file",
|
Name: "save_file",
|
||||||
Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.",
|
Description: "Store and optionally link a file. content_base64 (≤10 MB) or content_uri from upload_file. >10 MB: use upload_file first.",
|
||||||
}, toolSet.Files.Save); err != nil {
|
}, toolSet.Files.Save); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "load_file",
|
Name: "load_file",
|
||||||
Description: "Load a stored file by id. Returns metadata, base64 content, and an embedded MCP binary resource at amcs://files/{id}. Prefer the embedded resource when your client supports it. The id field accepts a bare UUID or full amcs://files/{id} URI.",
|
Description: "Fetch file metadata and content by id (UUID or amcs://files/{id}); includes embedded MCP resource.",
|
||||||
}, toolSet.Files.Load); err != nil {
|
}, toolSet.Files.Load); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -263,19 +310,19 @@ func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet)
|
|||||||
func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "backfill_embeddings",
|
Name: "backfill_embeddings",
|
||||||
Description: "Generate missing embeddings for stored thoughts using the active embedding model. Run this after switching embedding models or importing thoughts that have no vectors.",
|
Description: "Generate missing embeddings. Run after model switch or bulk import.",
|
||||||
}, toolSet.Backfill.Handle); err != nil {
|
}, toolSet.Backfill.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "reparse_thought_metadata",
|
Name: "reparse_thought_metadata",
|
||||||
Description: "Re-extract and normalize metadata for stored thoughts from their content.",
|
Description: "Re-extract metadata from thought content.",
|
||||||
}, toolSet.Reparse.Handle); err != nil {
|
}, toolSet.Reparse.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "retry_failed_metadata",
|
Name: "retry_failed_metadata",
|
||||||
Description: "Retry metadata extraction for thoughts still marked pending or failed.",
|
Description: "Retry pending/failed metadata extraction.",
|
||||||
}, toolSet.RetryMetadata.Handle); err != nil {
|
}, toolSet.RetryMetadata.Handle); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -287,7 +334,7 @@ func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "log_maintenance",
|
Name: "log_maintenance",
|
||||||
Description: "Log completed maintenance work; automatically updates the task's next due date.",
|
Description: "Log completed maintenance; updates next due date.",
|
||||||
}, toolSet.Maintenance.LogWork); err != nil {
|
}, toolSet.Maintenance.LogWork); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -306,176 +353,10 @@ func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func registerHouseholdTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_household_item",
|
|
||||||
Description: "Store a household fact (paint color, appliance details, measurement, document, etc.).",
|
|
||||||
}, toolSet.Household.AddItem); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "search_household_items",
|
|
||||||
Description: "Search household items by name, category, or location.",
|
|
||||||
}, toolSet.Household.SearchItems); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_household_item",
|
|
||||||
Description: "Retrieve a household item by id.",
|
|
||||||
}, toolSet.Household.GetItem); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_vendor",
|
|
||||||
Description: "Add a service provider (plumber, electrician, landscaper, etc.).",
|
|
||||||
}, toolSet.Household.AddVendor); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "list_vendors",
|
|
||||||
Description: "List household service vendors, optionally filtered by service type.",
|
|
||||||
}, toolSet.Household.ListVendors); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func registerCalendarTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_family_member",
|
|
||||||
Description: "Add a family member to the household.",
|
|
||||||
}, toolSet.Calendar.AddMember); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "list_family_members",
|
|
||||||
Description: "List all family members.",
|
|
||||||
}, toolSet.Calendar.ListMembers); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_activity",
|
|
||||||
Description: "Schedule a one-time or recurring family activity.",
|
|
||||||
}, toolSet.Calendar.AddActivity); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_week_schedule",
|
|
||||||
Description: "Get all activities scheduled for a given week.",
|
|
||||||
}, toolSet.Calendar.GetWeekSchedule); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "search_activities",
|
|
||||||
Description: "Search activities by title, type, or family member.",
|
|
||||||
}, toolSet.Calendar.SearchActivities); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_important_date",
|
|
||||||
Description: "Track a birthday, anniversary, deadline, or other important date.",
|
|
||||||
}, toolSet.Calendar.AddImportantDate); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_upcoming_dates",
|
|
||||||
Description: "Get important dates coming up in the next N days.",
|
|
||||||
}, toolSet.Calendar.GetUpcomingDates); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func registerMealTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_recipe",
|
|
||||||
Description: "Save a recipe with ingredients and instructions.",
|
|
||||||
}, toolSet.Meals.AddRecipe); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "search_recipes",
|
|
||||||
Description: "Search recipes by name, cuisine, tags, or ingredient.",
|
|
||||||
}, toolSet.Meals.SearchRecipes); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "update_recipe",
|
|
||||||
Description: "Update an existing recipe.",
|
|
||||||
}, toolSet.Meals.UpdateRecipe); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "create_meal_plan",
|
|
||||||
Description: "Set the meal plan for a week; replaces any existing plan for that week.",
|
|
||||||
}, toolSet.Meals.CreateMealPlan); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_meal_plan",
|
|
||||||
Description: "Get the meal plan for a given week.",
|
|
||||||
}, toolSet.Meals.GetMealPlan); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "generate_shopping_list",
|
|
||||||
Description: "Auto-generate a shopping list from the meal plan for a given week.",
|
|
||||||
}, toolSet.Meals.GenerateShoppingList); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func registerCRMTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "add_professional_contact",
|
|
||||||
Description: "Add a professional contact to the CRM.",
|
|
||||||
}, toolSet.CRM.AddContact); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "search_contacts",
|
|
||||||
Description: "Search professional contacts by name, company, title, notes, or tags.",
|
|
||||||
}, toolSet.CRM.SearchContacts); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "log_interaction",
|
|
||||||
Description: "Log an interaction with a professional contact.",
|
|
||||||
}, toolSet.CRM.LogInteraction); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_contact_history",
|
|
||||||
Description: "Get full history (interactions and opportunities) for a contact.",
|
|
||||||
}, toolSet.CRM.GetHistory); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "create_opportunity",
|
|
||||||
Description: "Create a deal, project, or opportunity linked to a contact.",
|
|
||||||
}, toolSet.CRM.CreateOpportunity); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "get_follow_ups_due",
|
|
||||||
Description: "List contacts with a follow-up date due within the next N days.",
|
|
||||||
}, toolSet.CRM.GetFollowUpsDue); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
|
||||||
Name: "link_thought_to_contact",
|
|
||||||
Description: "Append a stored thought to a contact's notes.",
|
|
||||||
}, toolSet.CRM.LinkThought); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "add_skill",
|
Name: "add_skill",
|
||||||
Description: "Store a reusable agent skill (behavioural instruction or capability prompt).",
|
Description: "Store an agent skill (instruction or capability prompt).",
|
||||||
}, toolSet.Skills.AddSkill); err != nil {
|
}, toolSet.Skills.AddSkill); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -493,7 +374,7 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "add_guardrail",
|
Name: "add_guardrail",
|
||||||
Description: "Store a reusable agent guardrail (constraint or safety rule).",
|
Description: "Store an agent guardrail (constraint or safety rule).",
|
||||||
}, toolSet.Skills.AddGuardrail); err != nil {
|
}, toolSet.Skills.AddGuardrail); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -511,37 +392,37 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
|||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "add_project_skill",
|
Name: "add_project_skill",
|
||||||
Description: "Link an agent skill to a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Link a skill to a project. Pass project if client is stateless.",
|
||||||
}, toolSet.Skills.AddProjectSkill); err != nil {
|
}, toolSet.Skills.AddProjectSkill); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "remove_project_skill",
|
Name: "remove_project_skill",
|
||||||
Description: "Unlink an agent skill from a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Unlink a skill from a project. Pass project if client is stateless.",
|
||||||
}, toolSet.Skills.RemoveProjectSkill); err != nil {
|
}, toolSet.Skills.RemoveProjectSkill); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "list_project_skills",
|
Name: "list_project_skills",
|
||||||
Description: "List all skills linked to a project. Call this at the start of every project session to load agent behaviour instructions before generating new ones. Only create new skills if none are returned. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Skills for a project. Load at session start; only add new if none returned. Pass project if stateless.",
|
||||||
}, toolSet.Skills.ListProjectSkills); err != nil {
|
}, toolSet.Skills.ListProjectSkills); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "add_project_guardrail",
|
Name: "add_project_guardrail",
|
||||||
Description: "Link an agent guardrail to a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Link a guardrail to a project. Pass project if client is stateless.",
|
||||||
}, toolSet.Skills.AddProjectGuardrail); err != nil {
|
}, toolSet.Skills.AddProjectGuardrail); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "remove_project_guardrail",
|
Name: "remove_project_guardrail",
|
||||||
Description: "Unlink an agent guardrail from a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Unlink a guardrail from a project. Pass project if client is stateless.",
|
||||||
}, toolSet.Skills.RemoveProjectGuardrail); err != nil {
|
}, toolSet.Skills.RemoveProjectGuardrail); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "list_project_guardrails",
|
Name: "list_project_guardrails",
|
||||||
Description: "List all guardrails linked to a project. Call this at the start of every project session to load agent constraints before generating new ones. Only create new guardrails if none are returned. Pass project explicitly when your client does not preserve MCP sessions.",
|
Description: "Guardrails for a project. Load at session start; only add new if none returned. Pass project if stateless.",
|
||||||
}, toolSet.Skills.ListProjectGuardrails); err != nil {
|
}, toolSet.Skills.ListProjectGuardrails); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -551,25 +432,25 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
|||||||
func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "save_chat_history",
|
Name: "save_chat_history",
|
||||||
Description: "Save a chat session's message history for later retrieval. Stores messages with optional title, summary, channel, agent, and project metadata.",
|
Description: "Save chat messages with optional title, summary, channel, agent, and project.",
|
||||||
}, toolSet.ChatHistory.SaveChatHistory); err != nil {
|
}, toolSet.ChatHistory.SaveChatHistory); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "get_chat_history",
|
Name: "get_chat_history",
|
||||||
Description: "Retrieve a saved chat history by its UUID or session_id. Returns the full message list.",
|
Description: "Fetch chat history by UUID or session_id.",
|
||||||
}, toolSet.ChatHistory.GetChatHistory); err != nil {
|
}, toolSet.ChatHistory.GetChatHistory); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "list_chat_histories",
|
Name: "list_chat_histories",
|
||||||
Description: "List saved chat histories with optional filters: project, channel, agent_id, session_id, or recent days.",
|
Description: "List chat histories; filter by project, channel, agent_id, session_id, or days.",
|
||||||
}, toolSet.ChatHistory.ListChatHistories); err != nil {
|
}, toolSet.ChatHistory.ListChatHistories); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "delete_chat_history",
|
Name: "delete_chat_history",
|
||||||
Description: "Permanently delete a saved chat history by id.",
|
Description: "Delete a chat history by id.",
|
||||||
}, toolSet.ChatHistory.DeleteChatHistory); err != nil {
|
}, toolSet.ChatHistory.DeleteChatHistory); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -579,13 +460,13 @@ func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
|||||||
func registerDescribeTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
func registerDescribeTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "describe_tools",
|
Name: "describe_tools",
|
||||||
Description: "Call this first in every session. Returns all available MCP tools with names, descriptions, categories, and your accumulated usage notes. Filter by category to narrow results. Available categories: system, thoughts, projects, files, admin, household, maintenance, calendar, meals, crm, skills, chat, meta.",
|
Description: "Call first each session. All tools with categories and usage notes. Categories: system, thoughts, projects, files, admin, maintenance, skills, chat, meta.",
|
||||||
}, toolSet.Describe.Describe); err != nil {
|
}, toolSet.Describe.Describe); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := addTool(server, logger, &mcp.Tool{
|
if err := addTool(server, logger, &mcp.Tool{
|
||||||
Name: "annotate_tool",
|
Name: "annotate_tool",
|
||||||
Description: "Persist usage notes, gotchas, or workflow patterns for a specific tool. Notes survive across sessions and are returned by describe_tools. Call this whenever you discover something non-obvious about a tool's behaviour. Pass an empty string to clear notes.",
|
Description: "Save usage notes for a tool; returned by describe_tools. Empty string clears.",
|
||||||
}, toolSet.Describe.Annotate); err != nil {
|
}, toolSet.Describe.Annotate); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -620,6 +501,11 @@ func BuildToolCatalog() []tools.ToolEntry {
|
|||||||
{Name: "get_active_project", Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.", Category: "projects"},
|
{Name: "get_active_project", Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.", Category: "projects"},
|
||||||
{Name: "get_project_context", Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.", Category: "projects"},
|
{Name: "get_project_context", Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.", Category: "projects"},
|
||||||
|
|
||||||
|
// learnings
|
||||||
|
{Name: "add_learning", Description: "Create a curated learning record distinct from raw thoughts.", Category: "projects"},
|
||||||
|
{Name: "get_learning", Description: "Retrieve a structured learning by id.", Category: "projects"},
|
||||||
|
{Name: "list_learnings", Description: "List structured learnings with optional project, category, area, status, priority, tag, and text filters.", Category: "projects"},
|
||||||
|
|
||||||
// files
|
// files
|
||||||
{Name: "upload_file", Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.", Category: "files"},
|
{Name: "upload_file", Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.", Category: "files"},
|
||||||
{Name: "save_file", Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.", Category: "files"},
|
{Name: "save_file", Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.", Category: "files"},
|
||||||
@@ -631,45 +517,12 @@ func BuildToolCatalog() []tools.ToolEntry {
|
|||||||
{Name: "reparse_thought_metadata", Description: "Re-extract and normalize metadata for stored thoughts from their content.", Category: "admin"},
|
{Name: "reparse_thought_metadata", Description: "Re-extract and normalize metadata for stored thoughts from their content.", Category: "admin"},
|
||||||
{Name: "retry_failed_metadata", Description: "Retry metadata extraction for thoughts still marked pending or failed.", Category: "admin"},
|
{Name: "retry_failed_metadata", Description: "Retry metadata extraction for thoughts still marked pending or failed.", Category: "admin"},
|
||||||
|
|
||||||
// household
|
|
||||||
{Name: "add_household_item", Description: "Store a household fact (paint color, appliance details, measurement, document, etc.).", Category: "household"},
|
|
||||||
{Name: "search_household_items", Description: "Search household items by name, category, or location.", Category: "household"},
|
|
||||||
{Name: "get_household_item", Description: "Retrieve a household item by id.", Category: "household"},
|
|
||||||
{Name: "add_vendor", Description: "Add a service provider (plumber, electrician, landscaper, etc.).", Category: "household"},
|
|
||||||
{Name: "list_vendors", Description: "List household service vendors, optionally filtered by service type.", Category: "household"},
|
|
||||||
|
|
||||||
// maintenance
|
// maintenance
|
||||||
{Name: "add_maintenance_task", Description: "Create a recurring or one-time home maintenance task.", Category: "maintenance"},
|
{Name: "add_maintenance_task", Description: "Create a recurring or one-time home maintenance task.", Category: "maintenance"},
|
||||||
{Name: "log_maintenance", Description: "Log completed maintenance work; automatically updates the task's next due date.", Category: "maintenance"},
|
{Name: "log_maintenance", Description: "Log completed maintenance work; automatically updates the task's next due date.", Category: "maintenance"},
|
||||||
{Name: "get_upcoming_maintenance", Description: "List maintenance tasks due within the next N days.", Category: "maintenance"},
|
{Name: "get_upcoming_maintenance", Description: "List maintenance tasks due within the next N days.", Category: "maintenance"},
|
||||||
{Name: "search_maintenance_history", Description: "Search the maintenance log by task name, category, or date range.", Category: "maintenance"},
|
{Name: "search_maintenance_history", Description: "Search the maintenance log by task name, category, or date range.", Category: "maintenance"},
|
||||||
|
|
||||||
// calendar
|
|
||||||
{Name: "add_family_member", Description: "Add a family member to the household.", Category: "calendar"},
|
|
||||||
{Name: "list_family_members", Description: "List all family members.", Category: "calendar"},
|
|
||||||
{Name: "add_activity", Description: "Schedule a one-time or recurring family activity.", Category: "calendar"},
|
|
||||||
{Name: "get_week_schedule", Description: "Get all activities scheduled for a given week.", Category: "calendar"},
|
|
||||||
{Name: "search_activities", Description: "Search activities by title, type, or family member.", Category: "calendar"},
|
|
||||||
{Name: "add_important_date", Description: "Track a birthday, anniversary, deadline, or other important date.", Category: "calendar"},
|
|
||||||
{Name: "get_upcoming_dates", Description: "Get important dates coming up in the next N days.", Category: "calendar"},
|
|
||||||
|
|
||||||
// meals
|
|
||||||
{Name: "add_recipe", Description: "Save a recipe with ingredients and instructions.", Category: "meals"},
|
|
||||||
{Name: "search_recipes", Description: "Search recipes by name, cuisine, tags, or ingredient.", Category: "meals"},
|
|
||||||
{Name: "update_recipe", Description: "Update an existing recipe.", Category: "meals"},
|
|
||||||
{Name: "create_meal_plan", Description: "Set the meal plan for a week; replaces any existing plan for that week.", Category: "meals"},
|
|
||||||
{Name: "get_meal_plan", Description: "Get the meal plan for a given week.", Category: "meals"},
|
|
||||||
{Name: "generate_shopping_list", Description: "Auto-generate a shopping list from the meal plan for a given week.", Category: "meals"},
|
|
||||||
|
|
||||||
// crm
|
|
||||||
{Name: "add_professional_contact", Description: "Add a professional contact to the CRM.", Category: "crm"},
|
|
||||||
{Name: "search_contacts", Description: "Search professional contacts by name, company, title, notes, or tags.", Category: "crm"},
|
|
||||||
{Name: "log_interaction", Description: "Log an interaction with a professional contact.", Category: "crm"},
|
|
||||||
{Name: "get_contact_history", Description: "Get full history (interactions and opportunities) for a contact.", Category: "crm"},
|
|
||||||
{Name: "create_opportunity", Description: "Create a deal, project, or opportunity linked to a contact.", Category: "crm"},
|
|
||||||
{Name: "get_follow_ups_due", Description: "List contacts with a follow-up date due within the next N days.", Category: "crm"},
|
|
||||||
{Name: "link_thought_to_contact", Description: "Append a stored thought to a contact's notes.", Category: "crm"},
|
|
||||||
|
|
||||||
// skills
|
// skills
|
||||||
{Name: "add_skill", Description: "Store a reusable agent skill (behavioural instruction or capability prompt).", Category: "skills"},
|
{Name: "add_skill", Description: "Store a reusable agent skill (behavioural instruction or capability prompt).", Category: "skills"},
|
||||||
{Name: "remove_skill", Description: "Delete an agent skill by id.", Category: "skills"},
|
{Name: "remove_skill", Description: "Delete an agent skill by id.", Category: "skills"},
|
||||||
|
|||||||
@@ -28,55 +28,38 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
|||||||
sort.Strings(got)
|
sort.Strings(got)
|
||||||
|
|
||||||
want := []string{
|
want := []string{
|
||||||
"add_activity",
|
|
||||||
"add_family_member",
|
|
||||||
"add_guardrail",
|
"add_guardrail",
|
||||||
"add_household_item",
|
"add_learning",
|
||||||
"add_important_date",
|
|
||||||
"add_maintenance_task",
|
"add_maintenance_task",
|
||||||
"add_professional_contact",
|
|
||||||
"add_project_guardrail",
|
"add_project_guardrail",
|
||||||
"add_project_skill",
|
"add_project_skill",
|
||||||
"add_recipe",
|
|
||||||
"add_skill",
|
"add_skill",
|
||||||
"add_vendor",
|
|
||||||
"annotate_tool",
|
"annotate_tool",
|
||||||
"archive_thought",
|
"archive_thought",
|
||||||
"backfill_embeddings",
|
"backfill_embeddings",
|
||||||
"capture_thought",
|
"capture_thought",
|
||||||
"create_meal_plan",
|
|
||||||
"create_opportunity",
|
|
||||||
"create_project",
|
"create_project",
|
||||||
"delete_chat_history",
|
"delete_chat_history",
|
||||||
"delete_thought",
|
"delete_thought",
|
||||||
"describe_tools",
|
"describe_tools",
|
||||||
"generate_shopping_list",
|
|
||||||
"get_active_project",
|
"get_active_project",
|
||||||
"get_chat_history",
|
"get_chat_history",
|
||||||
"get_contact_history",
|
"get_learning",
|
||||||
"get_follow_ups_due",
|
|
||||||
"get_household_item",
|
|
||||||
"get_meal_plan",
|
|
||||||
"get_project_context",
|
"get_project_context",
|
||||||
"get_thought",
|
"get_thought",
|
||||||
"get_upcoming_dates",
|
|
||||||
"get_upcoming_maintenance",
|
"get_upcoming_maintenance",
|
||||||
"get_version_info",
|
"get_version_info",
|
||||||
"get_week_schedule",
|
|
||||||
"link_thought_to_contact",
|
|
||||||
"link_thoughts",
|
"link_thoughts",
|
||||||
"list_chat_histories",
|
"list_chat_histories",
|
||||||
"list_family_members",
|
|
||||||
"list_files",
|
"list_files",
|
||||||
"list_guardrails",
|
"list_guardrails",
|
||||||
|
"list_learnings",
|
||||||
"list_project_guardrails",
|
"list_project_guardrails",
|
||||||
"list_project_skills",
|
"list_project_skills",
|
||||||
"list_projects",
|
"list_projects",
|
||||||
"list_skills",
|
"list_skills",
|
||||||
"list_thoughts",
|
"list_thoughts",
|
||||||
"list_vendors",
|
|
||||||
"load_file",
|
"load_file",
|
||||||
"log_interaction",
|
|
||||||
"log_maintenance",
|
"log_maintenance",
|
||||||
"recall_context",
|
"recall_context",
|
||||||
"related_thoughts",
|
"related_thoughts",
|
||||||
@@ -88,16 +71,11 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
|||||||
"retry_failed_metadata",
|
"retry_failed_metadata",
|
||||||
"save_chat_history",
|
"save_chat_history",
|
||||||
"save_file",
|
"save_file",
|
||||||
"search_activities",
|
|
||||||
"search_contacts",
|
|
||||||
"search_household_items",
|
|
||||||
"search_maintenance_history",
|
"search_maintenance_history",
|
||||||
"search_recipes",
|
|
||||||
"search_thoughts",
|
"search_thoughts",
|
||||||
"set_active_project",
|
"set_active_project",
|
||||||
"summarize_thoughts",
|
"summarize_thoughts",
|
||||||
"thought_stats",
|
"thought_stats",
|
||||||
"update_recipe",
|
|
||||||
"update_thought",
|
"update_thought",
|
||||||
"upload_file",
|
"upload_file",
|
||||||
}
|
}
|
||||||
|
|||||||
136
internal/mcpserver/sse_test.go
Normal file
136
internal/mcpserver/sse_test.go
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
package mcpserver
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewHandlers_SSEDisabledByDefault(t *testing.T) {
|
||||||
|
h, err := NewHandlers(config.MCPConfig{
|
||||||
|
ServerName: "test",
|
||||||
|
Version: "0.0.1",
|
||||||
|
SessionTimeout: time.Minute,
|
||||||
|
}, nil, streamableTestToolSet(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewHandlers() error = %v", err)
|
||||||
|
}
|
||||||
|
if h.StreamableHTTP == nil {
|
||||||
|
t.Fatal("StreamableHTTP handler is nil")
|
||||||
|
}
|
||||||
|
if h.SSE != nil {
|
||||||
|
t.Fatal("SSE handler should be nil when SSEPath is empty")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewHandlers_SSEEnabledWhenPathSet(t *testing.T) {
|
||||||
|
h, err := NewHandlers(config.MCPConfig{
|
||||||
|
ServerName: "test",
|
||||||
|
Version: "0.0.1",
|
||||||
|
SessionTimeout: time.Minute,
|
||||||
|
SSEPath: "/sse",
|
||||||
|
}, nil, streamableTestToolSet(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewHandlers() error = %v", err)
|
||||||
|
}
|
||||||
|
if h.StreamableHTTP == nil {
|
||||||
|
t.Fatal("StreamableHTTP handler is nil")
|
||||||
|
}
|
||||||
|
if h.SSE == nil {
|
||||||
|
t.Fatal("SSE handler is nil when SSEPath is set")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNew_BackwardCompatibility(t *testing.T) {
|
||||||
|
handler, err := New(config.MCPConfig{
|
||||||
|
ServerName: "test",
|
||||||
|
Version: "0.0.1",
|
||||||
|
SessionTimeout: time.Minute,
|
||||||
|
}, nil, streamableTestToolSet(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("New() error = %v", err)
|
||||||
|
}
|
||||||
|
if handler == nil {
|
||||||
|
t.Fatal("New() returned nil handler")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSSEListTools(t *testing.T) {
|
||||||
|
h, err := NewHandlers(config.MCPConfig{
|
||||||
|
ServerName: "test",
|
||||||
|
Version: "0.0.1",
|
||||||
|
SessionTimeout: time.Minute,
|
||||||
|
SSEPath: "/sse",
|
||||||
|
}, nil, streamableTestToolSet(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewHandlers() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
srv := httptest.NewServer(h.SSE)
|
||||||
|
t.Cleanup(srv.Close)
|
||||||
|
|
||||||
|
client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "0.0.1"}, nil)
|
||||||
|
cs, err := client.Connect(context.Background(), &mcp.SSEClientTransport{Endpoint: srv.URL}, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("connect SSE client: %v", err)
|
||||||
|
}
|
||||||
|
t.Cleanup(func() { _ = cs.Close() })
|
||||||
|
|
||||||
|
result, err := cs.ListTools(context.Background(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ListTools() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(result.Tools) == 0 {
|
||||||
|
t.Fatal("ListTools() returned no tools")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSSEAndStreamableShareTools(t *testing.T) {
|
||||||
|
h, err := NewHandlers(config.MCPConfig{
|
||||||
|
ServerName: "test",
|
||||||
|
Version: "0.0.1",
|
||||||
|
SessionTimeout: time.Minute,
|
||||||
|
SSEPath: "/sse",
|
||||||
|
}, nil, streamableTestToolSet(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewHandlers() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
sseSrv := httptest.NewServer(h.SSE)
|
||||||
|
t.Cleanup(sseSrv.Close)
|
||||||
|
|
||||||
|
streamSrv := httptest.NewServer(h.StreamableHTTP)
|
||||||
|
t.Cleanup(streamSrv.Close)
|
||||||
|
|
||||||
|
sseClient := mcp.NewClient(&mcp.Implementation{Name: "sse-client", Version: "0.0.1"}, nil)
|
||||||
|
sseSession, err := sseClient.Connect(context.Background(), &mcp.SSEClientTransport{Endpoint: sseSrv.URL}, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("connect SSE client: %v", err)
|
||||||
|
}
|
||||||
|
t.Cleanup(func() { _ = sseSession.Close() })
|
||||||
|
|
||||||
|
streamClient := mcp.NewClient(&mcp.Implementation{Name: "stream-client", Version: "0.0.1"}, nil)
|
||||||
|
streamSession, err := streamClient.Connect(context.Background(), &mcp.StreamableClientTransport{Endpoint: streamSrv.URL}, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("connect StreamableHTTP client: %v", err)
|
||||||
|
}
|
||||||
|
t.Cleanup(func() { _ = streamSession.Close() })
|
||||||
|
|
||||||
|
sseTools, err := sseSession.ListTools(context.Background(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("SSE ListTools() error = %v", err)
|
||||||
|
}
|
||||||
|
streamTools, err := streamSession.ListTools(context.Background(), nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("StreamableHTTP ListTools() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(sseTools.Tools) != len(streamTools.Tools) {
|
||||||
|
t.Fatalf("SSE tool count = %d, StreamableHTTP tool count = %d, want equal", len(sseTools.Tools), len(streamTools.Tools))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -126,12 +126,8 @@ func streamableTestToolSet() ToolSet {
|
|||||||
Files: new(tools.FilesTool),
|
Files: new(tools.FilesTool),
|
||||||
Backfill: new(tools.BackfillTool),
|
Backfill: new(tools.BackfillTool),
|
||||||
Reparse: new(tools.ReparseMetadataTool),
|
Reparse: new(tools.ReparseMetadataTool),
|
||||||
RetryMetadata: new(tools.RetryMetadataTool),
|
RetryMetadata: new(tools.RetryEnrichmentTool),
|
||||||
Household: new(tools.HouseholdTool),
|
|
||||||
Maintenance: new(tools.MaintenanceTool),
|
Maintenance: new(tools.MaintenanceTool),
|
||||||
Calendar: new(tools.CalendarTool),
|
|
||||||
Meals: new(tools.MealsTool),
|
|
||||||
CRM: new(tools.CRMTool),
|
|
||||||
Skills: new(tools.SkillsTool),
|
Skills: new(tools.SkillsTool),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,19 +1,25 @@
|
|||||||
package observability
|
package observability
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"runtime/debug"
|
"runtime/debug"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/requestip"
|
||||||
)
|
)
|
||||||
|
|
||||||
type contextKey string
|
type contextKey string
|
||||||
|
|
||||||
const requestIDContextKey contextKey = "request_id"
|
const requestIDContextKey contextKey = "request_id"
|
||||||
|
const mcpToolContextKey contextKey = "mcp_tool"
|
||||||
|
|
||||||
func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler {
|
func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler {
|
||||||
for i := len(middlewares) - 1; i >= 0; i-- {
|
for i := len(middlewares) - 1; i >= 0; i-- {
|
||||||
@@ -57,18 +63,27 @@ func Recover(log *slog.Logger) func(http.Handler) http.Handler {
|
|||||||
func AccessLog(log *slog.Logger) func(http.Handler) http.Handler {
|
func AccessLog(log *slog.Logger) func(http.Handler) http.Handler {
|
||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if tool := mcpToolFromRequest(r); tool != "" {
|
||||||
|
r = r.WithContext(context.WithValue(r.Context(), mcpToolContextKey, tool))
|
||||||
|
}
|
||||||
|
|
||||||
recorder := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
|
recorder := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
|
||||||
started := time.Now()
|
started := time.Now()
|
||||||
next.ServeHTTP(recorder, r)
|
next.ServeHTTP(recorder, r)
|
||||||
|
|
||||||
log.Info("http request",
|
attrs := []any{
|
||||||
slog.String("request_id", RequestIDFromContext(r.Context())),
|
slog.String("request_id", RequestIDFromContext(r.Context())),
|
||||||
slog.String("method", r.Method),
|
slog.String("method", r.Method),
|
||||||
slog.String("path", r.URL.Path),
|
slog.String("path", r.URL.Path),
|
||||||
slog.Int("status", recorder.status),
|
slog.Int("status", recorder.status),
|
||||||
slog.Duration("duration", time.Since(started)),
|
slog.Duration("duration", time.Since(started)),
|
||||||
slog.String("remote_addr", stripPort(r.RemoteAddr)),
|
slog.String("remote_addr", requestip.FromRequest(r)),
|
||||||
)
|
slog.String("mcp_session_id", mcpSessionIDFromRequest(r)),
|
||||||
|
}
|
||||||
|
if tool, _ := r.Context().Value(mcpToolContextKey).(string); strings.TrimSpace(tool) != "" {
|
||||||
|
attrs = append(attrs, slog.String("tool", tool), slog.String("tool_call", tool))
|
||||||
|
}
|
||||||
|
log.Info("http request", attrs...)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -101,10 +116,67 @@ func (s *statusRecorder) WriteHeader(statusCode int) {
|
|||||||
s.ResponseWriter.WriteHeader(statusCode)
|
s.ResponseWriter.WriteHeader(statusCode)
|
||||||
}
|
}
|
||||||
|
|
||||||
func stripPort(remote string) string {
|
func mcpToolFromRequest(r *http.Request) string {
|
||||||
host, _, err := net.SplitHostPort(remote)
|
if r == nil || r.Method != http.MethodPost || !strings.HasPrefix(r.URL.Path, "/mcp") || r.Body == nil {
|
||||||
if err != nil {
|
return ""
|
||||||
return remote
|
|
||||||
}
|
}
|
||||||
return host
|
|
||||||
|
raw, err := io.ReadAll(r.Body)
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
r.Body = io.NopCloser(bytes.NewReader(raw))
|
||||||
|
if len(raw) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support both single and batch JSON-RPC payloads.
|
||||||
|
if strings.HasPrefix(strings.TrimSpace(string(raw)), "[") {
|
||||||
|
var batch []rpcEnvelope
|
||||||
|
if err := json.Unmarshal(raw, &batch); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
for _, msg := range batch {
|
||||||
|
if tool := msg.toolName(); tool != "" {
|
||||||
|
return tool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var msg rpcEnvelope
|
||||||
|
if err := json.Unmarshal(raw, &msg); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return msg.toolName()
|
||||||
|
}
|
||||||
|
|
||||||
|
func mcpSessionIDFromRequest(r *http.Request) string {
|
||||||
|
if r == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
if v := strings.TrimSpace(r.Header.Get("MCP-Session-Id")); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
// Some clients/proxies may propagate the session in query params.
|
||||||
|
for _, key := range []string{"session_id", "sessionId", "mcp_session_id"} {
|
||||||
|
if v := strings.TrimSpace(r.URL.Query().Get(key)); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
type rpcEnvelope struct {
|
||||||
|
Method string `json:"method"`
|
||||||
|
Params struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
} `json:"params"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m rpcEnvelope) toolName() string {
|
||||||
|
if m.Method != "tools/call" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return strings.TrimSpace(m.Params.Name)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,13 @@
|
|||||||
package observability
|
package observability
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
@@ -57,3 +60,99 @@ func TestRecoverHandlesPanic(t *testing.T) {
|
|||||||
t.Fatalf("status = %d, want %d", rec.Code, http.StatusInternalServerError)
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusInternalServerError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestAccessLogUsesForwardedClientIP(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/mcp", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:1234"
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.7")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "remote_addr=203.0.113.7") {
|
||||||
|
t.Fatalf("log output = %q, want remote_addr=203.0.113.7", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPToolName(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
payload := map[string]any{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": map[string]any{
|
||||||
|
"name": "list_projects",
|
||||||
|
"arguments": map[string]any{},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("json.Marshal() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body))
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "tool=list_projects") {
|
||||||
|
t.Fatalf("log output = %q, want tool=list_projects", buf.String())
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "tool_call=list_projects") {
|
||||||
|
t.Fatalf("log output = %q, want tool_call=list_projects", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPSessionIDHeader(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/sse", nil)
|
||||||
|
req.Header.Set("MCP-Session-Id", "sess-123")
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "mcp_session_id=sess-123") {
|
||||||
|
t.Fatalf("log output = %q, want mcp_session_id=sess-123", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogIncludesMCPSessionIDQueryParam(t *testing.T) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
logger := slog.New(slog.NewTextHandler(&buf, nil))
|
||||||
|
handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}))
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/sse?session_id=sess-q-1", nil)
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
handler.ServeHTTP(rec, req)
|
||||||
|
|
||||||
|
if rec.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent)
|
||||||
|
}
|
||||||
|
if !strings.Contains(buf.String(), "mcp_session_id=sess-q-1") {
|
||||||
|
t.Fatalf("log output = %q, want mcp_session_id=sess-q-1", buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
72
internal/requestip/requestip.go
Normal file
72
internal/requestip/requestip.go
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
package requestip
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FromRequest returns the best-effort client IP/host for a request, preferring
|
||||||
|
// proxy headers before falling back to RemoteAddr.
|
||||||
|
//
|
||||||
|
// Header precedence:
|
||||||
|
// 1) X-Real-IP
|
||||||
|
// 2) X-Forwarded-For (first value)
|
||||||
|
// 3) Forwarded (for=...)
|
||||||
|
// 4) RemoteAddr (host part)
|
||||||
|
func FromRequest(r *http.Request) string {
|
||||||
|
if r == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if v := firstAddressToken(r.Header.Get("X-Real-IP")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
if v := firstAddressToken(r.Header.Get("X-Forwarded-For")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
if v := forwardedForValue(r.Header.Get("Forwarded")); v != "" {
|
||||||
|
return stripPort(v)
|
||||||
|
}
|
||||||
|
return stripPort(strings.TrimSpace(r.RemoteAddr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func firstAddressToken(v string) string {
|
||||||
|
if v == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
part := strings.TrimSpace(strings.Split(v, ",")[0])
|
||||||
|
part = strings.Trim(part, `"`)
|
||||||
|
return strings.TrimSpace(part)
|
||||||
|
}
|
||||||
|
|
||||||
|
func forwardedForValue(v string) string {
|
||||||
|
for _, part := range strings.Split(v, ",") {
|
||||||
|
for _, kv := range strings.Split(part, ";") {
|
||||||
|
k, raw, ok := strings.Cut(strings.TrimSpace(kv), "=")
|
||||||
|
if !ok || !strings.EqualFold(strings.TrimSpace(k), "for") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
candidate := strings.Trim(strings.TrimSpace(raw), `"`)
|
||||||
|
if candidate == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return candidate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func stripPort(addr string) string {
|
||||||
|
addr = strings.TrimSpace(addr)
|
||||||
|
if addr == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
// RFC 7239 quoted values may wrap IPv6 with brackets.
|
||||||
|
addr = strings.Trim(addr, "[]")
|
||||||
|
host, _, err := net.SplitHostPort(addr)
|
||||||
|
if err == nil {
|
||||||
|
return host
|
||||||
|
}
|
||||||
|
return addr
|
||||||
|
}
|
||||||
37
internal/requestip/requestip_test.go
Normal file
37
internal/requestip/requestip_test.go
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
package requestip
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestFromRequestPrefersXRealIP(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:5555"
|
||||||
|
req.Header.Set("X-Forwarded-For", "198.51.100.1")
|
||||||
|
req.Header.Set("X-Real-IP", "203.0.113.10")
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "203.0.113.10" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "203.0.113.10")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFromRequestUsesXForwardedForFirstValue(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "10.0.0.10:5555"
|
||||||
|
req.Header.Set("X-Forwarded-For", "198.51.100.7, 10.1.1.2")
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "198.51.100.7" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "198.51.100.7")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFromRequestFallsBackToRemoteAddr(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
req.RemoteAddr = "192.0.2.5:1234"
|
||||||
|
|
||||||
|
if got := FromRequest(req); got != "192.0.2.5" {
|
||||||
|
t.Fatalf("FromRequest() = %q, want %q", got, "192.0.2.5")
|
||||||
|
}
|
||||||
|
}
|
||||||
215
internal/store/learnings.go
Normal file
215
internal/store/learnings.go
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
package store
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
"github.com/jackc/pgx/v5/pgtype"
|
||||||
|
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (db *DB) CreateLearning(ctx context.Context, learning thoughttypes.Learning) (thoughttypes.Learning, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
insert into learnings (
|
||||||
|
summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags
|
||||||
|
) values (
|
||||||
|
$1, $2, $3, $4, $5, $6, $7,
|
||||||
|
$8, $9, $10, $11, $12,
|
||||||
|
$13, $14, $15, $16,
|
||||||
|
$17, $18
|
||||||
|
)
|
||||||
|
returning id, created_at, updated_at
|
||||||
|
`,
|
||||||
|
strings.TrimSpace(learning.Summary),
|
||||||
|
strings.TrimSpace(learning.Details),
|
||||||
|
strings.TrimSpace(learning.Category),
|
||||||
|
strings.TrimSpace(learning.Area),
|
||||||
|
string(learning.Status),
|
||||||
|
string(learning.Priority),
|
||||||
|
string(learning.Confidence),
|
||||||
|
learning.ActionRequired,
|
||||||
|
nullableText(learning.SourceType),
|
||||||
|
nullableText(learning.SourceRef),
|
||||||
|
learning.ProjectID,
|
||||||
|
learning.RelatedThoughtID,
|
||||||
|
learning.RelatedSkillID,
|
||||||
|
nullableTextPtr(learning.ReviewedBy),
|
||||||
|
learning.ReviewedAt,
|
||||||
|
learning.DuplicateOfLearningID,
|
||||||
|
learning.SupersedesLearningID,
|
||||||
|
learning.Tags,
|
||||||
|
)
|
||||||
|
|
||||||
|
created := learning
|
||||||
|
if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil {
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("create learning: %w", err)
|
||||||
|
}
|
||||||
|
return created, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) GetLearning(ctx context.Context, id uuid.UUID) (thoughttypes.Learning, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select id, summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags, created_at, updated_at
|
||||||
|
from learnings
|
||||||
|
where id = $1
|
||||||
|
`, id)
|
||||||
|
|
||||||
|
learning, err := scanLearning(row)
|
||||||
|
if err != nil {
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("learning not found: %s", id)
|
||||||
|
}
|
||||||
|
return thoughttypes.Learning{}, fmt.Errorf("get learning: %w", err)
|
||||||
|
}
|
||||||
|
return learning, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) ListLearnings(ctx context.Context, filter thoughttypes.LearningFilter) ([]thoughttypes.Learning, error) {
|
||||||
|
args := make([]any, 0, 8)
|
||||||
|
conditions := make([]string, 0, 8)
|
||||||
|
|
||||||
|
if filter.ProjectID != nil {
|
||||||
|
args = append(args, *filter.ProjectID)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("project_id = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Category); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("category = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Area); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("area = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Status); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("status = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Priority); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("priority = $%d", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Tag); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("$%d = any(tags)", len(args)))
|
||||||
|
}
|
||||||
|
if value := strings.TrimSpace(filter.Query); value != "" {
|
||||||
|
args = append(args, value)
|
||||||
|
conditions = append(conditions, fmt.Sprintf("to_tsvector('simple', summary || ' ' || coalesce(details, '')) @@ websearch_to_tsquery('simple', $%d)", len(args)))
|
||||||
|
}
|
||||||
|
|
||||||
|
query := `
|
||||||
|
select id, summary, details, category, area, status, priority, confidence,
|
||||||
|
action_required, source_type, source_ref, project_id, related_thought_id,
|
||||||
|
related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id,
|
||||||
|
supersedes_learning_id, tags, created_at, updated_at
|
||||||
|
from learnings
|
||||||
|
`
|
||||||
|
if len(conditions) > 0 {
|
||||||
|
query += " where " + strings.Join(conditions, " and ")
|
||||||
|
}
|
||||||
|
query += " order by updated_at desc"
|
||||||
|
if filter.Limit > 0 {
|
||||||
|
args = append(args, filter.Limit)
|
||||||
|
query += fmt.Sprintf(" limit $%d", len(args))
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := db.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("list learnings: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
items := make([]thoughttypes.Learning, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
item, err := scanLearning(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scan learning: %w", err)
|
||||||
|
}
|
||||||
|
items = append(items, item)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterate learnings: %w", err)
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type learningScanner interface {
|
||||||
|
Scan(dest ...any) error
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanLearning(row learningScanner) (thoughttypes.Learning, error) {
|
||||||
|
var learning thoughttypes.Learning
|
||||||
|
var sourceType pgtype.Text
|
||||||
|
var sourceRef pgtype.Text
|
||||||
|
var reviewedBy pgtype.Text
|
||||||
|
var tags []string
|
||||||
|
|
||||||
|
err := row.Scan(
|
||||||
|
&learning.ID,
|
||||||
|
&learning.Summary,
|
||||||
|
&learning.Details,
|
||||||
|
&learning.Category,
|
||||||
|
&learning.Area,
|
||||||
|
&learning.Status,
|
||||||
|
&learning.Priority,
|
||||||
|
&learning.Confidence,
|
||||||
|
&learning.ActionRequired,
|
||||||
|
&sourceType,
|
||||||
|
&sourceRef,
|
||||||
|
&learning.ProjectID,
|
||||||
|
&learning.RelatedThoughtID,
|
||||||
|
&learning.RelatedSkillID,
|
||||||
|
&reviewedBy,
|
||||||
|
&learning.ReviewedAt,
|
||||||
|
&learning.DuplicateOfLearningID,
|
||||||
|
&learning.SupersedesLearningID,
|
||||||
|
&tags,
|
||||||
|
&learning.CreatedAt,
|
||||||
|
&learning.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return thoughttypes.Learning{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
learning.SourceType = sourceType.String
|
||||||
|
learning.SourceRef = sourceRef.String
|
||||||
|
if reviewedBy.Valid {
|
||||||
|
value := reviewedBy.String
|
||||||
|
learning.ReviewedBy = &value
|
||||||
|
}
|
||||||
|
if tags == nil {
|
||||||
|
learning.Tags = []string{}
|
||||||
|
} else {
|
||||||
|
learning.Tags = tags
|
||||||
|
}
|
||||||
|
return learning, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func nullableText(value string) *string {
|
||||||
|
trimmed := strings.TrimSpace(value)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &trimmed
|
||||||
|
}
|
||||||
|
|
||||||
|
func nullableTextPtr(value *string) *string {
|
||||||
|
if value == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
trimmed := strings.TrimSpace(*value)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &trimmed
|
||||||
|
}
|
||||||
@@ -26,21 +26,42 @@ func (db *DB) CreateProject(ctx context.Context, name, description string) (thou
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) GetProject(ctx context.Context, nameOrID string) (thoughttypes.Project, error) {
|
func (db *DB) GetProject(ctx context.Context, nameOrID string) (thoughttypes.Project, error) {
|
||||||
var row pgx.Row
|
lookup := strings.TrimSpace(nameOrID)
|
||||||
if parsedID, err := uuid.Parse(strings.TrimSpace(nameOrID)); err == nil {
|
|
||||||
row = db.pool.QueryRow(ctx, `
|
// Prefer guid lookup when input parses as UUID, but fall back to name lookup
|
||||||
select guid, name, description, created_at, last_active_at
|
// so UUID-shaped project names can still be resolved by name.
|
||||||
from projects
|
if parsedID, err := uuid.Parse(lookup); err == nil {
|
||||||
where guid = $1
|
project, queryErr := db.getProjectByGUID(ctx, parsedID)
|
||||||
`, parsedID)
|
if queryErr == nil {
|
||||||
} else {
|
return project, nil
|
||||||
row = db.pool.QueryRow(ctx, `
|
}
|
||||||
select guid, name, description, created_at, last_active_at
|
if queryErr != pgx.ErrNoRows {
|
||||||
from projects
|
return thoughttypes.Project{}, queryErr
|
||||||
where name = $1
|
}
|
||||||
`, strings.TrimSpace(nameOrID))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return db.getProjectByName(ctx, lookup)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) getProjectByGUID(ctx context.Context, id uuid.UUID) (thoughttypes.Project, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select guid, name, description, created_at, last_active_at
|
||||||
|
from projects
|
||||||
|
where guid = $1
|
||||||
|
`, id)
|
||||||
|
return scanProject(row)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (db *DB) getProjectByName(ctx context.Context, name string) (thoughttypes.Project, error) {
|
||||||
|
row := db.pool.QueryRow(ctx, `
|
||||||
|
select guid, name, description, created_at, last_active_at
|
||||||
|
from projects
|
||||||
|
where name = $1
|
||||||
|
`, name)
|
||||||
|
return scanProject(row)
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanProject(row pgx.Row) (thoughttypes.Project, error) {
|
||||||
var project thoughttypes.Project
|
var project thoughttypes.Project
|
||||||
if err := row.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt); err != nil {
|
if err := row.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt); err != nil {
|
||||||
if err == pgx.ErrNoRows {
|
if err == pgx.ErrNoRows {
|
||||||
|
|||||||
@@ -58,6 +58,12 @@ func (db *DB) InsertThought(ctx context.Context, thought thoughttypes.Thought, e
|
|||||||
return thoughttypes.Thought{}, fmt.Errorf("commit thought insert: %w", err)
|
return thoughttypes.Thought{}, fmt.Errorf("commit thought insert: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(thought.Embedding) > 0 {
|
||||||
|
created.EmbeddingStatus = "done"
|
||||||
|
} else {
|
||||||
|
created.EmbeddingStatus = "pending"
|
||||||
|
}
|
||||||
|
|
||||||
return created, nil
|
return created, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -576,7 +582,7 @@ func (db *DB) SearchThoughtsText(ctx context.Context, query string, limit int, p
|
|||||||
args := []any{query}
|
args := []any{query}
|
||||||
conditions := []string{
|
conditions := []string{
|
||||||
"t.archived_at is null",
|
"t.archived_at is null",
|
||||||
"to_tsvector('simple', t.content) @@ websearch_to_tsquery('simple', $1)",
|
"(to_tsvector('simple', t.content) || to_tsvector('simple', coalesce(p.name, ''))) @@ websearch_to_tsquery('simple', $1)",
|
||||||
}
|
}
|
||||||
if projectID != nil {
|
if projectID != nil {
|
||||||
args = append(args, *projectID)
|
args = append(args, *projectID)
|
||||||
@@ -590,9 +596,10 @@ func (db *DB) SearchThoughtsText(ctx context.Context, query string, limit int, p
|
|||||||
|
|
||||||
q := `
|
q := `
|
||||||
select t.guid, t.content, t.metadata,
|
select t.guid, t.content, t.metadata,
|
||||||
ts_rank_cd(to_tsvector('simple', t.content), websearch_to_tsquery('simple', $1)) as similarity,
|
ts_rank_cd(to_tsvector('simple', t.content) || to_tsvector('simple', coalesce(p.name, '')), websearch_to_tsquery('simple', $1)) as similarity,
|
||||||
t.created_at
|
t.created_at
|
||||||
from thoughts t
|
from thoughts t
|
||||||
|
left join projects p on t.project_id = p.guid
|
||||||
where ` + strings.Join(conditions, " and ") + `
|
where ` + strings.Join(conditions, " and ") + `
|
||||||
order by similarity desc
|
order by similarity desc
|
||||||
limit $` + fmt.Sprintf("%d", len(args))
|
limit $` + fmt.Sprintf("%d", len(args))
|
||||||
|
|||||||
@@ -18,10 +18,10 @@ import (
|
|||||||
const backfillConcurrency = 4
|
const backfillConcurrency = 4
|
||||||
|
|
||||||
type BackfillTool struct {
|
type BackfillTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
logger *slog.Logger
|
logger *slog.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
type BackfillInput struct {
|
type BackfillInput struct {
|
||||||
@@ -47,8 +47,51 @@ type BackfillOutput struct {
|
|||||||
Failures []BackfillFailure `json:"failures,omitempty"`
|
Failures []BackfillFailure `json:"failures,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewBackfillTool(db *store.DB, provider ai.Provider, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool {
|
func NewBackfillTool(db *store.DB, embeddings *ai.EmbeddingRunner, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool {
|
||||||
return &BackfillTool{store: db, provider: provider, sessions: sessions, logger: logger}
|
return &BackfillTool{store: db, embeddings: embeddings, sessions: sessions, logger: logger}
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueueThought queues a single thought for background embedding generation.
|
||||||
|
// It is used by capture when the embedding provider is temporarily unavailable.
|
||||||
|
func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) {
|
||||||
|
go func() {
|
||||||
|
started := time.Now()
|
||||||
|
t.logger.Info("background embedding started",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", t.embeddings.PrimaryModel()),
|
||||||
|
)
|
||||||
|
|
||||||
|
result, err := t.embeddings.Embed(ctx, content)
|
||||||
|
if err != nil {
|
||||||
|
t.logger.Warn("background embedding error",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", t.embeddings.PrimaryModel()),
|
||||||
|
slog.String("stage", "embed"),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); err != nil {
|
||||||
|
t.logger.Warn("background embedding error",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", result.Model),
|
||||||
|
slog.String("stage", "upsert"),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
t.logger.Info("background embedding complete",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", t.embeddings.PrimaryProvider()),
|
||||||
|
slog.String("model", result.Model),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
)
|
||||||
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in BackfillInput) (*mcp.CallToolResult, BackfillOutput, error) {
|
func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in BackfillInput) (*mcp.CallToolResult, BackfillOutput, error) {
|
||||||
@@ -67,15 +110,15 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
projectID = &project.ID
|
projectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
model := t.provider.EmbeddingModel()
|
primaryModel := t.embeddings.PrimaryModel()
|
||||||
|
|
||||||
thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, model, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, primaryModel, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, BackfillOutput{}, err
|
return nil, BackfillOutput{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
out := BackfillOutput{
|
out := BackfillOutput{
|
||||||
Model: model,
|
Model: primaryModel,
|
||||||
Scanned: len(thoughts),
|
Scanned: len(thoughts),
|
||||||
DryRun: in.DryRun,
|
DryRun: in.DryRun,
|
||||||
}
|
}
|
||||||
@@ -101,7 +144,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
defer sem.Release(1)
|
defer sem.Release(1)
|
||||||
|
|
||||||
vec, embedErr := t.provider.Embed(ctx, content)
|
result, embedErr := t.embeddings.Embed(ctx, content)
|
||||||
if embedErr != nil {
|
if embedErr != nil {
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: embedErr.Error()})
|
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: embedErr.Error()})
|
||||||
@@ -110,7 +153,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if upsertErr := t.store.UpsertEmbedding(ctx, id, model, vec); upsertErr != nil {
|
if upsertErr := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); upsertErr != nil {
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: upsertErr.Error()})
|
out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: upsertErr.Error()})
|
||||||
mu.Unlock()
|
mu.Unlock()
|
||||||
@@ -130,7 +173,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
out.Skipped = out.Scanned - out.Embedded - out.Failed
|
out.Skipped = out.Scanned - out.Embedded - out.Failed
|
||||||
|
|
||||||
t.logger.Info("backfill completed",
|
t.logger.Info("backfill completed",
|
||||||
slog.String("model", model),
|
slog.String("model", primaryModel),
|
||||||
slog.Int("scanned", out.Scanned),
|
slog.Int("scanned", out.Scanned),
|
||||||
slog.Int("embedded", out.Embedded),
|
slog.Int("embedded", out.Embedded),
|
||||||
slog.Int("failed", out.Failed),
|
slog.Int("failed", out.Failed),
|
||||||
|
|||||||
@@ -2,12 +2,10 @@ package tools
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"log/slog"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
"golang.org/x/sync/errgroup"
|
|
||||||
|
|
||||||
"git.warky.dev/wdevs/amcs/internal/ai"
|
"git.warky.dev/wdevs/amcs/internal/ai"
|
||||||
"git.warky.dev/wdevs/amcs/internal/config"
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
@@ -17,14 +15,24 @@ import (
|
|||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// EmbeddingQueuer queues a thought for background embedding generation.
|
||||||
|
type EmbeddingQueuer interface {
|
||||||
|
QueueThought(ctx context.Context, id uuid.UUID, content string)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MetadataQueuer queues a thought for background metadata retry. Both
|
||||||
|
// MetadataRetryer and EnrichmentRetryer satisfy this.
|
||||||
|
type MetadataQueuer interface {
|
||||||
|
QueueThought(id uuid.UUID)
|
||||||
|
}
|
||||||
|
|
||||||
type CaptureTool struct {
|
type CaptureTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
capture config.CaptureConfig
|
capture config.CaptureConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
metadataTimeout time.Duration
|
retryer MetadataQueuer
|
||||||
retryer *MetadataRetryer
|
embedRetryer EmbeddingQueuer
|
||||||
log *slog.Logger
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type CaptureInput struct {
|
type CaptureInput struct {
|
||||||
@@ -36,8 +44,8 @@ type CaptureOutput struct {
|
|||||||
Thought thoughttypes.Thought `json:"thought"`
|
Thought thoughttypes.Thought `json:"thought"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, log *slog.Logger) *CaptureTool {
|
func NewCaptureTool(db *store.DB, embeddings *ai.EmbeddingRunner, capture config.CaptureConfig, sessions *session.ActiveProjects, retryer MetadataQueuer, embedRetryer EmbeddingQueuer) *CaptureTool {
|
||||||
return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, log: log}
|
return &CaptureTool{store: db, embeddings: embeddings, capture: capture, sessions: sessions, retryer: retryer, embedRetryer: embedRetryer}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) {
|
func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) {
|
||||||
@@ -51,61 +59,30 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
|||||||
return nil, CaptureOutput{}, err
|
return nil, CaptureOutput{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var embedding []float32
|
|
||||||
rawMetadata := metadata.Fallback(t.capture)
|
rawMetadata := metadata.Fallback(t.capture)
|
||||||
metadataNeedsRetry := false
|
rawMetadata.MetadataStatus = metadata.MetadataStatusPending
|
||||||
|
|
||||||
group, groupCtx := errgroup.WithContext(ctx)
|
|
||||||
group.Go(func() error {
|
|
||||||
vector, err := t.provider.Embed(groupCtx, content)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
embedding = vector
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
group.Go(func() error {
|
|
||||||
metaCtx := groupCtx
|
|
||||||
attemptedAt := time.Now().UTC()
|
|
||||||
if t.metadataTimeout > 0 {
|
|
||||||
var cancel context.CancelFunc
|
|
||||||
metaCtx, cancel = context.WithTimeout(groupCtx, t.metadataTimeout)
|
|
||||||
defer cancel()
|
|
||||||
}
|
|
||||||
extracted, err := t.provider.ExtractMetadata(metaCtx, content)
|
|
||||||
if err != nil {
|
|
||||||
t.log.Warn("metadata extraction failed, using fallback", slog.String("provider", t.provider.Name()), slog.String("error", err.Error()))
|
|
||||||
rawMetadata = metadata.MarkMetadataPending(rawMetadata, t.capture, attemptedAt, err)
|
|
||||||
metadataNeedsRetry = true
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
rawMetadata = metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt)
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
if err := group.Wait(); err != nil {
|
|
||||||
return nil, CaptureOutput{}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
thought := thoughttypes.Thought{
|
thought := thoughttypes.Thought{
|
||||||
Content: content,
|
Content: content,
|
||||||
Embedding: embedding,
|
Metadata: rawMetadata,
|
||||||
Metadata: metadata.Normalize(metadata.SanitizeExtracted(rawMetadata), t.capture),
|
|
||||||
}
|
}
|
||||||
if project != nil {
|
if project != nil {
|
||||||
thought.ProjectID = &project.ID
|
thought.ProjectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
created, err := t.store.InsertThought(ctx, thought, t.provider.EmbeddingModel())
|
created, err := t.store.InsertThought(ctx, thought, t.embeddings.PrimaryModel())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, CaptureOutput{}, err
|
return nil, CaptureOutput{}, err
|
||||||
}
|
}
|
||||||
if project != nil {
|
if project != nil {
|
||||||
_ = t.store.TouchProject(ctx, project.ID)
|
_ = t.store.TouchProject(ctx, project.ID)
|
||||||
}
|
}
|
||||||
if metadataNeedsRetry && t.retryer != nil {
|
|
||||||
|
if t.retryer != nil {
|
||||||
t.retryer.QueueThought(created.ID)
|
t.retryer.QueueThought(created.ID)
|
||||||
}
|
}
|
||||||
|
if t.embedRetryer != nil {
|
||||||
|
t.embedRetryer.QueueThought(ctx, created.ID, content)
|
||||||
|
}
|
||||||
|
|
||||||
return nil, CaptureOutput{Thought: created}, nil
|
return nil, CaptureOutput{Thought: created}, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type ContextTool struct {
|
type ContextTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
}
|
}
|
||||||
|
|
||||||
type ProjectContextInput struct {
|
type ProjectContextInput struct {
|
||||||
@@ -41,8 +41,8 @@ type ProjectContextOutput struct {
|
|||||||
Items []ContextItem `json:"items"`
|
Items []ContextItem `json:"items"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewContextTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool {
|
func NewContextTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool {
|
||||||
return &ContextTool{store: db, provider: provider, search: search, sessions: sessions}
|
return &ContextTool{store: db, embeddings: embeddings, search: search, sessions: sessions}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ProjectContextInput) (*mcp.CallToolResult, ProjectContextOutput, error) {
|
func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ProjectContextInput) (*mcp.CallToolResult, ProjectContextOutput, error) {
|
||||||
@@ -72,7 +72,7 @@ func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in P
|
|||||||
|
|
||||||
query := strings.TrimSpace(in.Query)
|
query := strings.TrimSpace(in.Query)
|
||||||
if query != "" {
|
if query != "" {
|
||||||
semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil)
|
semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, ProjectContextOutput{}, err
|
return nil, ProjectContextOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
227
internal/tools/enrichment_retry.go
Normal file
227
internal/tools/enrichment_retry.go
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
"golang.org/x/sync/semaphore"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/ai"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/metadata"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/session"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/store"
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
const enrichmentRetryConcurrency = 4
|
||||||
|
const enrichmentRetryMaxAttempts = 5
|
||||||
|
|
||||||
|
var enrichmentRetryBackoff = []time.Duration{
|
||||||
|
30 * time.Second,
|
||||||
|
2 * time.Minute,
|
||||||
|
10 * time.Minute,
|
||||||
|
30 * time.Minute,
|
||||||
|
2 * time.Hour,
|
||||||
|
}
|
||||||
|
|
||||||
|
type EnrichmentRetryer struct {
|
||||||
|
backgroundCtx context.Context
|
||||||
|
store *store.DB
|
||||||
|
metadata *ai.MetadataRunner
|
||||||
|
capture config.CaptureConfig
|
||||||
|
sessions *session.ActiveProjects
|
||||||
|
metadataTimeout time.Duration
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
type RetryEnrichmentTool struct {
|
||||||
|
retryer *EnrichmentRetryer
|
||||||
|
}
|
||||||
|
|
||||||
|
type RetryEnrichmentInput struct {
|
||||||
|
Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"`
|
||||||
|
Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"`
|
||||||
|
IncludeArchived bool `json:"include_archived,omitempty" jsonschema:"whether to include archived thoughts; defaults to false"`
|
||||||
|
OlderThanDays int `json:"older_than_days,omitempty" jsonschema:"only retry thoughts whose last metadata attempt was at least N days ago; 0 means no restriction"`
|
||||||
|
DryRun bool `json:"dry_run,omitempty" jsonschema:"report counts without retrying metadata extraction"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RetryEnrichmentFailure struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Error string `json:"error"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RetryEnrichmentOutput struct {
|
||||||
|
Scanned int `json:"scanned"`
|
||||||
|
Retried int `json:"retried"`
|
||||||
|
Updated int `json:"updated"`
|
||||||
|
Skipped int `json:"skipped"`
|
||||||
|
Failed int `json:"failed"`
|
||||||
|
DryRun bool `json:"dry_run"`
|
||||||
|
Failures []RetryEnrichmentFailure `json:"failures,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer {
|
||||||
|
if backgroundCtx == nil {
|
||||||
|
backgroundCtx = context.Background()
|
||||||
|
}
|
||||||
|
return &EnrichmentRetryer{
|
||||||
|
backgroundCtx: backgroundCtx,
|
||||||
|
store: db,
|
||||||
|
metadata: metadataRunner,
|
||||||
|
capture: capture,
|
||||||
|
sessions: sessions,
|
||||||
|
metadataTimeout: metadataTimeout,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewRetryEnrichmentTool(retryer *EnrichmentRetryer) *RetryEnrichmentTool {
|
||||||
|
return &RetryEnrichmentTool{retryer: retryer}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *RetryEnrichmentTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) {
|
||||||
|
return t.retryer.Handle(ctx, req, in)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) {
|
||||||
|
go func() {
|
||||||
|
started := time.Now()
|
||||||
|
r.logger.Info("background metadata started",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
)
|
||||||
|
updated, err := r.retryOne(r.backgroundCtx, id)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Warn("background metadata error",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.logger.Info("background metadata complete",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Bool("updated", updated),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
)
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EnrichmentRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) {
|
||||||
|
limit := in.Limit
|
||||||
|
if limit <= 0 {
|
||||||
|
limit = 100
|
||||||
|
}
|
||||||
|
|
||||||
|
project, err := resolveProject(ctx, r.store, r.sessions, req, in.Project, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, RetryEnrichmentOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var projectID *uuid.UUID
|
||||||
|
if project != nil {
|
||||||
|
projectID = &project.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
thoughts, err := r.store.ListThoughtsPendingMetadataRetry(ctx, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
||||||
|
if err != nil {
|
||||||
|
return nil, RetryEnrichmentOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
out := RetryEnrichmentOutput{Scanned: len(thoughts), DryRun: in.DryRun}
|
||||||
|
if in.DryRun || len(thoughts) == 0 {
|
||||||
|
return nil, out, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sem := semaphore.NewWeighted(enrichmentRetryConcurrency)
|
||||||
|
var mu sync.Mutex
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
|
||||||
|
for _, thought := range thoughts {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err := sem.Acquire(ctx, 1); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
wg.Add(1)
|
||||||
|
go func(thought thoughttypes.Thought) {
|
||||||
|
defer wg.Done()
|
||||||
|
defer sem.Release(1)
|
||||||
|
|
||||||
|
mu.Lock()
|
||||||
|
out.Retried++
|
||||||
|
mu.Unlock()
|
||||||
|
|
||||||
|
updated, err := r.retryOne(ctx, thought.ID)
|
||||||
|
if err != nil {
|
||||||
|
mu.Lock()
|
||||||
|
out.Failures = append(out.Failures, RetryEnrichmentFailure{ID: thought.ID.String(), Error: err.Error()})
|
||||||
|
mu.Unlock()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if updated {
|
||||||
|
mu.Lock()
|
||||||
|
out.Updated++
|
||||||
|
mu.Unlock()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
mu.Lock()
|
||||||
|
out.Skipped++
|
||||||
|
mu.Unlock()
|
||||||
|
}(thought)
|
||||||
|
}
|
||||||
|
|
||||||
|
wg.Wait()
|
||||||
|
out.Failed = len(out.Failures)
|
||||||
|
|
||||||
|
return nil, out, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *EnrichmentRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, error) {
|
||||||
|
thought, err := r.store.GetThought(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
if thought.Metadata.MetadataStatus == metadata.MetadataStatusComplete {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
attemptCtx := ctx
|
||||||
|
if r.metadataTimeout > 0 {
|
||||||
|
var cancel context.CancelFunc
|
||||||
|
attemptCtx, cancel = context.WithTimeout(ctx, r.metadataTimeout)
|
||||||
|
defer cancel()
|
||||||
|
}
|
||||||
|
|
||||||
|
attemptedAt := time.Now().UTC()
|
||||||
|
extracted, extractErr := r.metadata.ExtractMetadata(attemptCtx, thought.Content)
|
||||||
|
if extractErr != nil {
|
||||||
|
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
||||||
|
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
||||||
|
return false, updateErr
|
||||||
|
}
|
||||||
|
return false, extractErr
|
||||||
|
}
|
||||||
|
|
||||||
|
completedMetadata := metadata.MarkMetadataComplete(metadata.SanitizeExtracted(extracted), r.capture, attemptedAt)
|
||||||
|
completedMetadata.Attachments = thought.Metadata.Attachments
|
||||||
|
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, completedMetadata); updateErr != nil {
|
||||||
|
return false, updateErr
|
||||||
|
}
|
||||||
|
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
174
internal/tools/learnings.go
Normal file
174
internal/tools/learnings.go
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||||
|
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/config"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/session"
|
||||||
|
"git.warky.dev/wdevs/amcs/internal/store"
|
||||||
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LearningsTool exposes the MCP operations for curated learnings:
// create (Add), fetch by id (Get), and filtered listing (List).
type LearningsTool struct {
	store    *store.DB               // persistence layer for learnings and projects
	sessions *session.ActiveProjects // used by resolveProject to find the caller's active project
	cfg      config.SearchConfig     // supplies the default/maximum list limit (see normalizeLimit)
}
|
||||||
|
|
||||||
|
// AddLearningInput is the payload for creating a learning.
// Only Summary is required; Category, Area, Status, Priority, and
// Confidence fall back to defaults when blank (see Add).
type AddLearningInput struct {
	Summary string `json:"summary" jsonschema:"short curated learning summary"`
	Details string `json:"details,omitempty" jsonschema:"optional detailed learning body"`
	// Category defaults to "insight" and Area to "other" when blank.
	Category string `json:"category,omitempty"`
	Area     string `json:"area,omitempty"`
	// Status/Priority/Confidence default to pending/medium/hypothesis when blank.
	Status         string `json:"status,omitempty"`
	Priority       string `json:"priority,omitempty"`
	Confidence     string `json:"confidence,omitempty"`
	ActionRequired *bool  `json:"action_required,omitempty"`
	SourceType     string `json:"source_type,omitempty"`
	SourceRef      string `json:"source_ref,omitempty"`
	Project        string `json:"project,omitempty" jsonschema:"project name or id; falls back to active session project"`
	// Optional cross-references to other records.
	RelatedThoughtID      *uuid.UUID `json:"related_thought_id,omitempty"`
	RelatedSkillID        *uuid.UUID `json:"related_skill_id,omitempty"`
	ReviewedBy            *string    `json:"reviewed_by,omitempty"`
	DuplicateOfLearningID *uuid.UUID `json:"duplicate_of_learning_id,omitempty"`
	SupersedesLearningID  *uuid.UUID `json:"supersedes_learning_id,omitempty"`
	// Tags are trimmed and de-duplicated before storage (see normalizeStringSlice).
	Tags []string `json:"tags,omitempty"`
}

// AddLearningOutput wraps the newly persisted learning as returned by the store.
type AddLearningOutput struct {
	Learning thoughttypes.Learning `json:"learning"`
}
|
||||||
|
|
||||||
|
// GetLearningInput identifies a single learning to fetch.
type GetLearningInput struct {
	ID uuid.UUID `json:"id" jsonschema:"learning id"`
}

// GetLearningOutput wraps the fetched learning.
type GetLearningOutput struct {
	Learning thoughttypes.Learning `json:"learning"`
}
|
||||||
|
|
||||||
|
// ListLearningsInput carries the optional filters for listing learnings.
// All filter fields are combined; blank fields are ignored.
type ListLearningsInput struct {
	// Limit is clamped/defaulted via normalizeLimit and the tool's SearchConfig.
	Limit    int    `json:"limit,omitempty"`
	Project  string `json:"project,omitempty" jsonschema:"project name or id; falls back to active session project"`
	Category string `json:"category,omitempty"`
	Area     string `json:"area,omitempty"`
	Status   string `json:"status,omitempty"`
	Priority string `json:"priority,omitempty"`
	Tag      string `json:"tag,omitempty"`
	Query    string `json:"query,omitempty"`
}

// ListLearningsOutput wraps the matching learnings.
type ListLearningsOutput struct {
	Learnings []thoughttypes.Learning `json:"learnings"`
}
|
||||||
|
|
||||||
|
func NewLearningsTool(db *store.DB, sessions *session.ActiveProjects, cfg config.SearchConfig) *LearningsTool {
|
||||||
|
return &LearningsTool{store: db, sessions: sessions, cfg: cfg}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) Add(ctx context.Context, req *mcp.CallToolRequest, in AddLearningInput) (*mcp.CallToolResult, AddLearningOutput, error) {
|
||||||
|
summary := strings.TrimSpace(in.Summary)
|
||||||
|
if summary == "" {
|
||||||
|
return nil, AddLearningOutput{}, errRequiredField("summary")
|
||||||
|
}
|
||||||
|
|
||||||
|
project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, AddLearningOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
learning := thoughttypes.Learning{
|
||||||
|
Summary: summary,
|
||||||
|
Details: strings.TrimSpace(in.Details),
|
||||||
|
Category: defaultString(strings.TrimSpace(in.Category), "insight"),
|
||||||
|
Area: defaultString(strings.TrimSpace(in.Area), "other"),
|
||||||
|
Status: thoughttypes.LearningStatus(defaultString(strings.TrimSpace(in.Status), string(thoughttypes.LearningStatusPending))),
|
||||||
|
Priority: thoughttypes.LearningPriority(defaultString(strings.TrimSpace(in.Priority), string(thoughttypes.LearningPriorityMedium))),
|
||||||
|
Confidence: thoughttypes.LearningEvidenceLevel(defaultString(strings.TrimSpace(in.Confidence), string(thoughttypes.LearningEvidenceHypothesis))),
|
||||||
|
SourceType: strings.TrimSpace(in.SourceType),
|
||||||
|
SourceRef: strings.TrimSpace(in.SourceRef),
|
||||||
|
RelatedThoughtID: in.RelatedThoughtID,
|
||||||
|
RelatedSkillID: in.RelatedSkillID,
|
||||||
|
ReviewedBy: in.ReviewedBy,
|
||||||
|
DuplicateOfLearningID: in.DuplicateOfLearningID,
|
||||||
|
SupersedesLearningID: in.SupersedesLearningID,
|
||||||
|
Tags: normalizeStringSlice(in.Tags),
|
||||||
|
}
|
||||||
|
if in.ActionRequired != nil {
|
||||||
|
learning.ActionRequired = *in.ActionRequired
|
||||||
|
}
|
||||||
|
if project != nil {
|
||||||
|
learning.ProjectID = &project.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
created, err := t.store.CreateLearning(ctx, learning)
|
||||||
|
if err != nil {
|
||||||
|
return nil, AddLearningOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, AddLearningOutput{Learning: created}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) Get(ctx context.Context, _ *mcp.CallToolRequest, in GetLearningInput) (*mcp.CallToolResult, GetLearningOutput, error) {
|
||||||
|
learning, err := t.store.GetLearning(ctx, in.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, GetLearningOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, GetLearningOutput{Learning: learning}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *LearningsTool) List(ctx context.Context, req *mcp.CallToolRequest, in ListLearningsInput) (*mcp.CallToolResult, ListLearningsOutput, error) {
|
||||||
|
project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, ListLearningsOutput{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
filter := thoughttypes.LearningFilter{
|
||||||
|
Limit: normalizeLimit(in.Limit, t.cfg),
|
||||||
|
Category: strings.TrimSpace(in.Category),
|
||||||
|
Area: strings.TrimSpace(in.Area),
|
||||||
|
Status: strings.TrimSpace(in.Status),
|
||||||
|
Priority: strings.TrimSpace(in.Priority),
|
||||||
|
Tag: strings.TrimSpace(in.Tag),
|
||||||
|
Query: strings.TrimSpace(in.Query),
|
||||||
|
}
|
||||||
|
if project != nil {
|
||||||
|
filter.ProjectID = &project.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
items, err := t.store.ListLearnings(ctx, filter)
|
||||||
|
if err != nil {
|
||||||
|
return nil, ListLearningsOutput{}, err
|
||||||
|
}
|
||||||
|
return nil, ListLearningsOutput{Learnings: items}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// defaultString returns value unless it is empty, in which case it
// returns fallback. Callers are expected to trim value first.
func defaultString(value string, fallback string) string {
	if value != "" {
		return value
	}
	return fallback
}
|
||||||
|
|
||||||
|
func normalizeStringSlice(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
out := make([]string, 0, len(values))
|
||||||
|
seen := map[string]struct{}{}
|
||||||
|
for _, value := range values {
|
||||||
|
trimmed := strings.TrimSpace(value)
|
||||||
|
if trimmed == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := seen[trimmed]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[trimmed] = struct{}{}
|
||||||
|
out = append(out, trimmed)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
@@ -13,9 +13,9 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type LinksTool struct {
|
type LinksTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
}
|
}
|
||||||
|
|
||||||
type LinkInput struct {
|
type LinkInput struct {
|
||||||
@@ -47,8 +47,8 @@ type RelatedOutput struct {
|
|||||||
Related []RelatedThought `json:"related"`
|
Related []RelatedThought `json:"related"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewLinksTool(db *store.DB, provider ai.Provider, search config.SearchConfig) *LinksTool {
|
func NewLinksTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig) *LinksTool {
|
||||||
return &LinksTool{store: db, provider: provider, search: search}
|
return &LinksTool{store: db, embeddings: embeddings, search: search}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *LinksTool) Link(ctx context.Context, _ *mcp.CallToolRequest, in LinkInput) (*mcp.CallToolResult, LinkOutput, error) {
|
func (t *LinksTool) Link(ctx context.Context, _ *mcp.CallToolRequest, in LinkInput) (*mcp.CallToolResult, LinkOutput, error) {
|
||||||
@@ -117,7 +117,7 @@ func (t *LinksTool) Related(ctx context.Context, _ *mcp.CallToolRequest, in Rela
|
|||||||
}
|
}
|
||||||
|
|
||||||
if includeSemantic {
|
if includeSemantic {
|
||||||
semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID)
|
semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, RelatedOutput{}, err
|
return nil, RelatedOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,17 +23,47 @@ const metadataRetryConcurrency = 4
|
|||||||
type MetadataRetryer struct {
|
type MetadataRetryer struct {
|
||||||
backgroundCtx context.Context
|
backgroundCtx context.Context
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
metadata *ai.MetadataRunner
|
||||||
capture config.CaptureConfig
|
capture config.CaptureConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
metadataTimeout time.Duration
|
metadataTimeout time.Duration
|
||||||
logger *slog.Logger
|
logger *slog.Logger
|
||||||
|
lock *RetryLocker
|
||||||
}
|
}
|
||||||
|
|
||||||
type RetryMetadataTool struct {
|
type RetryMetadataTool struct {
|
||||||
retryer *MetadataRetryer
|
retryer *MetadataRetryer
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RetryLocker is a process-local, TTL-based lock table keyed by thought id.
// It lets concurrent metadata retries (background queue and tool calls)
// avoid working on the same thought at once. The zero value is usable:
// Acquire lazily initializes the map.
type RetryLocker struct {
	mu    sync.Mutex              // guards locks
	locks map[uuid.UUID]time.Time // thought id -> lock expiry time
}
|
||||||
|
|
||||||
|
func NewRetryLocker() *RetryLocker {
|
||||||
|
return &RetryLocker{locks: map[uuid.UUID]time.Time{}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *RetryLocker) Acquire(id uuid.UUID, ttl time.Duration) bool {
|
||||||
|
l.mu.Lock()
|
||||||
|
defer l.mu.Unlock()
|
||||||
|
if l.locks == nil {
|
||||||
|
l.locks = map[uuid.UUID]time.Time{}
|
||||||
|
}
|
||||||
|
now := time.Now()
|
||||||
|
if exp, ok := l.locks[id]; ok && exp.After(now) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
l.locks[id] = now.Add(ttl)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *RetryLocker) Release(id uuid.UUID) {
|
||||||
|
l.mu.Lock()
|
||||||
|
defer l.mu.Unlock()
|
||||||
|
delete(l.locks, id)
|
||||||
|
}
|
||||||
|
|
||||||
type RetryMetadataInput struct {
|
type RetryMetadataInput struct {
|
||||||
Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"`
|
Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"`
|
||||||
Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"`
|
Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"`
|
||||||
@@ -57,18 +87,19 @@ type RetryMetadataOutput struct {
|
|||||||
Failures []RetryMetadataFailure `json:"failures,omitempty"`
|
Failures []RetryMetadataFailure `json:"failures,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *MetadataRetryer {
|
func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *MetadataRetryer {
|
||||||
if backgroundCtx == nil {
|
if backgroundCtx == nil {
|
||||||
backgroundCtx = context.Background()
|
backgroundCtx = context.Background()
|
||||||
}
|
}
|
||||||
return &MetadataRetryer{
|
return &MetadataRetryer{
|
||||||
backgroundCtx: backgroundCtx,
|
backgroundCtx: backgroundCtx,
|
||||||
store: db,
|
store: db,
|
||||||
provider: provider,
|
metadata: metadataRunner,
|
||||||
capture: capture,
|
capture: capture,
|
||||||
sessions: sessions,
|
sessions: sessions,
|
||||||
metadataTimeout: metadataTimeout,
|
metadataTimeout: metadataTimeout,
|
||||||
logger: logger,
|
logger: logger,
|
||||||
|
lock: NewRetryLocker(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -82,9 +113,35 @@ func (t *RetryMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest
|
|||||||
|
|
||||||
func (r *MetadataRetryer) QueueThought(id uuid.UUID) {
|
func (r *MetadataRetryer) QueueThought(id uuid.UUID) {
|
||||||
go func() {
|
go func() {
|
||||||
if _, err := r.retryOne(r.backgroundCtx, id); err != nil {
|
started := time.Now()
|
||||||
r.logger.Warn("background metadata retry failed", slog.String("thought_id", id.String()), slog.String("error", err.Error()))
|
if !r.lock.Acquire(id, 15*time.Minute) {
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
defer r.lock.Release(id)
|
||||||
|
|
||||||
|
r.logger.Info("background metadata started",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
)
|
||||||
|
updated, err := r.retryOne(r.backgroundCtx, id)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Warn("background metadata error",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
slog.String("error", err.Error()),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.logger.Info("background metadata complete",
|
||||||
|
slog.String("thought_id", id.String()),
|
||||||
|
slog.String("provider", r.metadata.PrimaryProvider()),
|
||||||
|
slog.String("model", r.metadata.PrimaryModel()),
|
||||||
|
slog.Bool("updated", updated),
|
||||||
|
slog.Duration("duration", time.Since(started)),
|
||||||
|
)
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -138,7 +195,14 @@ func (r *MetadataRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest,
|
|||||||
out.Retried++
|
out.Retried++
|
||||||
mu.Unlock()
|
mu.Unlock()
|
||||||
|
|
||||||
|
if !r.lock.Acquire(thought.ID, 15*time.Minute) {
|
||||||
|
mu.Lock()
|
||||||
|
out.Skipped++
|
||||||
|
mu.Unlock()
|
||||||
|
return
|
||||||
|
}
|
||||||
updated, err := r.retryOne(ctx, thought.ID)
|
updated, err := r.retryOne(ctx, thought.ID)
|
||||||
|
r.lock.Release(thought.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
out.Failures = append(out.Failures, RetryMetadataFailure{ID: thought.ID.String(), Error: err.Error()})
|
out.Failures = append(out.Failures, RetryMetadataFailure{ID: thought.ID.String(), Error: err.Error()})
|
||||||
@@ -181,7 +245,7 @@ func (r *MetadataRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, err
|
|||||||
}
|
}
|
||||||
|
|
||||||
attemptedAt := time.Now().UTC()
|
attemptedAt := time.Now().UTC()
|
||||||
extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content)
|
extracted, extractErr := r.metadata.ExtractMetadata(attemptCtx, thought.Content)
|
||||||
if extractErr != nil {
|
if extractErr != nil {
|
||||||
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
||||||
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type RecallTool struct {
|
type RecallTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
}
|
}
|
||||||
|
|
||||||
type RecallInput struct {
|
type RecallInput struct {
|
||||||
@@ -32,8 +32,8 @@ type RecallOutput struct {
|
|||||||
Items []ContextItem `json:"items"`
|
Items []ContextItem `json:"items"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewRecallTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *RecallTool {
|
func NewRecallTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *RecallTool {
|
||||||
return &RecallTool{store: db, provider: provider, search: search, sessions: sessions}
|
return &RecallTool{store: db, embeddings: embeddings, search: search, sessions: sessions}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *RecallTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RecallInput) (*mcp.CallToolResult, RecallOutput, error) {
|
func (t *RecallTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RecallInput) (*mcp.CallToolResult, RecallOutput, error) {
|
||||||
@@ -54,7 +54,7 @@ func (t *RecallTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in Re
|
|||||||
projectID = &project.ID
|
projectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, projectID, nil)
|
semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, projectID, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, RecallOutput{}, err
|
return nil, RecallOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ const metadataReparseConcurrency = 4
|
|||||||
|
|
||||||
type ReparseMetadataTool struct {
|
type ReparseMetadataTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
metadata *ai.MetadataRunner
|
||||||
capture config.CaptureConfig
|
capture config.CaptureConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
logger *slog.Logger
|
logger *slog.Logger
|
||||||
@@ -53,8 +53,8 @@ type ReparseMetadataOutput struct {
|
|||||||
Failures []ReparseMetadataFailure `json:"failures,omitempty"`
|
Failures []ReparseMetadataFailure `json:"failures,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewReparseMetadataTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, sessions *session.ActiveProjects, logger *slog.Logger) *ReparseMetadataTool {
|
func NewReparseMetadataTool(db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, sessions *session.ActiveProjects, logger *slog.Logger) *ReparseMetadataTool {
|
||||||
return &ReparseMetadataTool{store: db, provider: provider, capture: capture, sessions: sessions, logger: logger}
|
return &ReparseMetadataTool{store: db, metadata: metadataRunner, capture: capture, sessions: sessions, logger: logger}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *ReparseMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ReparseMetadataInput) (*mcp.CallToolResult, ReparseMetadataOutput, error) {
|
func (t *ReparseMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ReparseMetadataInput) (*mcp.CallToolResult, ReparseMetadataOutput, error) {
|
||||||
@@ -107,7 +107,7 @@ func (t *ReparseMetadataTool) Handle(ctx context.Context, req *mcp.CallToolReque
|
|||||||
normalizedCurrent := metadata.Normalize(thought.Metadata, t.capture)
|
normalizedCurrent := metadata.Normalize(thought.Metadata, t.capture)
|
||||||
|
|
||||||
attemptedAt := time.Now().UTC()
|
attemptedAt := time.Now().UTC()
|
||||||
extracted, extractErr := t.provider.ExtractMetadata(ctx, thought.Content)
|
extracted, extractErr := t.metadata.ExtractMetadata(ctx, thought.Content)
|
||||||
normalizedTarget := normalizedCurrent
|
normalizedTarget := normalizedCurrent
|
||||||
if extractErr != nil {
|
if extractErr != nil {
|
||||||
normalizedTarget = metadata.MarkMetadataFailed(normalizedCurrent, t.capture, attemptedAt, extractErr)
|
normalizedTarget = metadata.MarkMetadataFailed(normalizedCurrent, t.capture, attemptedAt, extractErr)
|
||||||
|
|||||||
@@ -11,12 +11,14 @@ import (
|
|||||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
// semanticSearch runs vector similarity search if embeddings exist for the active model
|
// semanticSearch runs vector similarity search if embeddings exist for the
|
||||||
// in the given scope, otherwise falls back to Postgres full-text search.
|
// primary embedding model in the given scope, otherwise falls back to Postgres
|
||||||
|
// full-text search. Search always uses the primary model so query vectors
|
||||||
|
// match rows stored under the primary model name.
|
||||||
func semanticSearch(
|
func semanticSearch(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
db *store.DB,
|
db *store.DB,
|
||||||
provider ai.Provider,
|
embeddings *ai.EmbeddingRunner,
|
||||||
search config.SearchConfig,
|
search config.SearchConfig,
|
||||||
query string,
|
query string,
|
||||||
limit int,
|
limit int,
|
||||||
@@ -24,17 +26,18 @@ func semanticSearch(
|
|||||||
projectID *uuid.UUID,
|
projectID *uuid.UUID,
|
||||||
excludeID *uuid.UUID,
|
excludeID *uuid.UUID,
|
||||||
) ([]thoughttypes.SearchResult, error) {
|
) ([]thoughttypes.SearchResult, error) {
|
||||||
hasEmbeddings, err := db.HasEmbeddingsForModel(ctx, provider.EmbeddingModel(), projectID)
|
model := embeddings.PrimaryModel()
|
||||||
|
hasEmbeddings, err := db.HasEmbeddingsForModel(ctx, model, projectID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if hasEmbeddings {
|
if hasEmbeddings {
|
||||||
embedding, err := provider.Embed(ctx, query)
|
embedding, err := embeddings.EmbedPrimary(ctx, query)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return db.SearchSimilarThoughts(ctx, embedding, provider.EmbeddingModel(), threshold, limit, projectID, excludeID)
|
return db.SearchSimilarThoughts(ctx, embedding, model, threshold, limit, projectID, excludeID)
|
||||||
}
|
}
|
||||||
|
|
||||||
return db.SearchThoughtsText(ctx, query, limit, projectID, excludeID)
|
return db.SearchThoughtsText(ctx, query, limit, projectID, excludeID)
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type SearchTool struct {
|
type SearchTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
search config.SearchConfig
|
||||||
sessions *session.ActiveProjects
|
sessions *session.ActiveProjects
|
||||||
}
|
}
|
||||||
|
|
||||||
type SearchInput struct {
|
type SearchInput struct {
|
||||||
@@ -32,8 +32,8 @@ type SearchOutput struct {
|
|||||||
Results []thoughttypes.SearchResult `json:"results"`
|
Results []thoughttypes.SearchResult `json:"results"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewSearchTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *SearchTool {
|
func NewSearchTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *SearchTool {
|
||||||
return &SearchTool{store: db, provider: provider, search: search, sessions: sessions}
|
return &SearchTool{store: db, embeddings: embeddings, search: search, sessions: sessions}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *SearchTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in SearchInput) (*mcp.CallToolResult, SearchOutput, error) {
|
func (t *SearchTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in SearchInput) (*mcp.CallToolResult, SearchOutput, error) {
|
||||||
@@ -56,7 +56,7 @@ func (t *SearchTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in Se
|
|||||||
_ = t.store.TouchProject(ctx, project.ID)
|
_ = t.store.TouchProject(ctx, project.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
results, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, threshold, projectID, nil)
|
results, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, threshold, projectID, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, SearchOutput{}, err
|
return nil, SearchOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,10 +14,11 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type SummarizeTool struct {
|
type SummarizeTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
search config.SearchConfig
|
metadata *ai.MetadataRunner
|
||||||
sessions *session.ActiveProjects
|
search config.SearchConfig
|
||||||
|
sessions *session.ActiveProjects
|
||||||
}
|
}
|
||||||
|
|
||||||
type SummarizeInput struct {
|
type SummarizeInput struct {
|
||||||
@@ -32,8 +33,8 @@ type SummarizeOutput struct {
|
|||||||
Count int `json:"count"`
|
Count int `json:"count"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewSummarizeTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *SummarizeTool {
|
func NewSummarizeTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, search config.SearchConfig, sessions *session.ActiveProjects) *SummarizeTool {
|
||||||
return &SummarizeTool{store: db, provider: provider, search: search, sessions: sessions}
|
return &SummarizeTool{store: db, embeddings: embeddings, metadata: metadata, search: search, sessions: sessions}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in SummarizeInput) (*mcp.CallToolResult, SummarizeOutput, error) {
|
func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in SummarizeInput) (*mcp.CallToolResult, SummarizeOutput, error) {
|
||||||
@@ -52,7 +53,7 @@ func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
if project != nil {
|
if project != nil {
|
||||||
projectID = &project.ID
|
projectID = &project.ID
|
||||||
}
|
}
|
||||||
results, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, projectID, nil)
|
results, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, projectID, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, SummarizeOutput{}, err
|
return nil, SummarizeOutput{}, err
|
||||||
}
|
}
|
||||||
@@ -77,7 +78,7 @@ func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in
|
|||||||
|
|
||||||
userPrompt := formatContextBlock("Summarize the following thoughts into concise prose with themes, action items, and notable people.", lines)
|
userPrompt := formatContextBlock("Summarize the following thoughts into concise prose with themes, action items, and notable people.", lines)
|
||||||
systemPrompt := "You summarize note collections. Be concise, concrete, and structured in plain prose."
|
systemPrompt := "You summarize note collections. Be concise, concrete, and structured in plain prose."
|
||||||
summary, err := t.provider.Summarize(ctx, systemPrompt, userPrompt)
|
summary, err := t.metadata.Summarize(ctx, systemPrompt, userPrompt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, SummarizeOutput{}, err
|
return nil, SummarizeOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,10 +16,11 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type UpdateTool struct {
|
type UpdateTool struct {
|
||||||
store *store.DB
|
store *store.DB
|
||||||
provider ai.Provider
|
embeddings *ai.EmbeddingRunner
|
||||||
capture config.CaptureConfig
|
metadata *ai.MetadataRunner
|
||||||
log *slog.Logger
|
capture config.CaptureConfig
|
||||||
|
log *slog.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
type UpdateInput struct {
|
type UpdateInput struct {
|
||||||
@@ -33,8 +34,8 @@ type UpdateOutput struct {
|
|||||||
Thought thoughttypes.Thought `json:"thought"`
|
Thought thoughttypes.Thought `json:"thought"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewUpdateTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, log *slog.Logger) *UpdateTool {
|
func NewUpdateTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, capture config.CaptureConfig, log *slog.Logger) *UpdateTool {
|
||||||
return &UpdateTool{store: db, provider: provider, capture: capture, log: log}
|
return &UpdateTool{store: db, embeddings: embeddings, metadata: metadata, capture: capture, log: log}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in UpdateInput) (*mcp.CallToolResult, UpdateOutput, error) {
|
func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in UpdateInput) (*mcp.CallToolResult, UpdateOutput, error) {
|
||||||
@@ -50,6 +51,7 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda
|
|||||||
|
|
||||||
content := current.Content
|
content := current.Content
|
||||||
var embedding []float32
|
var embedding []float32
|
||||||
|
embeddingModel := ""
|
||||||
mergedMetadata := current.Metadata
|
mergedMetadata := current.Metadata
|
||||||
projectID := current.ProjectID
|
projectID := current.ProjectID
|
||||||
|
|
||||||
@@ -58,11 +60,13 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda
|
|||||||
if content == "" {
|
if content == "" {
|
||||||
return nil, UpdateOutput{}, errInvalidInput("content must not be empty")
|
return nil, UpdateOutput{}, errInvalidInput("content must not be empty")
|
||||||
}
|
}
|
||||||
embedding, err = t.provider.Embed(ctx, content)
|
embedResult, err := t.embeddings.Embed(ctx, content)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, UpdateOutput{}, err
|
return nil, UpdateOutput{}, err
|
||||||
}
|
}
|
||||||
extracted, extractErr := t.provider.ExtractMetadata(ctx, content)
|
embedding = embedResult.Vector
|
||||||
|
embeddingModel = embedResult.Model
|
||||||
|
extracted, extractErr := t.metadata.ExtractMetadata(ctx, content)
|
||||||
if extractErr != nil {
|
if extractErr != nil {
|
||||||
t.log.Warn("metadata extraction failed during update, keeping current metadata", slog.String("error", extractErr.Error()))
|
t.log.Warn("metadata extraction failed during update, keeping current metadata", slog.String("error", extractErr.Error()))
|
||||||
mergedMetadata = metadata.MarkMetadataFailed(mergedMetadata, t.capture, time.Now().UTC(), extractErr)
|
mergedMetadata = metadata.MarkMetadataFailed(mergedMetadata, t.capture, time.Now().UTC(), extractErr)
|
||||||
@@ -82,7 +86,7 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda
|
|||||||
projectID = &project.ID
|
projectID = &project.ID
|
||||||
}
|
}
|
||||||
|
|
||||||
updated, err := t.store.UpdateThought(ctx, id, content, embedding, t.provider.EmbeddingModel(), mergedMetadata, projectID)
|
updated, err := t.store.UpdateThought(ctx, id, content, embedding, embeddingModel, mergedMetadata, projectID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, UpdateOutput{}, err
|
return nil, UpdateOutput{}, err
|
||||||
}
|
}
|
||||||
|
|||||||
68
internal/types/learning.go
Normal file
68
internal/types/learning.go
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningEvidenceLevel string
|
||||||
|
|
||||||
|
const (
|
||||||
|
LearningEvidenceHypothesis LearningEvidenceLevel = "hypothesis"
|
||||||
|
LearningEvidenceObserved LearningEvidenceLevel = "observed"
|
||||||
|
LearningEvidenceVerified LearningEvidenceLevel = "verified"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
LearningStatusPending LearningStatus = "pending"
|
||||||
|
LearningStatusInProgress LearningStatus = "in_progress"
|
||||||
|
LearningStatusResolved LearningStatus = "resolved"
|
||||||
|
LearningStatusWontFix LearningStatus = "wont_fix"
|
||||||
|
LearningStatusPromoted LearningStatus = "promoted"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningPriority string
|
||||||
|
|
||||||
|
const (
|
||||||
|
LearningPriorityLow LearningPriority = "low"
|
||||||
|
LearningPriorityMedium LearningPriority = "medium"
|
||||||
|
LearningPriorityHigh LearningPriority = "high"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Learning struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
Summary string `json:"summary"`
|
||||||
|
Details string `json:"details"`
|
||||||
|
Category string `json:"category"`
|
||||||
|
Area string `json:"area"`
|
||||||
|
Status LearningStatus `json:"status"`
|
||||||
|
Priority LearningPriority `json:"priority"`
|
||||||
|
Confidence LearningEvidenceLevel `json:"confidence"`
|
||||||
|
ActionRequired bool `json:"action_required"`
|
||||||
|
SourceType string `json:"source_type,omitempty"`
|
||||||
|
SourceRef string `json:"source_ref,omitempty"`
|
||||||
|
ProjectID *uuid.UUID `json:"project_id,omitempty"`
|
||||||
|
RelatedThoughtID *uuid.UUID `json:"related_thought_id,omitempty"`
|
||||||
|
RelatedSkillID *uuid.UUID `json:"related_skill_id,omitempty"`
|
||||||
|
ReviewedBy *string `json:"reviewed_by,omitempty"`
|
||||||
|
ReviewedAt *time.Time `json:"reviewed_at,omitempty"`
|
||||||
|
DuplicateOfLearningID *uuid.UUID `json:"duplicate_of_learning_id,omitempty"`
|
||||||
|
SupersedesLearningID *uuid.UUID `json:"supersedes_learning_id,omitempty"`
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type LearningFilter struct {
|
||||||
|
Limit int
|
||||||
|
ProjectID *uuid.UUID
|
||||||
|
Category string
|
||||||
|
Area string
|
||||||
|
Status string
|
||||||
|
Priority string
|
||||||
|
Tag string
|
||||||
|
Query string
|
||||||
|
}
|
||||||
@@ -52,14 +52,15 @@ type StoredFileFilter struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type Thought struct {
|
type Thought struct {
|
||||||
ID uuid.UUID `json:"id"`
|
ID uuid.UUID `json:"id"`
|
||||||
Content string `json:"content"`
|
Content string `json:"content"`
|
||||||
Embedding []float32 `json:"embedding,omitempty"`
|
Embedding []float32 `json:"embedding,omitempty"`
|
||||||
Metadata ThoughtMetadata `json:"metadata"`
|
EmbeddingStatus string `json:"embedding_status,omitempty"`
|
||||||
ProjectID *uuid.UUID `json:"project_id,omitempty"`
|
Metadata ThoughtMetadata `json:"metadata"`
|
||||||
ArchivedAt *time.Time `json:"archived_at,omitempty"`
|
ProjectID *uuid.UUID `json:"project_id,omitempty"`
|
||||||
CreatedAt time.Time `json:"created_at"`
|
ArchivedAt *time.Time `json:"archived_at,omitempty"`
|
||||||
UpdatedAt time.Time `json:"updated_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type SearchResult struct {
|
type SearchResult struct {
|
||||||
|
|||||||
77
llm/learnings_schema.md
Normal file
77
llm/learnings_schema.md
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# Structured Learnings Schema (v1)
|
||||||
|
|
||||||
|
## Data Model
|
||||||
|
|
||||||
|
| Field | Type | Description |
|
||||||
|
|-------|------|-------------|
|
||||||
|
| **ID** | string | Stable learning identifier |
|
||||||
|
| **Category** | enum | `correction`, `insight`, `knowledge_gap`, `best_practice` |
|
||||||
|
| **Area** | enum | `frontend`, `backend`, `infra`, `tests`, `docs`, `config`, `other` |
|
||||||
|
| **Status** | enum | `pending`, `in_progress`, `resolved`, `wont_f` |
|
||||||
|
| **Priority** | string | e.g., `low`, `medium`, `high` |
|
||||||
|
| **Summary** | string | Brief description |
|
||||||
|
| **Details** | string | Full description / context |
|
||||||
|
| **ProjectID** | string (optional) | Reference to a project |
|
||||||
|
| **ThoughtID** | string (optional) | Reference to a thought |
|
||||||
|
| **SkillID** | string (optional) | Reference to a skill |
|
||||||
|
| **CreatedAt** | timestamp | Creation timestamp |
|
||||||
|
| **UpdatedAt** | timestamp | Last update timestamp |
|
||||||
|
|
||||||
|
## Suggested SQL Definition
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE learnings (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
category TEXT NOT NULL,
|
||||||
|
area TEXT NOT NULL,
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
priority TEXT,
|
||||||
|
summary TEXT,
|
||||||
|
details TEXT,
|
||||||
|
project_id UUID,
|
||||||
|
thought_id UUID,
|
||||||
|
skill_id UUID,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tool Surface (MCP)
|
||||||
|
|
||||||
|
- `create_learning` – insert a new learning record
|
||||||
|
- `list_learnings` – query with optional filters (category, area, status, project, etc.)
|
||||||
|
- `get_learning` – retrieve a single learning by ID
|
||||||
|
- `update_learning` – modify fields (e.g., status, priority) and/or links
|
||||||
|
|
||||||
|
## Enums (Go)
|
||||||
|
|
||||||
|
```go
|
||||||
|
type LearningCategory string
|
||||||
|
const (
|
||||||
|
LearningCategoryCorrection LearningCategory = "correction"
|
||||||
|
LearningCategoryInsight LearningCategory = "insight"
|
||||||
|
LearningCategoryKnowledgeGap LearningCategory = "knowledge_gap"
|
||||||
|
LearningCategoryBestPractice LearningCategory = "best_practice"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningArea string
|
||||||
|
const (
|
||||||
|
LearningAreaFrontend LearningArea = "frontend"
|
||||||
|
LearningAreaBackend LearningArea = "backend"
|
||||||
|
LearningAreaInfra LearningArea = "infra"
|
||||||
|
LearningAreaTests LearningArea = "tests"
|
||||||
|
LearningAreaDocs LearningArea = "docs"
|
||||||
|
LearningAreaConfig LearningArea = "config"
|
||||||
|
LearningAreaOther LearningArea = "other"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LearningStatus string
|
||||||
|
const (
|
||||||
|
LearningStatusPending LearningStatus = "pending"
|
||||||
|
LearningStatusInProgress LearningStatus = "in_progress"
|
||||||
|
LearningStatusResolved LearningStatus = "resolved"
|
||||||
|
LearningStatusWontF LearningStatus = "wont_f"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Let me know if this alignment works or if you’d like any adjustments before I proceed with the implementation.
|
||||||
@@ -2,6 +2,12 @@
|
|||||||
|
|
||||||
AMCS (Avalon Memory Crystal Server) is an MCP server for capturing and retrieving thoughts, memory, and project context. It is backed by Postgres with pgvector for semantic search.
|
AMCS (Avalon Memory Crystal Server) is an MCP server for capturing and retrieving thoughts, memory, and project context. It is backed by Postgres with pgvector for semantic search.
|
||||||
|
|
||||||
|
`amcs-cli` is a pre-built CLI that connects to the AMCS MCP server so agents do not need to implement their own HTTP MCP client. Download it from https://git.warky.dev/wdevs/amcs/releases
|
||||||
|
|
||||||
|
The key command is `amcs-cli stdio`, which bridges the remote HTTP MCP server to a local stdio MCP transport. Register it as a stdio MCP server in your agent config and all AMCS tools are available immediately without any custom client code.
|
||||||
|
|
||||||
|
Configure with `~/.config/amcs/config.yaml` (`server`, `token`), env vars `AMCS_URL` / `AMCS_TOKEN`, or `--server` / `--token` flags.
|
||||||
|
|
||||||
You have access to an MCP memory server named AMCS.
|
You have access to an MCP memory server named AMCS.
|
||||||
|
|
||||||
Use AMCS as memory with two scopes:
|
Use AMCS as memory with two scopes:
|
||||||
|
|||||||
826
llm/options_to_openclaw.md
Normal file
826
llm/options_to_openclaw.md
Normal file
@@ -0,0 +1,826 @@
|
|||||||
|
# AMCS → OpenClaw Alternative: Gap Analysis & Roadmap
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
AMCS is a **passive** MCP memory server. OpenClaw's key differentiator is that it's an **always-on autonomous agent** — it proactively acts, monitors, and learns without human prompting. AMCS has the data model and search foundation; it's missing the execution engine and channel integrations that make OpenClaw compelling.
|
||||||
|
|
||||||
|
OpenClaw's 3 pillars AMCS lacks:
|
||||||
|
1. **Autonomous heartbeat** — scheduled jobs that run without user prompts
|
||||||
|
2. **Channel integrations** — 25+ messaging platforms (Telegram, Slack, Discord, email, etc.)
|
||||||
|
3. **Self-improving memory** — knowledge graph distillation, daily notes, living summary (MEMORY.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1: Autonomous Heartbeat Engine (Critical — unlocks everything else)
|
||||||
|
|
||||||
|
### 1a. Add `Complete()` to AI Provider
|
||||||
|
|
||||||
|
The current `Provider` interface in `internal/ai/provider.go` only supports `Summarize(ctx, systemPrompt, userPrompt)`. An autonomous agent needs a stateful multi-turn call with tool awareness.
|
||||||
|
|
||||||
|
**Extend the interface:**
|
||||||
|
|
||||||
|
```go
|
||||||
|
// internal/ai/provider.go
|
||||||
|
|
||||||
|
type CompletionRole string
|
||||||
|
|
||||||
|
const (
|
||||||
|
RoleSystem CompletionRole = "system"
|
||||||
|
RoleUser CompletionRole = "user"
|
||||||
|
RoleAssistant CompletionRole = "assistant"
|
||||||
|
)
|
||||||
|
|
||||||
|
type CompletionMessage struct {
|
||||||
|
Role CompletionRole `json:"role"`
|
||||||
|
Content string `json:"content"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type CompletionResult struct {
|
||||||
|
Content string `json:"content"`
|
||||||
|
StopReason string `json:"stop_reason"` // "stop" | "length" | "error"
|
||||||
|
Model string `json:"model"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Provider interface {
|
||||||
|
Embed(ctx context.Context, input string) ([]float32, error)
|
||||||
|
ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error)
|
||||||
|
Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error)
|
||||||
|
Complete(ctx context.Context, messages []CompletionMessage) (CompletionResult, error)
|
||||||
|
Name() string
|
||||||
|
EmbeddingModel() string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Implement in `internal/ai/compat/client.go`:**
|
||||||
|
|
||||||
|
`Complete` is a simplification of the existing `extractMetadataWithModel` path — same OpenAI-compatible `/chat/completions` endpoint, same auth headers, no JSON schema coercion. Add a `chatCompletionsRequest` type (reuse or extend the existing unexported struct) and a `Complete` method on `*Client` that:
|
||||||
|
1. Builds the request body from `[]CompletionMessage`
|
||||||
|
2. POSTs to `c.baseURL + "/chat/completions"` with `c.metadataModel`
|
||||||
|
3. Reads the first choice's `message.content`
|
||||||
|
4. Returns `CompletionResult{Content, StopReason, Model}`
|
||||||
|
|
||||||
|
Error handling mirrors the metadata path: on HTTP 429/503 mark the model unhealthy (`c.modelHealth`), return a wrapped error. No fallback model chain needed for agent calls — callers should retry on next heartbeat tick.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1b. Heartbeat Engine Package
|
||||||
|
|
||||||
|
**New package: `internal/agent/`**
|
||||||
|
|
||||||
|
#### `internal/agent/job.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
package agent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Job is a single scheduled unit of autonomous work.
|
||||||
|
type Job interface {
|
||||||
|
Name() string
|
||||||
|
Interval() time.Duration
|
||||||
|
Run(ctx context.Context) error
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `internal/agent/engine.go`
|
||||||
|
|
||||||
|
The engine manages a set of jobs and fires each on its own ticker. It mirrors the pattern already used for `runBackfillPass` and `runMetadataRetryPass` in `internal/app/app.go`, but generalises it.
|
||||||
|
|
||||||
|
```go
|
||||||
|
package agent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Engine struct {
|
||||||
|
jobs []Job
|
||||||
|
store JobStore // persists agent_job_runs rows
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewEngine(store JobStore, logger *slog.Logger, jobs ...Job) *Engine {
|
||||||
|
return &Engine{jobs: jobs, store: store, logger: logger}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run starts all job tickers and blocks until ctx is cancelled.
|
||||||
|
func (e *Engine) Run(ctx context.Context) {
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
for _, job := range e.jobs {
|
||||||
|
wg.Add(1)
|
||||||
|
go func(j Job) {
|
||||||
|
defer wg.Done()
|
||||||
|
e.runLoop(ctx, j)
|
||||||
|
}(job)
|
||||||
|
}
|
||||||
|
wg.Wait()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *Engine) runLoop(ctx context.Context, j Job) {
|
||||||
|
ticker := time.NewTicker(j.Interval())
|
||||||
|
defer ticker.Stop()
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return
|
||||||
|
case <-ticker.C:
|
||||||
|
e.runOnce(ctx, j)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *Engine) runOnce(ctx context.Context, j Job) {
|
||||||
|
runID, err := e.store.StartRun(ctx, j.Name())
|
||||||
|
if err != nil {
|
||||||
|
e.logger.Error("agent: failed to start job run record",
|
||||||
|
slog.String("job", j.Name()), slog.String("error", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := j.Run(ctx); err != nil {
|
||||||
|
e.logger.Error("agent: job failed",
|
||||||
|
slog.String("job", j.Name()), slog.String("error", err.Error()))
|
||||||
|
_ = e.store.FinishRun(ctx, runID, "failed", "", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_ = e.store.FinishRun(ctx, runID, "ok", "", "")
|
||||||
|
e.logger.Info("agent: job complete", slog.String("job", j.Name()))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Deduplication / double-run prevention:** `StartRun` should check for an existing `running` row younger than `2 * j.Interval()` and return `ErrAlreadyRunning` — the caller skips that tick.
|
||||||
|
|
||||||
|
#### `internal/agent/distill.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
// DistillJob clusters semantically related thoughts and promotes
|
||||||
|
// durable insights into knowledge nodes.
|
||||||
|
type DistillJob struct {
|
||||||
|
store store.ThoughtQuerier
|
||||||
|
provider ai.Provider
|
||||||
|
cfg AgentDistillConfig
|
||||||
|
projectID *uuid.UUID // nil = all projects
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j *DistillJob) Name() string { return "distill" }
|
||||||
|
func (j *DistillJob) Interval() time.Duration { return j.cfg.Interval }
|
||||||
|
|
||||||
|
func (j *DistillJob) Run(ctx context.Context) error {
|
||||||
|
// 1. Fetch recent thoughts not yet distilled (metadata.distilled != true)
|
||||||
|
// using store.ListThoughts with filter Days = cfg.MinAgeHours/24
|
||||||
|
// 2. Group into semantic clusters via SearchSimilarThoughts
|
||||||
|
// 3. For each cluster > MinClusterSize:
|
||||||
|
// a. Call provider.Summarize with insight extraction prompt
|
||||||
|
// b. InsertThought with type="insight", metadata.knowledge_node=true
|
||||||
|
// c. InsertLink from each cluster member to the insight, relation="distilled_from"
|
||||||
|
// d. UpdateThought on each source to set metadata.distilled=true
|
||||||
|
// 4. Return nil; partial failures are logged but do not abort the run
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Prompt used in step 3a:
|
||||||
|
```
|
||||||
|
System: You extract durable knowledge from a cluster of related notes.
|
||||||
|
Return a single paragraph (2-5 sentences) capturing the core insight.
|
||||||
|
Do not reference the notes themselves. Write in third person.
|
||||||
|
User: [concatenated thought content, newest first, max 4000 tokens]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `internal/agent/daily_notes.go`
|
||||||
|
|
||||||
|
Runs at a configured hour each day (checked by comparing `time.Now().Hour()` against `cfg.Hour` inside the loop — skip if already ran today by querying `agent_job_runs` for a successful `daily_notes` run with `started_at >= today midnight`).
|
||||||
|
|
||||||
|
Collects:
|
||||||
|
- Thoughts created today (`store.ListThoughts` with `Days=1`)
|
||||||
|
- CRM interactions logged today
|
||||||
|
- Calendar activities for today
|
||||||
|
- Maintenance logs from today
|
||||||
|
|
||||||
|
Formats into a structured markdown string and calls `store.InsertThought` with `type=daily_note`.
|
||||||
|
|
||||||
|
#### `internal/agent/living_summary.go`
|
||||||
|
|
||||||
|
Regenerates `MEMORY.md` from the last N daily notes + all knowledge nodes. Calls `provider.Summarize` and upserts the result via `store.UpsertFile` using a fixed name `MEMORY.md` scoped to the project (or global if no project).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1c. Config Structs
|
||||||
|
|
||||||
|
Add to `internal/config/config.go`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Config struct {
|
||||||
|
// ... existing fields ...
|
||||||
|
Agent AgentConfig `yaml:"agent"`
|
||||||
|
Channels ChannelsConfig `yaml:"channels"`
|
||||||
|
Shell ShellConfig `yaml:"shell"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentConfig struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Distill AgentDistillConfig `yaml:"distill"`
|
||||||
|
DailyNotes AgentDailyNotesConfig `yaml:"daily_notes"`
|
||||||
|
LivingSummary AgentLivingSummary `yaml:"living_summary"`
|
||||||
|
Archival AgentArchivalConfig `yaml:"archival"`
|
||||||
|
Model string `yaml:"model"` // override for agent calls; falls back to AI.Metadata.Model
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentDistillConfig struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Interval time.Duration `yaml:"interval"` // default: 24h
|
||||||
|
BatchSize int `yaml:"batch_size"` // thoughts per run; default: 50
|
||||||
|
MinClusterSize int `yaml:"min_cluster_size"` // default: 3
|
||||||
|
MinAgeHours int `yaml:"min_age_hours"` // ignore thoughts younger than this; default: 6
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentDailyNotesConfig struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Hour int `yaml:"hour"` // 0-23 UTC; default: 23
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentLivingSummary struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Interval time.Duration `yaml:"interval"` // default: 24h
|
||||||
|
MaxDays int `yaml:"max_days"` // daily notes lookback; default: 30
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentArchivalConfig struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Interval time.Duration `yaml:"interval"` // default: 168h (weekly)
|
||||||
|
ArchiveOlderThan int `yaml:"archive_older_than_days"` // default: 90
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Full YAML reference (`configs/dev.yaml` additions):**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
agent:
|
||||||
|
enabled: false
|
||||||
|
model: "" # leave blank to reuse ai.metadata.model
|
||||||
|
distill:
|
||||||
|
enabled: false
|
||||||
|
interval: 24h
|
||||||
|
batch_size: 50
|
||||||
|
min_cluster_size: 3
|
||||||
|
min_age_hours: 6
|
||||||
|
daily_notes:
|
||||||
|
enabled: false
|
||||||
|
hour: 23 # UTC hour to generate (0–23)
|
||||||
|
living_summary:
|
||||||
|
enabled: false
|
||||||
|
interval: 24h
|
||||||
|
max_days: 30
|
||||||
|
archival:
|
||||||
|
enabled: false
|
||||||
|
interval: 168h
|
||||||
|
archive_older_than_days: 90
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1d. Wire into `internal/app/app.go`
|
||||||
|
|
||||||
|
After the existing `MetadataRetry` goroutine block:
|
||||||
|
|
||||||
|
```go
|
||||||
|
if cfg.Agent.Enabled {
|
||||||
|
jobStore := store.NewJobStore(db)
|
||||||
|
var jobs []agent.Job
|
||||||
|
if cfg.Agent.Distill.Enabled {
|
||||||
|
jobs = append(jobs, agent.NewDistillJob(db, provider, cfg.Agent.Distill, nil))
|
||||||
|
}
|
||||||
|
if cfg.Agent.DailyNotes.Enabled {
|
||||||
|
jobs = append(jobs, agent.NewDailyNotesJob(db, provider, cfg.Agent.DailyNotes))
|
||||||
|
}
|
||||||
|
if cfg.Agent.LivingSummary.Enabled {
|
||||||
|
jobs = append(jobs, agent.NewLivingSummaryJob(db, provider, cfg.Agent.LivingSummary))
|
||||||
|
}
|
||||||
|
if cfg.Agent.Archival.Enabled {
|
||||||
|
jobs = append(jobs, agent.NewArchivalJob(db, cfg.Agent.Archival))
|
||||||
|
}
|
||||||
|
engine := agent.NewEngine(jobStore, logger, jobs...)
|
||||||
|
go engine.Run(ctx)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1e. New MCP Tools — `internal/tools/agent.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
// list_agent_jobs
|
||||||
|
// Returns all registered jobs with: name, interval, last_run (status, started_at, finished_at), next_run estimate.
|
||||||
|
|
||||||
|
// trigger_agent_job
|
||||||
|
// Input: { "job": "distill" }
|
||||||
|
// Fires the job immediately in a goroutine; returns a run_id for polling.
|
||||||
|
|
||||||
|
// get_agent_job_history
|
||||||
|
// Input: { "job": "distill", "limit": 20 }
|
||||||
|
// Returns rows from agent_job_runs ordered by started_at DESC.
|
||||||
|
```
|
||||||
|
|
||||||
|
Register in `internal/app/app.go` routes by adding `Agent tools.AgentTool` to `mcpserver.ToolSet` and wiring `tools.NewAgentTool(engine)`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1f. Migration — `migrations/021_agent_jobs.sql`
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE agent_job_runs (
|
||||||
|
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
job_name text NOT NULL,
|
||||||
|
started_at timestamptz NOT NULL DEFAULT now(),
|
||||||
|
finished_at timestamptz,
|
||||||
|
status text NOT NULL DEFAULT 'running', -- running | ok | failed | skipped
|
||||||
|
output text,
|
||||||
|
error text,
|
||||||
|
metadata jsonb NOT NULL DEFAULT '{}'
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_agent_job_runs_lookup
|
||||||
|
ON agent_job_runs (job_name, started_at DESC);
|
||||||
|
```
|
||||||
|
|
||||||
|
**`JobStore` interface (`internal/store/agent.go`):**
|
||||||
|
|
||||||
|
```go
|
||||||
|
type JobStore interface {
|
||||||
|
StartRun(ctx context.Context, jobName string) (uuid.UUID, error)
|
||||||
|
FinishRun(ctx context.Context, id uuid.UUID, status, output, errMsg string) error
|
||||||
|
LastRun(ctx context.Context, jobName string) (*AgentJobRun, error)
|
||||||
|
ListRuns(ctx context.Context, jobName string, limit int) ([]AgentJobRun, error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2: Knowledge Graph Distillation
|
||||||
|
|
||||||
|
Builds on Phase 1's distillation job. `thought_links` already exists with typed `relation` — the missing piece is a way to mark and query promoted knowledge nodes.
|
||||||
|
|
||||||
|
### 2a. Extend `ThoughtMetadata`
|
||||||
|
|
||||||
|
In `internal/types/thought.go`, add two fields to `ThoughtMetadata`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type ThoughtMetadata struct {
|
||||||
|
// ... existing fields ...
|
||||||
|
KnowledgeNode bool `json:"knowledge_node,omitempty"` // true = promoted insight
|
||||||
|
KnowledgeWeight int `json:"knowledge_weight,omitempty"` // number of source thoughts that fed this node
|
||||||
|
Distilled bool `json:"distilled,omitempty"` // true = this thought has been processed by distill job
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
These are stored in the existing `metadata jsonb` column — no schema migration needed.
|
||||||
|
|
||||||
|
### 2b. Store Addition
|
||||||
|
|
||||||
|
In `internal/store/thoughts.go` add:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// ListKnowledgeNodes returns thoughts where metadata->>'knowledge_node' = 'true',
|
||||||
|
// ordered by knowledge_weight DESC, then created_at DESC.
|
||||||
|
func (db *DB) ListKnowledgeNodes(ctx context.Context, projectID *uuid.UUID, limit int) ([]types.Thought, error)
|
||||||
|
```
|
||||||
|
|
||||||
|
SQL:
|
||||||
|
```sql
|
||||||
|
SELECT id, content, metadata, project_id, archived_at, created_at, updated_at
|
||||||
|
FROM thoughts
|
||||||
|
WHERE (metadata->>'knowledge_node')::boolean = true
|
||||||
|
AND ($1::uuid IS NULL OR project_id = $1)
|
||||||
|
AND archived_at IS NULL
|
||||||
|
ORDER BY (metadata->>'knowledge_weight')::int DESC, created_at DESC
|
||||||
|
LIMIT $2
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2c. New MCP Tools — `internal/tools/knowledge.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
// get_knowledge_graph
|
||||||
|
// Input: { "project_id": "uuid|null", "limit": 50 }
|
||||||
|
// Returns: { nodes: [Thought], edges: [ThoughtLink] }
|
||||||
|
// Fetches ListKnowledgeNodes + their outgoing/incoming links via store.GetThoughtLinks.
|
||||||
|
|
||||||
|
// distill_now
|
||||||
|
// Input: { "project_id": "uuid|null", "batch_size": 20 }
|
||||||
|
// Triggers the distillation job synchronously (for on-demand use); returns { insights_created: N }
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 3: Channel Integrations — Telegram First
|
||||||
|
|
||||||
|
### 3a. Channel Adapter Interface — `internal/channels/channel.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
package channels
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Attachment struct {
|
||||||
|
Name string
|
||||||
|
MediaType string
|
||||||
|
Data []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
type InboundMessage struct {
|
||||||
|
ChannelID string // e.g. telegram chat ID as string
|
||||||
|
SenderID string // e.g. telegram user ID as string
|
||||||
|
SenderName string // display name
|
||||||
|
Text string
|
||||||
|
Attachments []Attachment
|
||||||
|
Timestamp time.Time
|
||||||
|
Raw any // original platform message for debug/logging
|
||||||
|
}
|
||||||
|
|
||||||
|
type Channel interface {
|
||||||
|
Name() string
|
||||||
|
Start(ctx context.Context, handler func(InboundMessage)) error
|
||||||
|
Send(ctx context.Context, channelID string, text string) error
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3b. Telegram Implementation — `internal/channels/telegram/bot.go`
|
||||||
|
|
||||||
|
Uses `net/http` only (no external Telegram SDK). Long-polling loop:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Bot struct {
|
||||||
|
token string
|
||||||
|
allowedIDs map[int64]struct{} // empty = all allowed
|
||||||
|
baseURL string // https://api.telegram.org/bot{token}
|
||||||
|
client *http.Client
|
||||||
|
offset int64
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Bot) Name() string { return "telegram" }
|
||||||
|
|
||||||
|
func (b *Bot) Start(ctx context.Context, handler func(channels.InboundMessage)) error {
|
||||||
|
for {
|
||||||
|
updates, err := b.getUpdates(ctx, b.offset, 30 /*timeout seconds*/)
|
||||||
|
if err != nil {
|
||||||
|
if ctx.Err() != nil { return nil }
|
||||||
|
// transient error: log and back off 5s
|
||||||
|
time.Sleep(5 * time.Second)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, u := range updates {
|
||||||
|
b.offset = u.UpdateID + 1
|
||||||
|
if u.Message == nil { continue }
|
||||||
|
if !b.isAllowed(u.Message.Chat.ID) { continue }
|
||||||
|
handler(b.toInbound(u.Message))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Bot) Send(ctx context.Context, channelID string, text string) error {
|
||||||
|
// POST /sendMessage with chat_id and text
|
||||||
|
// Splits messages > 4096 chars automatically
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Error handling:**
|
||||||
|
- HTTP 401 (bad token): return fatal error, engine stops channel
|
||||||
|
- HTTP 429 (rate limit): respect `retry_after` from response body, sleep, retry
|
||||||
|
- HTTP 5xx: exponential backoff (5s → 10s → 30s → 60s), max 3 retries then sleep 5 min
|
||||||
|
|
||||||
|
### 3c. Channel Router — `internal/channels/router.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Router struct {
|
||||||
|
store store.ContactQuerier
|
||||||
|
thoughts store.ThoughtInserter
|
||||||
|
provider ai.Provider
|
||||||
|
channels map[string]channels.Channel
|
||||||
|
cfg config.ChannelsConfig
|
||||||
|
logger *slog.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Router) Handle(msg channels.InboundMessage) {
|
||||||
|
// 1. Resolve sender → CRM contact (by channel_identifiers->>'telegram' = senderID)
|
||||||
|
// If not found: create a new professional_contact with the sender name + channel identifier
|
||||||
|
// 2. Capture message as thought:
|
||||||
|
// content = msg.Text
|
||||||
|
// metadata.source = "telegram"
|
||||||
|
// metadata.type = "observation"
|
||||||
|
// metadata.people = [senderName]
|
||||||
|
// metadata (extra, stored in JSONB): channel="telegram", channel_id=msg.ChannelID, sender_id=msg.SenderID
|
||||||
|
// 3. If cfg.Telegram.Respond:
|
||||||
|
// a. Load recent context via store.SearchSimilarThoughts(msg.Text, limit=10)
|
||||||
|
// b. Build []CompletionMessage with system context + recent thoughts + user message
|
||||||
|
// c. Call provider.Complete(ctx, messages)
|
||||||
|
// d. Capture response as thought (type="assistant_response", source="telegram")
|
||||||
|
// e. Send reply via channel.Send(ctx, msg.ChannelID, result.Content)
|
||||||
|
// f. Save chat history via store.InsertChatHistory
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Agent response system prompt (step 3b):**
|
||||||
|
```
|
||||||
|
You are a personal assistant with access to the user's memory.
|
||||||
|
Relevant context from memory:
|
||||||
|
{joined recent thought content}
|
||||||
|
|
||||||
|
Respond concisely. If you cannot answer from memory, say so.
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3d. Config — full YAML reference
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
channels:
|
||||||
|
telegram:
|
||||||
|
enabled: false
|
||||||
|
bot_token: ""
|
||||||
|
allowed_chat_ids: [] # empty = all chats allowed
|
||||||
|
capture_all: true # save every inbound message as a thought
|
||||||
|
respond: true # send LLM reply back to sender
|
||||||
|
response_model: "" # blank = uses agent.model or ai.metadata.model
|
||||||
|
poll_timeout_seconds: 30 # Telegram long-poll timeout (max 60)
|
||||||
|
max_message_length: 4096 # split replies longer than this
|
||||||
|
discord:
|
||||||
|
enabled: false
|
||||||
|
bot_token: ""
|
||||||
|
guild_ids: [] # empty = all guilds
|
||||||
|
capture_all: true
|
||||||
|
respond: true
|
||||||
|
slack:
|
||||||
|
enabled: false
|
||||||
|
bot_token: ""
|
||||||
|
app_token: "" # for socket mode
|
||||||
|
capture_all: true
|
||||||
|
respond: true
|
||||||
|
email:
|
||||||
|
enabled: false
|
||||||
|
imap_host: ""
|
||||||
|
imap_port: 993
|
||||||
|
smtp_host: ""
|
||||||
|
smtp_port: 587
|
||||||
|
username: ""
|
||||||
|
password: ""
|
||||||
|
poll_interval: 5m
|
||||||
|
capture_all: true
|
||||||
|
folders: ["INBOX"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3e. Schema Migration — `migrations/022_channel_contacts.sql`
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Store per-channel identity handles on CRM contacts
|
||||||
|
ALTER TABLE professional_contacts
|
||||||
|
ADD COLUMN IF NOT EXISTS channel_identifiers jsonb NOT NULL DEFAULT '{}';
|
||||||
|
|
||||||
|
-- e.g. {"telegram": "123456789", "discord": "user#1234", "slack": "U01234567"}
|
||||||
|
CREATE INDEX idx_contacts_telegram_id
|
||||||
|
ON professional_contacts ((channel_identifiers->>'telegram'))
|
||||||
|
WHERE channel_identifiers->>'telegram' IS NOT NULL;
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3f. New MCP Tools — `internal/tools/channels.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
// send_channel_message
|
||||||
|
// Input: { "channel": "telegram", "channel_id": "123456789", "text": "Hello" }
|
||||||
|
// Sends a message on the named channel. Returns { sent: true, channel: "telegram" }
|
||||||
|
|
||||||
|
// list_channel_conversations
|
||||||
|
// Input: { "channel": "telegram", "limit": 20, "days": 7 }
|
||||||
|
// Lists chat histories filtered by channel metadata. Wraps store.ListChatHistories.
|
||||||
|
|
||||||
|
// get_channel_status
|
||||||
|
// Returns: [{ channel: "telegram", connected: true, uptime_seconds: 3600 }, ...]
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3g. Future Channel Adapters
|
||||||
|
|
||||||
|
Each is a new subdirectory implementing `channels.Channel`. No router or MCP tool changes needed.
|
||||||
|
|
||||||
|
| Channel | Package | Approach |
|
||||||
|
|---------|---------|----------|
|
||||||
|
| Discord | `internal/channels/discord/` | Gateway WebSocket (discord.com/api/gateway); or use `discordgo` lib |
|
||||||
|
| Slack | `internal/channels/slack/` | Socket Mode WebSocket (no public URL needed) |
|
||||||
|
| Email (IMAP) | `internal/channels/email/` | IMAP IDLE or poll; SMTP for send |
|
||||||
|
| Signal | `internal/channels/signal/` | Wrap `signal-cli` JSON-RPC subprocess |
|
||||||
|
| WhatsApp | `internal/channels/whatsapp/` | Meta Cloud API webhook (requires public URL) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 4: Shell / Computer Access
|
||||||
|
|
||||||
|
### 4a. Shell Tool — `internal/tools/shell.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
type ShellInput struct {
|
||||||
|
Command string `json:"command"`
|
||||||
|
WorkingDir string `json:"working_dir,omitempty"` // override default; must be within allowed prefix
|
||||||
|
Timeout string `json:"timeout,omitempty"` // e.g. "30s"; overrides config default
|
||||||
|
CaptureAs string `json:"capture_as,omitempty"` // thought type for stored output; default "shell_output"
|
||||||
|
SaveOutput bool `json:"save_output"` // store stdout/stderr as a thought
|
||||||
|
}
|
||||||
|
|
||||||
|
type ShellOutput struct {
|
||||||
|
Stdout string `json:"stdout"`
|
||||||
|
Stderr string `json:"stderr"`
|
||||||
|
ExitCode int `json:"exit_code"`
|
||||||
|
ThoughtID *uuid.UUID `json:"thought_id,omitempty"` // set if save_output=true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Execution model:**
|
||||||
|
1. Validate `command` against `cfg.Shell.AllowedCommands` (if non-empty) and `cfg.Shell.BlockedCommands`
|
||||||
|
2. `exec.CommandContext(ctx, "sh", "-c", command)` with `Dir` set to working dir
|
||||||
|
3. Capture stdout + stderr into `bytes.Buffer`
|
||||||
|
4. On timeout: kill process group (`syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL)`), return exit code -1
|
||||||
|
5. If `SaveOutput`: call `store.InsertThought` with content = truncated stdout (max 8KB) + stderr summary
|
||||||
|
|
||||||
|
**Security controls:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
shell:
|
||||||
|
enabled: false
|
||||||
|
working_dir: "/tmp/amcs-agent" # all commands run here unless overridden
|
||||||
|
allowed_working_dirs: # if set, working_dir overrides must be within one of these
|
||||||
|
- "/tmp/amcs-agent"
|
||||||
|
- "/home/user/projects"
|
||||||
|
timeout: 30s
|
||||||
|
max_output_bytes: 65536 # truncate captured output beyond this
|
||||||
|
allowed_commands: [] # empty = all; non-empty = exact binary name allowlist
|
||||||
|
blocked_commands: # checked before allowed_commands
|
||||||
|
- "rm"
|
||||||
|
- "sudo"
|
||||||
|
- "su"
|
||||||
|
- "curl"
|
||||||
|
- "wget"
|
||||||
|
save_output_by_default: false
|
||||||
|
```
|
||||||
|
|
||||||
|
The tool is registered with `mcp.Tool.Annotations` `Destructive: true` so MCP clients prompt for confirmation.
|
||||||
|
|
||||||
|
### 4b. File Bridge Tools
|
||||||
|
|
||||||
|
Also in `internal/tools/shell.go`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// read_file_from_path
|
||||||
|
// Input: { "path": "/abs/path/file.txt", "link_to_thought": "uuid|null" }
|
||||||
|
// Reads file from server filesystem → stores as AMCS file via store.InsertFile
|
||||||
|
// Returns: { file_id: "uuid", size_bytes: N, media_type: "text/plain" }
|
||||||
|
|
||||||
|
// write_file_to_path
|
||||||
|
// Input: { "file_id": "uuid", "path": "/abs/path/output.txt" }
|
||||||
|
// Loads AMCS file → writes to filesystem path
|
||||||
|
// Path must be within cfg.Shell.AllowedWorkingDirs if set
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 5: Self-Improving Memory
|
||||||
|
|
||||||
|
### 5a. Skill Discovery Job — `internal/agent/skill_discovery.go`
|
||||||
|
|
||||||
|
Runs weekly. Algorithm:
|
||||||
|
|
||||||
|
1. Load last 30 days of `chat_histories` via `store.ListChatHistories(days=30)`
|
||||||
|
2. Extract assistant message patterns with `provider.Complete`:
|
||||||
|
```
|
||||||
|
System: Identify reusable behavioural patterns or preferences visible in these conversations.
|
||||||
|
Return a JSON array of { "name": "...", "description": "...", "tags": [...] }.
|
||||||
|
Only include patterns that would be useful across future sessions.
|
||||||
|
User: [last N assistant + user messages, newest first]
|
||||||
|
```
|
||||||
|
3. For each discovered pattern, call `store.InsertSkill` with tag `auto-discovered` and the current date
|
||||||
|
4. Link to all projects via `store.LinkSkillToProject`
|
||||||
|
|
||||||
|
Deduplication: before inserting, call `store.SearchSkills(pattern.name)` — if similarity > 0.9, skip.
|
||||||
|
|
||||||
|
### 5b. Thought Archival Job — `internal/agent/archival.go`
|
||||||
|
|
||||||
|
```go
|
||||||
|
func (j *ArchivalJob) Run(ctx context.Context) error {
|
||||||
|
// 1. ListThoughts older than cfg.ArchiveOlderThanDays with no knowledge_node link
|
||||||
|
// SQL: thoughts where created_at < now() - interval '$N days'
|
||||||
|
// AND metadata->>'knowledge_node' IS DISTINCT FROM 'true'
|
||||||
|
// AND archived_at IS NULL
|
||||||
|
// AND id NOT IN (SELECT thought_id FROM thought_links WHERE relation = 'distilled_from')
|
||||||
|
// 2. For each batch: store.ArchiveThought(ctx, id)
|
||||||
|
// 3. Log count
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Uses the existing `ArchiveThought` store method — no new SQL needed.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## End-to-End Agent Loop Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
Telegram message arrives
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
channels/telegram/bot.go (long-poll goroutine)
|
||||||
|
│ InboundMessage{}
|
||||||
|
▼
|
||||||
|
channels/router.go Handle()
|
||||||
|
├── Resolve sender → CRM contact (store.SearchContacts by channel_identifiers)
|
||||||
|
├── store.InsertThought (source="telegram", type="observation")
|
||||||
|
├── store.SearchSimilarThoughts (semantic context retrieval)
|
||||||
|
├── ai.Provider.Complete (build messages → LLM call)
|
||||||
|
├── store.InsertThought (source="telegram", type="assistant_response")
|
||||||
|
├── store.InsertChatHistory (full turn saved)
|
||||||
|
└── channels.Channel.Send (reply dispatched to Telegram)
|
||||||
|
|
||||||
|
Meanwhile, every 24h:
|
||||||
|
agent/engine.go ticker fires DistillJob
|
||||||
|
├── store.ListThoughts (recent, not yet distilled)
|
||||||
|
├── store.SearchSimilarThoughts (cluster by semantic similarity)
|
||||||
|
├── ai.Provider.Summarize (insight extraction prompt)
|
||||||
|
├── store.InsertThought (type="insight", knowledge_node=true)
|
||||||
|
└── store.InsertLink (relation="distilled_from" for each source)
|
||||||
|
|
||||||
|
After distill:
|
||||||
|
agent/living_summary.go
|
||||||
|
├── store.ListKnowledgeNodes
|
||||||
|
├── store.ListThoughts (type="daily_note", last 30 days)
|
||||||
|
├── ai.Provider.Summarize (MEMORY.md regeneration)
|
||||||
|
└── store.UpsertFile (name="MEMORY.md", linked to project)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Error Handling & Retry Strategy
|
||||||
|
|
||||||
|
| Scenario | Handling |
|
||||||
|
|----------|----------|
|
||||||
|
| LLM returns 429 | Mark model unhealthy in `modelHealth` map (existing pattern), return error, engine logs and skips tick |
|
||||||
|
| LLM returns 5xx | Same as 429 |
|
||||||
|
| Telegram 429 | Read `retry_after` from response, sleep exact duration, retry immediately |
|
||||||
|
| Telegram 5xx | Exponential backoff: 5s → 10s → 30s → 60s, reset after success |
|
||||||
|
| Telegram disconnects | Long-poll timeout naturally retries; context cancel exits cleanly |
|
||||||
|
| Agent job panics | `engine.runOnce` wraps in `recover()`, logs stack trace, marks run `failed` |
|
||||||
|
| Agent double-run | `store.StartRun` checks for `running` row < `2 * interval` old → returns `ErrAlreadyRunning`, tick skipped silently |
|
||||||
|
| Shell command timeout | `exec.CommandContext` kills process group via SIGKILL, returns exit_code=-1 and partial output |
|
||||||
|
| Distillation partial failure | Each cluster processed independently; failure of one cluster logged and skipped, others continue |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Critical Files
|
||||||
|
|
||||||
|
| File | Change |
|
||||||
|
|------|--------|
|
||||||
|
| `internal/ai/provider.go` | Add `Complete()`, `CompletionMessage`, `CompletionResult` |
|
||||||
|
| `internal/ai/compat/client.go` | Implement `Complete()` on `*Client` |
|
||||||
|
| `internal/config/config.go` | Add `AgentConfig`, `ChannelsConfig`, `ShellConfig` |
|
||||||
|
| `internal/types/thought.go` | Add `KnowledgeNode`, `KnowledgeWeight`, `Distilled` to `ThoughtMetadata` |
|
||||||
|
| `internal/store/thoughts.go` | Add `ListKnowledgeNodes()` |
|
||||||
|
| `internal/store/agent.go` | New: `JobStore` interface + implementation |
|
||||||
|
| `internal/app/app.go` | Wire agent engine + channel router goroutines |
|
||||||
|
| `internal/mcpserver/server.go` | Add `Agent`, `Knowledge`, `Channels`, `Shell` to `ToolSet` |
|
||||||
|
| `internal/agent/` | New package: engine, job, distill, daily_notes, living_summary, archival, skill_discovery |
|
||||||
|
| `internal/channels/` | New package: channel interface, router, telegram/ |
|
||||||
|
| `internal/tools/agent.go` | New: list_agent_jobs, trigger_agent_job, get_agent_job_history |
|
||||||
|
| `internal/tools/knowledge.go` | New: get_knowledge_graph, distill_now |
|
||||||
|
| `internal/tools/channels.go` | New: send_channel_message, list_channel_conversations, get_channel_status |
|
||||||
|
| `internal/tools/shell.go` | New: run_shell_command, read_file_from_path, write_file_to_path |
|
||||||
|
| `migrations/021_agent_jobs.sql` | New table: agent_job_runs |
|
||||||
|
| `migrations/022_channel_contacts.sql` | ALTER professional_contacts: add channel_identifiers jsonb |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sequence / Parallelism
|
||||||
|
|
||||||
|
```
|
||||||
|
Phase 1 (Heartbeat Engine) ──► Phase 2 (Knowledge Graph)
|
||||||
|
└──► Phase 5 (Self-Improving)
|
||||||
|
|
||||||
|
Phase 3 (Telegram) ──► Phase 3g (Discord / Slack / Email)
|
||||||
|
|
||||||
|
Phase 4 (Shell) [fully independent — no dependencies on other phases]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Minimum viable OpenClaw competitor = Phase 1 + Phase 3** (autonomous scheduling + Telegram channel).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verification
|
||||||
|
|
||||||
|
| Phase | Test |
|
||||||
|
|-------|------|
|
||||||
|
| 1 — Heartbeat | Set `distill.interval: 1m` in dev config. Capture 5+ related thoughts. Wait 1 min. Query `thought_links` for `relation=distilled_from` rows. Check `agent_job_runs` has a `status=ok` row. |
|
||||||
|
| 1 — Daily notes | Set `daily_notes.hour` to current UTC hour. Restart server. Within 1 min, `list_thoughts` should return a `type=daily_note` entry for today. |
|
||||||
|
| 2 — Knowledge graph | Call `get_knowledge_graph` MCP tool. Verify `nodes` array contains `type=insight` thoughts with `knowledge_node=true`. Verify edges list `distilled_from` links. |
|
||||||
|
| 3 — Telegram inbound | Send a message to the configured bot. Call `search_thoughts` with the message text — should appear with `source=telegram`. |
|
||||||
|
| 3 — Telegram response | Send a question to the bot. Verify a reply arrives in Telegram. Call `list_chat_histories` — should contain the turn. |
|
||||||
|
| 4 — Shell | Call `run_shell_command` with `{"command": "echo hello", "save_output": true}`. Verify `stdout=hello\n`, `exit_code=0`, and a new thought with `type=shell_output`. |
|
||||||
|
| 4 — Blocked command | Call `run_shell_command` with `{"command": "sudo whoami"}`. Verify error returned without execution. |
|
||||||
|
| 5 — Skill discovery | Run `trigger_agent_job` with `{"job": "skill_discovery"}`. Verify new rows in `agent_skills` with tag `auto-discovered`. |
|
||||||
|
| Full loop | Send Telegram message → agent responds → distill job runs → knowledge node created from conversation → MEMORY.md regenerated with new insight. |
|
||||||
@@ -33,7 +33,8 @@ In practice, the project has also grown beyond the original v1 scope with additi
|
|||||||
- stored files and binary resources
|
- stored files and binary resources
|
||||||
- agent skills and guardrails
|
- agent skills and guardrails
|
||||||
- chat history tools
|
- chat history tools
|
||||||
- household / maintenance / calendar / meal / CRM tools
|
- maintenance tools
|
||||||
|
- household / calendar / meal / CRM tools (moved to future plugin; see `llm/todo.md`)
|
||||||
- OAuth client-credentials support
|
- OAuth client-credentials support
|
||||||
- Ollama support
|
- Ollama support
|
||||||
- tool discovery and persistent tool annotations
|
- tool discovery and persistent tool annotations
|
||||||
|
|||||||
14
llm/sample_learning.json
Normal file
14
llm/sample_learning.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||||
|
"category": "insight",
|
||||||
|
"area": "frontend",
|
||||||
|
"status": "pending",
|
||||||
|
"priority": "high",
|
||||||
|
"summary": "Understanding React hooks lifecycle",
|
||||||
|
"details": "React hooks provide a way to use state and other React features without writing a class. This learning note captures key insights about hooks lifecycle and common pitfalls.",
|
||||||
|
"project_id": "proj-001",
|
||||||
|
"thought_id": "th-001",
|
||||||
|
"skill_id": "skill-001",
|
||||||
|
"created_at": "2026-04-05T19:30:00Z",
|
||||||
|
"updated_at": "2026-04-05T19:30:00Z"
|
||||||
|
}
|
||||||
7
llm/structured-learnings/README.md
Normal file
7
llm/structured-learnings/README.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Structured Learnings
|
||||||
|
|
||||||
|
This directory is intended to hold structured learning modules and resources.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Add your learning materials here.*
|
||||||
46
llm/todo.md
46
llm/todo.md
@@ -1,4 +1,50 @@
|
|||||||
# AMCS TODO
|
# AMCS TODO
|
||||||
|
|
||||||
|
## Future Plugin: Lifestyle Tools (calendar, meals, household, CRM)
|
||||||
|
|
||||||
|
The following tool groups have been removed from the core server and are candidates for a separate optional plugin or extension server. The store/tool implementations remain in the codebase but are no longer registered.
|
||||||
|
|
||||||
|
### calendar
|
||||||
|
- `add_family_member` — Add a family member to the household.
|
||||||
|
- `list_family_members` — List all family members.
|
||||||
|
- `add_activity` — Schedule a one-time or recurring family activity.
|
||||||
|
- `get_week_schedule` — Get all activities scheduled for a given week.
|
||||||
|
- `search_activities` — Search activities by title, type, or family member.
|
||||||
|
- `add_important_date` — Track a birthday, anniversary, deadline, or other important date.
|
||||||
|
- `get_upcoming_dates` — Get important dates coming up in the next N days.
|
||||||
|
|
||||||
|
### meals
|
||||||
|
- `add_recipe` — Save a recipe with ingredients and instructions.
|
||||||
|
- `search_recipes` — Search recipes by name, cuisine, tags, or ingredient.
|
||||||
|
- `update_recipe` — Update an existing recipe.
|
||||||
|
- `create_meal_plan` — Set the weekly meal plan; replaces existing.
|
||||||
|
- `get_meal_plan` — Get the meal plan for a given week.
|
||||||
|
- `generate_shopping_list` — Generate shopping list from the weekly meal plan.
|
||||||
|
|
||||||
|
### household
|
||||||
|
- `add_household_item` — Store a household fact (paint, appliance, measurement, etc.).
|
||||||
|
- `search_household_items` — Search household items by name, category, or location.
|
||||||
|
- `get_household_item` — Retrieve a household item by id.
|
||||||
|
- `add_vendor` — Add a service provider (plumber, electrician, landscaper, etc.).
|
||||||
|
- `list_vendors` — List household service vendors, optionally filtered by service type.
|
||||||
|
|
||||||
|
### crm
|
||||||
|
- `add_professional_contact` — Add a professional contact to the CRM.
|
||||||
|
- `search_contacts` — Search professional contacts by name, company, title, notes, or tags.
|
||||||
|
- `log_interaction` — Log an interaction with a professional contact.
|
||||||
|
- `get_contact_history` — Get full history (interactions and opportunities) for a contact.
|
||||||
|
- `create_opportunity` — Create a deal, project, or opportunity linked to a contact.
|
||||||
|
- `get_follow_ups_due` — List contacts with a follow-up date due within the next N days.
|
||||||
|
- `link_thought_to_contact` — Append a stored thought to a contact's notes.
|
||||||
|
|
||||||
|
**Implementation notes:**
|
||||||
|
- Store implementations: `internal/tools/calendar.go`, `internal/tools/meals.go`, `internal/tools/household.go`, `internal/tools/crm.go`
|
||||||
|
- DB store layers: `internal/store/calendar.go`, `internal/store/meals.go`, `internal/store/household.go`, `internal/store/crm.go`
|
||||||
|
- Re-register via `mcpserver.ToolSet` fields: `Household`, `Calendar`, `Meals`, `CRM`
|
||||||
|
- Re-add `registerHouseholdTools`, `registerCalendarTools`, `registerMealTools`, `registerCRMTools` to the register slice in `NewHandlers`
|
||||||
|
- Add catalog entries back in `BuildToolCatalog`
|
||||||
|
|
||||||
|
---
|
||||||
## Embedding Backfill and Text-Search Fallback Audit
|
## Embedding Backfill and Text-Search Fallback Audit
|
||||||
|
|
||||||
This file originally described the planned `backfill_embeddings` work and semantic-to-text fallback behavior. Most of that work is now implemented. This document now tracks what landed, what still needs verification, and what follow-up work remains.
|
This file originally described the planned `backfill_embeddings` work and semantic-to-text fallback behavior. Most of that work is now implemented. This document now tracks what landed, what still needs verification, and what follow-up work remains.
|
||||||
|
|||||||
@@ -275,6 +275,30 @@ CREATE TABLE IF NOT EXISTS public.tool_annotations (
|
|||||||
updated_at timestamptz NOT NULL DEFAULT now()
|
updated_at timestamptz NOT NULL DEFAULT now()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS public.learnings (
|
||||||
|
action_required boolean NOT NULL DEFAULT false,
|
||||||
|
area text NOT NULL DEFAULT 'other',
|
||||||
|
category text NOT NULL DEFAULT 'insight',
|
||||||
|
confidence text NOT NULL DEFAULT 'hypothesis',
|
||||||
|
created_at timestamptz NOT NULL DEFAULT now(),
|
||||||
|
details text NOT NULL DEFAULT '',
|
||||||
|
duplicate_of_learning_id uuid,
|
||||||
|
id uuid NOT NULL DEFAULT gen_random_uuid(),
|
||||||
|
priority text NOT NULL DEFAULT 'medium',
|
||||||
|
project_id uuid,
|
||||||
|
related_skill_id uuid,
|
||||||
|
related_thought_id uuid,
|
||||||
|
reviewed_at timestamptz,
|
||||||
|
reviewed_by text,
|
||||||
|
source_ref text,
|
||||||
|
source_type text,
|
||||||
|
status text NOT NULL DEFAULT 'pending',
|
||||||
|
summary text NOT NULL,
|
||||||
|
supersedes_learning_id uuid,
|
||||||
|
tags text,
|
||||||
|
updated_at timestamptz NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS public.agent_skills (
|
CREATE TABLE IF NOT EXISTS public.agent_skills (
|
||||||
content text NOT NULL,
|
content text NOT NULL,
|
||||||
created_at timestamptz NOT NULL DEFAULT now(),
|
created_at timestamptz NOT NULL DEFAULT now(),
|
||||||
@@ -2597,6 +2621,279 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
$$;
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'action_required'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN action_required boolean NOT NULL DEFAULT false;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'area'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN area text NOT NULL DEFAULT 'other';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'category'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN category text NOT NULL DEFAULT 'insight';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'confidence'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN confidence text NOT NULL DEFAULT 'hypothesis';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'created_at'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN created_at timestamptz NOT NULL DEFAULT now();
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'details'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN details text NOT NULL DEFAULT '';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'duplicate_of_learning_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN duplicate_of_learning_id uuid;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN id uuid NOT NULL DEFAULT gen_random_uuid();
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'priority'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN priority text NOT NULL DEFAULT 'medium';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'project_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN project_id uuid;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'related_skill_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN related_skill_id uuid;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'related_thought_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN related_thought_id uuid;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'reviewed_at'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN reviewed_at timestamptz;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'reviewed_by'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN reviewed_by text;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'source_ref'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN source_ref text;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'source_type'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN source_type text;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'status'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN status text NOT NULL DEFAULT 'pending';
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'summary'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN summary text NOT NULL;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'supersedes_learning_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN supersedes_learning_id uuid;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'tags'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN tags text;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND column_name = 'updated_at'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD COLUMN updated_at timestamptz NOT NULL DEFAULT now();
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
DO $$
|
DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
@@ -3403,6 +3700,34 @@ BEGIN
|
|||||||
END;
|
END;
|
||||||
$$;
|
$$;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
auto_pk_name text;
|
||||||
|
BEGIN
|
||||||
|
-- Drop auto-generated primary key if it exists
|
||||||
|
SELECT constraint_name INTO auto_pk_name
|
||||||
|
FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_type = 'PRIMARY KEY'
|
||||||
|
AND constraint_name IN ('learnings_pkey', 'public_learnings_pkey');
|
||||||
|
|
||||||
|
IF auto_pk_name IS NOT NULL THEN
|
||||||
|
EXECUTE 'ALTER TABLE public.learnings DROP CONSTRAINT ' || quote_ident(auto_pk_name);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Add named primary key if it doesn't exist
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'pk_public_learnings'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings ADD CONSTRAINT pk_public_learnings PRIMARY KEY (id);
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
DO $$
|
DO $$
|
||||||
DECLARE
|
DECLARE
|
||||||
auto_pk_name text;
|
auto_pk_name text;
|
||||||
@@ -3475,6 +3800,15 @@ CREATE INDEX IF NOT EXISTS idx_contact_interactions_contact_id_occurred_at
|
|||||||
CREATE INDEX IF NOT EXISTS idx_maintenance_logs_task_id_completed_at
|
CREATE INDEX IF NOT EXISTS idx_maintenance_logs_task_id_completed_at
|
||||||
ON public.maintenance_logs USING btree (task_id, completed_at);
|
ON public.maintenance_logs USING btree (task_id, completed_at);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_learnings_details
|
||||||
|
ON public.learnings USING gin (details gin_trgm_ops);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_learnings_summary
|
||||||
|
ON public.learnings USING gin (summary gin_trgm_ops);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_learnings_tags
|
||||||
|
ON public.learnings USING gin (tags gin_trgm_ops);
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_project_skills_project_id_skill_id
|
CREATE INDEX IF NOT EXISTS idx_project_skills_project_id_skill_id
|
||||||
ON public.project_skills USING btree (project_id, skill_id);
|
ON public.project_skills USING btree (project_id, skill_id);
|
||||||
|
|
||||||
@@ -3810,6 +4144,86 @@ BEGIN
|
|||||||
END IF;
|
END IF;
|
||||||
END;
|
END;
|
||||||
$$;DO $$
|
$$;DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'fk_learnings_duplicate_of_learning_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings
|
||||||
|
ADD CONSTRAINT fk_learnings_duplicate_of_learning_id
|
||||||
|
FOREIGN KEY (duplicate_of_learning_id)
|
||||||
|
REFERENCES public.learnings (id)
|
||||||
|
ON DELETE NO ACTION
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'fk_learnings_project_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings
|
||||||
|
ADD CONSTRAINT fk_learnings_project_id
|
||||||
|
FOREIGN KEY (project_id)
|
||||||
|
REFERENCES public.projects (guid)
|
||||||
|
ON DELETE NO ACTION
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'fk_learnings_related_skill_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings
|
||||||
|
ADD CONSTRAINT fk_learnings_related_skill_id
|
||||||
|
FOREIGN KEY (related_skill_id)
|
||||||
|
REFERENCES public.agent_skills (id)
|
||||||
|
ON DELETE NO ACTION
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'fk_learnings_related_thought_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings
|
||||||
|
ADD CONSTRAINT fk_learnings_related_thought_id
|
||||||
|
FOREIGN KEY (related_thought_id)
|
||||||
|
REFERENCES public.thoughts (guid)
|
||||||
|
ON DELETE NO ACTION
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'learnings'
|
||||||
|
AND constraint_name = 'fk_learnings_supersedes_learning_id'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE public.learnings
|
||||||
|
ADD CONSTRAINT fk_learnings_supersedes_learning_id
|
||||||
|
FOREIGN KEY (supersedes_learning_id)
|
||||||
|
REFERENCES public.learnings (id)
|
||||||
|
ON DELETE NO ACTION
|
||||||
|
ON UPDATE NO ACTION;
|
||||||
|
END IF;
|
||||||
|
END;
|
||||||
|
$$;DO $$
|
||||||
BEGIN
|
BEGIN
|
||||||
IF NOT EXISTS (
|
IF NOT EXISTS (
|
||||||
SELECT 1 FROM information_schema.table_constraints
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
@@ -3992,5 +4406,6 @@ $$;
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,3 +30,46 @@ Table tool_annotations {
|
|||||||
|
|
||||||
// Cross-file refs (for relspecgo merge)
|
// Cross-file refs (for relspecgo merge)
|
||||||
Ref: chat_histories.project_id > projects.guid [delete: set null]
|
Ref: chat_histories.project_id > projects.guid [delete: set null]
|
||||||
|
|
||||||
|
Table learnings {
|
||||||
|
id uuid [pk, default: `gen_random_uuid()`]
|
||||||
|
summary text [not null]
|
||||||
|
details text [not null, default: '']
|
||||||
|
category text [not null, default: 'insight']
|
||||||
|
area text [not null, default: 'other']
|
||||||
|
status text [not null, default: 'pending']
|
||||||
|
priority text [not null, default: 'medium']
|
||||||
|
confidence text [not null, default: 'hypothesis']
|
||||||
|
action_required boolean [not null, default: false]
|
||||||
|
source_type text
|
||||||
|
source_ref text
|
||||||
|
project_id uuid [ref: > projects.guid]
|
||||||
|
related_thought_id uuid [ref: > thoughts.guid]
|
||||||
|
related_skill_id uuid [ref: > agent_skills.id]
|
||||||
|
reviewed_by text
|
||||||
|
reviewed_at timestamptz
|
||||||
|
duplicate_of_learning_id uuid [ref: > learnings.id]
|
||||||
|
supersedes_learning_id uuid [ref: > learnings.id]
|
||||||
|
tags "text[]" [not null, default: `'{}'`]
|
||||||
|
created_at timestamptz [not null, default: `now()`]
|
||||||
|
updated_at timestamptz [not null, default: `now()`]
|
||||||
|
|
||||||
|
indexes {
|
||||||
|
project_id
|
||||||
|
category
|
||||||
|
area
|
||||||
|
status
|
||||||
|
priority
|
||||||
|
reviewed_at
|
||||||
|
tags [type: gin]
|
||||||
|
summary [type: gin]
|
||||||
|
details [type: gin]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cross-file refs (for relspecgo merge)
|
||||||
|
Ref: learnings.project_id > projects.guid [delete: set null]
|
||||||
|
Ref: learnings.related_thought_id > thoughts.guid [delete: set null]
|
||||||
|
Ref: learnings.related_skill_id > agent_skills.id [delete: set null]
|
||||||
|
Ref: learnings.duplicate_of_learning_id > learnings.id [delete: set null]
|
||||||
|
Ref: learnings.supersedes_learning_id > learnings.id [delete: set null]
|
||||||
|
|||||||
@@ -19,5 +19,14 @@
|
|||||||
"tailwindcss": "^4.1.4",
|
"tailwindcss": "^4.1.4",
|
||||||
"typescript": "^5.8.3",
|
"typescript": "^5.8.3",
|
||||||
"vite": "^6.3.2"
|
"vite": "^6.3.2"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@sentry/svelte": "^10.49.0",
|
||||||
|
"@skeletonlabs/skeleton": "^4.15.2",
|
||||||
|
"@skeletonlabs/skeleton-svelte": "^4.15.2",
|
||||||
|
"@tanstack/svelte-virtual": "^3.13.24",
|
||||||
|
"@warkypublic/artemis-kit": "file:../../artemis-kit",
|
||||||
|
"@warkypublic/resolvespec-js": "^1.0.1",
|
||||||
|
"@warkypublic/svelix": "^0.1.31"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
3150
ui/pnpm-lock.yaml
generated
3150
ui/pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,19 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { onMount } from "svelte";
|
import { onMount } from 'svelte';
|
||||||
|
import { getApiURL } from '@warkypublic/svelix';
|
||||||
|
import {
|
||||||
|
buildOAuthAuthorizationURL,
|
||||||
|
ensureApiURL,
|
||||||
|
exchangeOAuthCode,
|
||||||
|
GlobalStateStore,
|
||||||
|
setCurrentPath
|
||||||
|
} from './shellState';
|
||||||
|
|
||||||
type AccessEntry = {
|
type AccessEntry = {
|
||||||
key_id: string;
|
key_id: string;
|
||||||
last_accessed_at: string;
|
last_accessed_at: string;
|
||||||
last_path: string;
|
last_path: string;
|
||||||
|
user_agent: string;
|
||||||
request_count: number;
|
request_count: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -21,237 +30,392 @@
|
|||||||
entries: AccessEntry[];
|
entries: AccessEntry[];
|
||||||
};
|
};
|
||||||
|
|
||||||
let data: StatusResponse | null = null;
|
type NavItem = {
|
||||||
let loading = true;
|
id: string;
|
||||||
let error = "";
|
label: string;
|
||||||
|
description: string;
|
||||||
|
disabled?: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
const quickLinks = [
|
const navItems: NavItem[] = [
|
||||||
{ href: "/llm", label: "LLM Instructions" },
|
{
|
||||||
{ href: "/healthz", label: "Health Check" },
|
id: 'dashboard',
|
||||||
{ href: "/readyz", label: "Readiness Check" },
|
label: 'Dashboard',
|
||||||
|
description: 'System overview and status snapshots.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'projects',
|
||||||
|
label: 'Projects',
|
||||||
|
description: 'First management module for AMCS projects.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'thoughts',
|
||||||
|
label: 'Thoughts',
|
||||||
|
description: 'Thought management arrives after projects.',
|
||||||
|
disabled: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'files',
|
||||||
|
label: 'Files',
|
||||||
|
description: 'File inventory and attachment views.',
|
||||||
|
disabled: true
|
||||||
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
async function loadStatus() {
|
let authMessage = $state('');
|
||||||
loading = true;
|
let authError = $state('');
|
||||||
error = "";
|
let authBusy = $state(false);
|
||||||
|
let callbackBusy = $state(false);
|
||||||
|
let data = $state<StatusResponse | null>(null);
|
||||||
|
let loading = $state(false);
|
||||||
|
let error = $state('');
|
||||||
|
let currentPage = $state<'dashboard' | 'projects'>('dashboard');
|
||||||
|
|
||||||
|
ensureApiURL(import.meta.env.VITE_API_URL);
|
||||||
|
|
||||||
|
const isLoggedIn = $derived(GlobalStateStore.isLoggedIn());
|
||||||
|
const currentPath = $derived(typeof window !== 'undefined' ? window.location.pathname : '/');
|
||||||
|
const isOAuthCallback = $derived(currentPath === '/oauth/callback');
|
||||||
|
const oauthAuthorizeURL = $derived(`${getApiURL()}/oauth/authorize`);
|
||||||
|
|
||||||
|
async function startOAuthLogin(): Promise<void> {
|
||||||
|
authBusy = true;
|
||||||
|
authError = '';
|
||||||
|
authMessage = '';
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await fetch("/api/status");
|
const authorizationURL = await buildOAuthAuthorizationURL();
|
||||||
|
window.location.assign(authorizationURL);
|
||||||
|
} catch (err) {
|
||||||
|
authError = err instanceof Error ? err.message : 'Failed to start OAuth login.';
|
||||||
|
} finally {
|
||||||
|
authBusy = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function finishOAuthLogin(): Promise<void> {
|
||||||
|
callbackBusy = true;
|
||||||
|
authError = '';
|
||||||
|
authMessage = '';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = new URLSearchParams(window.location.search);
|
||||||
|
const code = params.get('code');
|
||||||
|
const returnedState = params.get('state');
|
||||||
|
const oauthError = params.get('error');
|
||||||
|
|
||||||
|
if (oauthError) {
|
||||||
|
throw new Error(`OAuth login failed: ${oauthError}`);
|
||||||
|
}
|
||||||
|
if (!code || !returnedState) {
|
||||||
|
throw new Error('OAuth callback is missing code or state.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const token = await exchangeOAuthCode(code, returnedState);
|
||||||
|
await GlobalStateStore.getState().login(token, {
|
||||||
|
username: 'OAuth operator'
|
||||||
|
});
|
||||||
|
|
||||||
|
authMessage = 'OAuth login complete. Welcome back.';
|
||||||
|
window.history.replaceState({}, '', '/');
|
||||||
|
await loadStatus();
|
||||||
|
} catch (err) {
|
||||||
|
authError = err instanceof Error ? err.message : 'OAuth callback failed.';
|
||||||
|
} finally {
|
||||||
|
callbackBusy = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function logout(): Promise<void> {
|
||||||
|
await GlobalStateStore.getState().logout();
|
||||||
|
authMessage = 'Logged out.';
|
||||||
|
authError = '';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadStatus(): Promise<void> {
|
||||||
|
loading = true;
|
||||||
|
error = '';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/status');
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Status request failed with ${response.status}`);
|
throw new Error(`Status request failed with ${response.status}`);
|
||||||
}
|
}
|
||||||
data = (await response.json()) as StatusResponse;
|
data = (await response.json()) as StatusResponse;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
error = err instanceof Error ? err.message : "Failed to load status";
|
error = err instanceof Error ? err.message : 'Failed to load status';
|
||||||
} finally {
|
} finally {
|
||||||
loading = false;
|
loading = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function formatDate(value: string) {
|
function formatDate(value: string): string {
|
||||||
return new Date(value).toLocaleString();
|
return new Date(value).toLocaleString();
|
||||||
}
|
}
|
||||||
|
|
||||||
onMount(loadStatus);
|
onMount(async () => {
|
||||||
|
if (typeof window !== 'undefined') {
|
||||||
|
setCurrentPath(window.location.pathname);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isOAuthCallback) {
|
||||||
|
await finishOAuthLogin();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isLoggedIn) {
|
||||||
|
await loadStatus();
|
||||||
|
}
|
||||||
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<svelte:head>
|
<svelte:head>
|
||||||
<title>AMCS</title>
|
<title>AMCS Admin</title>
|
||||||
</svelte:head>
|
</svelte:head>
|
||||||
|
|
||||||
<div class="min-h-screen bg-slate-950 text-slate-100">
|
<div class="min-h-screen bg-slate-950 text-slate-100">
|
||||||
<main
|
{#if !isLoggedIn}
|
||||||
class="mx-auto flex min-h-screen max-w-7xl flex-col px-4 py-6 sm:px-6 lg:px-8"
|
<main class="mx-auto flex min-h-screen max-w-6xl items-center px-4 py-10 sm:px-6 lg:px-8">
|
||||||
>
|
<section class="grid w-full gap-8 lg:grid-cols-[1.15fr_0.85fr]">
|
||||||
<section
|
<div class="rounded-3xl border border-cyan-400/20 bg-slate-900/80 p-8 shadow-2xl shadow-slate-950/40">
|
||||||
class="overflow-hidden rounded-3xl border border-white/10 bg-slate-900 shadow-2xl shadow-slate-950/40"
|
<div class="inline-flex items-center gap-2 rounded-full border border-cyan-400/20 bg-cyan-400/10 px-3 py-1 text-sm font-medium text-cyan-200">
|
||||||
>
|
<span class="h-2 w-2 rounded-full bg-emerald-400"></span>
|
||||||
<img
|
AMCS Control Interface
|
||||||
src="/images/project.jpg"
|
|
||||||
alt="Avelon Memory Crystal"
|
|
||||||
class="h-64 w-full object-cover object-center sm:h-80"
|
|
||||||
/>
|
|
||||||
|
|
||||||
<div class="grid gap-8 p-6 sm:p-8 lg:grid-cols-[1.6fr_1fr] lg:p-10">
|
|
||||||
<div class="space-y-6">
|
|
||||||
<div class="space-y-4">
|
|
||||||
<div
|
|
||||||
class="inline-flex items-center gap-2 rounded-full border border-cyan-400/20 bg-cyan-400/10 px-3 py-1 text-sm font-medium text-cyan-200"
|
|
||||||
>
|
|
||||||
<span class="h-2 w-2 rounded-full bg-emerald-400"></span>
|
|
||||||
Avalon Memory Crystal Server
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h1
|
|
||||||
class="text-3xl font-semibold tracking-tight text-white sm:text-4xl"
|
|
||||||
>
|
|
||||||
Avelon Memory Crystal Server (AMCS)
|
|
||||||
</h1>
|
|
||||||
<p
|
|
||||||
class="mt-3 max-w-3xl text-base leading-7 text-slate-300 sm:text-lg"
|
|
||||||
>
|
|
||||||
{data?.description ??
|
|
||||||
"AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools."}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
<h1 class="mt-6 text-4xl font-semibold tracking-tight text-white">
|
||||||
<div class="flex flex-wrap gap-3">
|
{#if isOAuthCallback}
|
||||||
{#each quickLinks as link}
|
Completing login
|
||||||
<a
|
{:else}
|
||||||
class="inline-flex items-center justify-center rounded-xl border border-cyan-300/20 bg-cyan-400/10 px-4 py-2 text-sm font-semibold text-cyan-100 transition hover:border-cyan-300/40 hover:bg-cyan-400/20"
|
Login
|
||||||
href={link.href}>{link.label}</a
|
|
||||||
>
|
|
||||||
{/each}
|
|
||||||
{#if data?.oauth_enabled}
|
|
||||||
<a
|
|
||||||
class="inline-flex items-center justify-center rounded-xl border border-violet-300/20 bg-violet-400/10 px-4 py-2 text-sm font-semibold text-violet-100 transition hover:border-violet-300/40 hover:bg-violet-400/20"
|
|
||||||
href="/oauth-authorization-server">OAuth Authorization Server</a
|
|
||||||
>
|
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</h1>
|
||||||
|
<p class="mt-3 max-w-2xl text-base leading-7 text-slate-300">
|
||||||
<div class="grid gap-4 sm:grid-cols-3">
|
Origin-style operator access for the AMCS admin interface. ResolveSpec OAuth is the front door now,
|
||||||
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
not the old login shortcut.
|
||||||
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">
|
|
||||||
Connected users
|
|
||||||
</p>
|
|
||||||
<p class="mt-2 text-3xl font-semibold text-white">
|
|
||||||
{data?.connected_count ?? "—"}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
|
||||||
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">
|
|
||||||
Known principals
|
|
||||||
</p>
|
|
||||||
<p class="mt-2 text-3xl font-semibold text-white">
|
|
||||||
{data?.total_known ?? "—"}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
|
||||||
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">
|
|
||||||
Version
|
|
||||||
</p>
|
|
||||||
<p class="mt-2 break-all text-2xl font-semibold text-white">
|
|
||||||
{data?.version ?? "—"}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<aside
|
|
||||||
class="space-y-4 rounded-2xl border border-white/10 bg-slate-950/50 p-5"
|
|
||||||
>
|
|
||||||
<div>
|
|
||||||
<h2 class="text-lg font-semibold text-white">Build details</h2>
|
|
||||||
<p class="mt-1 text-sm text-slate-400">The same status info.</p>
|
|
||||||
</div>
|
|
||||||
<dl class="space-y-3 text-sm text-slate-300">
|
|
||||||
<div>
|
|
||||||
<dt class="text-slate-500">Build date</dt>
|
|
||||||
<dd class="mt-1 font-medium text-white">
|
|
||||||
{data?.build_date ?? "unknown"}
|
|
||||||
</dd>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<dt class="text-slate-500">Commit</dt>
|
|
||||||
<dd
|
|
||||||
class="mt-1 break-all rounded-lg bg-white/5 px-3 py-2 font-mono text-xs text-cyan-100"
|
|
||||||
>
|
|
||||||
{data?.commit ?? "unknown"}
|
|
||||||
</dd>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<dt class="text-slate-500">Connected window</dt>
|
|
||||||
<dd class="mt-1 font-medium text-white">
|
|
||||||
{data?.connected_window ?? "last 10 minutes"}
|
|
||||||
</dd>
|
|
||||||
</div>
|
|
||||||
</dl>
|
|
||||||
</aside>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section
|
|
||||||
class="mt-6 rounded-3xl border border-white/10 bg-slate-900/80 p-6 shadow-xl shadow-slate-950/20 sm:p-8"
|
|
||||||
>
|
|
||||||
<div
|
|
||||||
class="flex flex-col gap-3 sm:flex-row sm:items-end sm:justify-between"
|
|
||||||
>
|
|
||||||
<div>
|
|
||||||
<h2 class="text-2xl font-semibold text-white">Recent access</h2>
|
|
||||||
<p class="mt-1 text-sm text-slate-400">
|
|
||||||
Authenticated principals AMCS has seen recently.
|
|
||||||
</p>
|
</p>
|
||||||
</div>
|
|
||||||
<button
|
|
||||||
class="inline-flex items-center justify-center rounded-xl border border-white/10 bg-white/5 px-4 py-2 text-sm font-medium text-slate-200 transition hover:bg-white/10"
|
|
||||||
on:click={loadStatus}
|
|
||||||
>
|
|
||||||
Refresh
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{#if loading}
|
<div class="mt-8 grid gap-4 sm:grid-cols-2">
|
||||||
<div
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
||||||
class="mt-6 rounded-2xl border border-dashed border-white/10 bg-slate-950/40 px-4 py-10 text-center text-slate-400"
|
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">Primary module</p>
|
||||||
>
|
<p class="mt-2 text-2xl font-semibold text-white">Projects</p>
|
||||||
Loading status…
|
<p class="mt-2 text-sm text-slate-400">Projects are the first real admin screen in this rollout.</p>
|
||||||
</div>
|
</div>
|
||||||
{:else if error}
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
||||||
<div
|
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">OAuth path</p>
|
||||||
class="mt-6 rounded-2xl border border-rose-400/30 bg-rose-400/10 px-4 py-6 text-sm text-rose-100"
|
<p class="mt-2 text-2xl font-semibold text-white">ResolveSpec</p>
|
||||||
>
|
<p class="mt-2 text-sm text-slate-400">Client registration, authorize, callback, token exchange.</p>
|
||||||
<p class="font-semibold">Couldn’t load the status snapshot.</p>
|
</div>
|
||||||
<p class="mt-1 text-rose-100/80">{error}</p>
|
|
||||||
</div>
|
|
||||||
{:else if data && data.entries.length === 0}
|
|
||||||
<div
|
|
||||||
class="mt-6 rounded-2xl border border-dashed border-white/10 bg-slate-950/40 px-4 py-10 text-center text-slate-400"
|
|
||||||
>
|
|
||||||
No authenticated access recorded yet.
|
|
||||||
</div>
|
|
||||||
{:else if data}
|
|
||||||
<div class="mt-6 overflow-hidden rounded-2xl border border-white/10">
|
|
||||||
<div class="overflow-x-auto">
|
|
||||||
<table
|
|
||||||
class="min-w-full divide-y divide-white/10 text-left text-sm text-slate-300"
|
|
||||||
>
|
|
||||||
<thead
|
|
||||||
class="bg-white/5 text-xs uppercase tracking-[0.2em] text-slate-500"
|
|
||||||
>
|
|
||||||
<tr>
|
|
||||||
<th class="px-4 py-3 font-medium">Principal</th>
|
|
||||||
<th class="px-4 py-3 font-medium">Last accessed</th>
|
|
||||||
<th class="px-4 py-3 font-medium">Last path</th>
|
|
||||||
<th class="px-4 py-3 font-medium">Requests</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody class="divide-y divide-white/5 bg-slate-950/30">
|
|
||||||
{#each data.entries as entry}
|
|
||||||
<tr class="hover:bg-white/[0.03]">
|
|
||||||
<td class="px-4 py-3 align-top"
|
|
||||||
><code
|
|
||||||
class="rounded bg-white/5 px-2 py-1 font-mono text-xs text-cyan-100"
|
|
||||||
>{entry.key_id}</code
|
|
||||||
></td
|
|
||||||
>
|
|
||||||
<td class="px-4 py-3 align-top text-slate-200"
|
|
||||||
>{formatDate(entry.last_accessed_at)}</td
|
|
||||||
>
|
|
||||||
<td class="px-4 py-3 align-top"
|
|
||||||
><code class="text-slate-100">{entry.last_path}</code></td
|
|
||||||
>
|
|
||||||
<td class="px-4 py-3 align-top font-semibold text-white"
|
|
||||||
>{entry.request_count}</td
|
|
||||||
>
|
|
||||||
</tr>
|
|
||||||
{/each}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
|
||||||
</section>
|
<div class="rounded-3xl border border-white/10 bg-slate-900 p-6 shadow-xl shadow-slate-950/30 sm:p-8">
|
||||||
</main>
|
{#if isOAuthCallback}
|
||||||
|
<h2 class="text-xl font-semibold text-white">Authorizing operator session</h2>
|
||||||
|
<p class="mt-2 text-sm leading-6 text-slate-400">
|
||||||
|
Finishing the ResolveSpec handshake and exchanging the returned code for an AMCS token.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div class="mt-6 rounded-2xl border border-cyan-400/20 bg-cyan-400/5 px-4 py-6 text-sm text-cyan-100">
|
||||||
|
{#if callbackBusy}
|
||||||
|
Working the callback doohickey…
|
||||||
|
{:else if authError}
|
||||||
|
Callback failed. Fix the route or try the login run again.
|
||||||
|
{:else}
|
||||||
|
Callback processed.
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{:else}
|
||||||
|
<h2 class="text-xl font-semibold text-white">Operator login</h2>
|
||||||
|
<p class="mt-1 text-sm text-slate-400">Authenticate through AMCS ResolveSpec OAuth endpoints.</p>
|
||||||
|
|
||||||
|
<div class="mt-6 space-y-4">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="inline-flex w-full items-center justify-center rounded-xl border border-cyan-300/20 bg-cyan-400/10 px-4 py-3 text-sm font-semibold text-cyan-100 transition hover:border-cyan-300/40 hover:bg-cyan-400/20 disabled:cursor-not-allowed disabled:opacity-60"
|
||||||
|
onclick={startOAuthLogin}
|
||||||
|
disabled={authBusy}
|
||||||
|
>
|
||||||
|
{#if authBusy}Starting OAuth login…{:else}Login with ResolveSpec OAuth{/if}
|
||||||
|
</button>
|
||||||
|
|
||||||
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-4 text-sm text-slate-300">
|
||||||
|
<p class="font-semibold text-white">Routes in play</p>
|
||||||
|
<ul class="mt-3 space-y-2 text-slate-400">
|
||||||
|
<li>• discovery: <code class="text-cyan-100">/api/.well-known/oauth-authorization-server</code></li>
|
||||||
|
<li>• registration: <code class="text-cyan-100">/api/oauth/register</code></li>
|
||||||
|
<li>• authorize: <code class="text-cyan-100">{oauthAuthorizeURL}</code></li>
|
||||||
|
<li>• callback: <code class="text-cyan-100">/oauth/callback</code></li>
|
||||||
|
<li>• token: <code class="text-cyan-100">/api/oauth/token</code></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{#if authError}
|
||||||
|
<p class="text-sm text-rose-300">{authError}</p>
|
||||||
|
{/if}
|
||||||
|
{#if authMessage}
|
||||||
|
<p class="text-sm text-emerald-300">{authMessage}</p>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
</main>
|
||||||
|
{:else}
|
||||||
|
<div class="grid min-h-screen lg:grid-cols-[17rem_1fr]">
|
||||||
|
<aside class="border-r border-white/10 bg-slate-900/90 p-6">
|
||||||
|
<div>
|
||||||
|
<p class="text-xs uppercase tracking-[0.3em] text-cyan-300">AMCS</p>
|
||||||
|
<h1 class="mt-2 text-2xl font-semibold text-white">Admin</h1>
|
||||||
|
<p class="mt-2 text-sm text-slate-400">Origin-style shell, starting with Projects.</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<nav class="mt-8 space-y-2">
|
||||||
|
{#each navItems as item}
|
||||||
|
<button
|
||||||
|
class={`w-full rounded-2xl border px-4 py-3 text-left transition ${item.disabled ? 'cursor-not-allowed border-white/5 bg-white/[0.02] text-slate-600' : currentPage === item.id ? 'border-cyan-300/30 bg-cyan-400/10 text-cyan-100' : 'border-white/10 bg-white/5 text-slate-200 hover:bg-white/10'}`}
|
||||||
|
disabled={item.disabled}
|
||||||
|
onclick={() => {
|
||||||
|
if (!item.disabled && (item.id === 'dashboard' || item.id === 'projects')) {
|
||||||
|
currentPage = item.id;
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<div class="text-sm font-semibold">{item.label}</div>
|
||||||
|
<div class="mt-1 text-xs text-slate-400">{item.description}</div>
|
||||||
|
</button>
|
||||||
|
{/each}
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<button
|
||||||
|
class="mt-8 inline-flex w-full items-center justify-center rounded-xl border border-white/10 bg-white/5 px-4 py-3 text-sm font-medium text-slate-200 transition hover:bg-white/10"
|
||||||
|
onclick={logout}
|
||||||
|
>
|
||||||
|
Logout
|
||||||
|
</button>
|
||||||
|
</aside>
|
||||||
|
|
||||||
|
<main class="px-4 py-6 sm:px-6 lg:px-8">
|
||||||
|
{#if currentPage === 'dashboard'}
|
||||||
|
<section class="rounded-3xl border border-white/10 bg-slate-900/80 p-6 shadow-xl shadow-slate-950/20 sm:p-8">
|
||||||
|
<div class="flex flex-col gap-3 sm:flex-row sm:items-end sm:justify-between">
|
||||||
|
<div>
|
||||||
|
<h2 class="text-2xl font-semibold text-white">System overview</h2>
|
||||||
|
<p class="mt-1 text-sm text-slate-400">Current AMCS status behind the admin shell.</p>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
class="inline-flex items-center justify-center rounded-xl border border-white/10 bg-white/5 px-4 py-2 text-sm font-medium text-slate-200 transition hover:bg-white/10"
|
||||||
|
onclick={loadStatus}
|
||||||
|
>
|
||||||
|
Refresh
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{#if loading}
|
||||||
|
<div class="mt-6 rounded-2xl border border-dashed border-white/10 bg-slate-950/40 px-4 py-10 text-center text-slate-400">
|
||||||
|
Loading status…
|
||||||
|
</div>
|
||||||
|
{:else if error}
|
||||||
|
<div class="mt-6 rounded-2xl border border-rose-400/30 bg-rose-400/10 px-4 py-6 text-sm text-rose-100">
|
||||||
|
<p class="font-semibold">Couldn’t load the status snapshot.</p>
|
||||||
|
<p class="mt-1 text-rose-100/80">{error}</p>
|
||||||
|
</div>
|
||||||
|
{:else if data}
|
||||||
|
<div class="mt-6 grid gap-4 sm:grid-cols-3">
|
||||||
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
||||||
|
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">Connected users</p>
|
||||||
|
<p class="mt-2 text-3xl font-semibold text-white">{data.connected_count}</p>
|
||||||
|
</div>
|
||||||
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
||||||
|
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">Known principals</p>
|
||||||
|
<p class="mt-2 text-3xl font-semibold text-white">{data.total_known}</p>
|
||||||
|
</div>
|
||||||
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-5">
|
||||||
|
<p class="text-sm uppercase tracking-[0.2em] text-slate-400">Version</p>
|
||||||
|
<p class="mt-2 break-all text-2xl font-semibold text-white">{data.version}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</section>
|
||||||
|
{:else}
|
||||||
|
<section class="rounded-3xl border border-white/10 bg-slate-900/80 p-6 shadow-xl shadow-slate-950/20 sm:p-8">
|
||||||
|
<div class="flex flex-col gap-3 sm:flex-row sm:items-end sm:justify-between">
|
||||||
|
<div>
|
||||||
|
<h2 class="text-2xl font-semibold text-white">Projects</h2>
|
||||||
|
<p class="mt-1 text-sm text-slate-400">First module scaffold. Grid/Form wiring comes next.</p>
|
||||||
|
</div>
|
||||||
|
<span class="inline-flex items-center rounded-full border border-amber-300/20 bg-amber-400/10 px-3 py-1 text-xs font-medium text-amber-200">
|
||||||
|
Structure phase
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="mt-6 grid gap-4 lg:grid-cols-[1.35fr_0.65fr]">
|
||||||
|
<div class="rounded-2xl border border-dashed border-cyan-400/20 bg-cyan-400/5 p-6">
|
||||||
|
<h3 class="text-lg font-semibold text-white">Project grid placeholder</h3>
|
||||||
|
<p class="mt-2 text-sm leading-6 text-slate-300">
|
||||||
|
This is the landing zone for the Origin-style projects grid using Svelix and GridlerFull.
|
||||||
|
Next pass: wire ResolveSpec-backed project list, row actions, and editor flow.
|
||||||
|
</p>
|
||||||
|
<ul class="mt-4 space-y-2 text-sm text-slate-400">
|
||||||
|
<li>• Project list and search</li>
|
||||||
|
<li>• Project detail/edit drawer or modal</li>
|
||||||
|
<li>• Create/archive actions</li>
|
||||||
|
<li>• Link-outs to related thoughts and skills</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="rounded-2xl border border-white/10 bg-white/5 p-6">
|
||||||
|
<h3 class="text-lg font-semibold text-white">Build notes</h3>
|
||||||
|
<dl class="mt-4 space-y-3 text-sm text-slate-300">
|
||||||
|
<div>
|
||||||
|
<dt class="text-slate-500">Auth path</dt>
|
||||||
|
<dd class="mt-1">ResolveSpec OAuth packages</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt class="text-slate-500">Page pattern</dt>
|
||||||
|
<dd class="mt-1">Mapped toward Origin login and shell</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt class="text-slate-500">First module</dt>
|
||||||
|
<dd class="mt-1">Projects</dd>
|
||||||
|
</div>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
{#if data && currentPage === 'dashboard' && data.entries.length > 0}
|
||||||
|
<section class="mt-6 rounded-3xl border border-white/10 bg-slate-900/80 p-6 shadow-xl shadow-slate-950/20 sm:p-8">
|
||||||
|
<h3 class="text-xl font-semibold text-white">Recent access</h3>
|
||||||
|
<div class="mt-6 overflow-hidden rounded-2xl border border-white/10">
|
||||||
|
<div class="overflow-x-auto">
|
||||||
|
<table class="min-w-full divide-y divide-white/10 text-left text-sm text-slate-300">
|
||||||
|
<thead class="bg-white/5 text-xs uppercase tracking-[0.2em] text-slate-500">
|
||||||
|
<tr>
|
||||||
|
<th class="px-4 py-3 font-medium">Principal</th>
|
||||||
|
<th class="px-4 py-3 font-medium">Last accessed</th>
|
||||||
|
<th class="px-4 py-3 font-medium">Last path</th>
|
||||||
|
<th class="px-4 py-3 font-medium">Agent</th>
|
||||||
|
<th class="px-4 py-3 font-medium">Requests</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody class="divide-y divide-white/5 bg-slate-950/30">
|
||||||
|
{#each data.entries as entry}
|
||||||
|
<tr class="hover:bg-white/[0.03]">
|
||||||
|
<td class="px-4 py-3 align-top"><code class="rounded bg-white/5 px-2 py-1 font-mono text-xs text-cyan-100">{entry.key_id}</code></td>
|
||||||
|
<td class="px-4 py-3 align-top text-slate-200">{formatDate(entry.last_accessed_at)}</td>
|
||||||
|
<td class="px-4 py-3 align-top"><code class="text-slate-100">{entry.last_path}</code></td>
|
||||||
|
<td class="max-w-[16rem] truncate px-4 py-3 align-top text-xs text-slate-400">{entry.user_agent ?? '—'}</td>
|
||||||
|
<td class="px-4 py-3 align-top font-semibold text-white">{entry.request_count}</td>
|
||||||
|
</tr>
|
||||||
|
{/each}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
{/if}
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
276
ui/src/shellState.ts
Normal file
276
ui/src/shellState.ts
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
import { GlobalStateStore } from '@warkypublic/svelix';
|
||||||
|
|
||||||
|
// Strip all trailing slashes so endpoint paths can be appended safely.
const normalizeApiURL = (url: string): string => {
  let end = url.length;
  while (end > 0 && url[end - 1] === '/') end -= 1;
  return url.slice(0, end);
};
|
||||||
|
|
||||||
|
const resolveApiURL = (envURL?: string): string => {
|
||||||
|
const viteEnvURL =
|
||||||
|
envURL?.trim() ||
|
||||||
|
import.meta.env.VITE_API_URL?.trim() ||
|
||||||
|
import.meta.env.VITE_API_BASE_URL?.trim() ||
|
||||||
|
import.meta.env.VITE_URL?.trim();
|
||||||
|
|
||||||
|
if (viteEnvURL) return normalizeApiURL(viteEnvURL);
|
||||||
|
|
||||||
|
if (typeof window !== 'undefined') {
|
||||||
|
return `${window.location.protocol}//${window.location.host}/api`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const stateURL = GlobalStateStore.getState().session.apiURL?.trim();
|
||||||
|
if (stateURL) return normalizeApiURL(stateURL);
|
||||||
|
|
||||||
|
return '';
|
||||||
|
};
|
||||||
|
|
||||||
|
// Re-export the shared store so consumers can import it from this module.
export { GlobalStateStore };
|
||||||
|
|
||||||
|
/**
 * Dynamic client registration record returned by the OAuth server
 * (RFC 7591 response fields) and persisted in localStorage.
 */
export type OAuthClientRegistration = {
  client_id: string;
  client_name?: string;
  redirect_uris?: string[];
  grant_types?: string[];
  response_types?: string[];
  token_endpoint_auth_method?: string;
};
|
||||||
|
|
||||||
|
/**
 * Authorization-server metadata fetched from
 * /.well-known/oauth-authorization-server (RFC 8414 fields).
 */
export type OAuthServerMetadata = {
  issuer: string;
  authorization_endpoint: string;
  token_endpoint: string;
  registration_endpoint: string;
  scopes_supported?: string[];
  response_types_supported?: string[];
  grant_types_supported?: string[];
  token_endpoint_auth_methods_supported?: string[];
  code_challenge_methods_supported?: string[];
};
|
||||||
|
|
||||||
|
/**
 * In-flight authorization-code + PKCE login attempt, persisted in
 * localStorage between the redirect to the server and the callback.
 */
export type OAuthSession = {
  clientId: string; // registered OAuth client id
  redirectURI: string; // redirect URI used for this attempt
  codeVerifier: string; // PKCE code verifier (plaintext)
  state: string; // CSRF state echoed back by the server
  createdAt: number; // epoch ms when the attempt started
};
|
||||||
|
|
||||||
|
// localStorage keys for the in-flight login attempt and the cached client registration.
const OAUTH_SESSION_KEY = 'amcs.oauth.session';
const OAUTH_CLIENT_KEY = 'amcs.oauth.client';
// Scope requested during authorization.
const OAUTH_DEFAULT_SCOPE = 'mcp';
|
||||||
|
|
||||||
|
export function ensureApiURL(envURL?: string): string {
|
||||||
|
const resolved = resolveApiURL(envURL);
|
||||||
|
if (!resolved) return '';
|
||||||
|
|
||||||
|
const state = GlobalStateStore.getState();
|
||||||
|
if (state.session.apiURL !== resolved) {
|
||||||
|
state.setApiURL(resolved);
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolved;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getPublicBaseURL(): string {
|
||||||
|
if (typeof window === 'undefined') return '';
|
||||||
|
return `${window.location.protocol}//${window.location.host}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getOAuthRedirectURI(): string {
|
||||||
|
const base = getPublicBaseURL();
|
||||||
|
return base ? `${base}/oauth/callback` : '/oauth/callback';
|
||||||
|
}
|
||||||
|
|
||||||
|
function getStorage(storageKey: string): string | null {
|
||||||
|
if (typeof window === 'undefined') return null;
|
||||||
|
return window.localStorage.getItem(storageKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
function setStorage(storageKey: string, value: string): void {
|
||||||
|
if (typeof window === 'undefined') return;
|
||||||
|
window.localStorage.setItem(storageKey, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function removeStorage(storageKey: string): void {
|
||||||
|
if (typeof window === 'undefined') return;
|
||||||
|
window.localStorage.removeItem(storageKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function readOAuthClient(): OAuthClientRegistration | null {
|
||||||
|
const raw = getStorage(OAUTH_CLIENT_KEY);
|
||||||
|
if (!raw) return null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(raw) as OAuthClientRegistration;
|
||||||
|
} catch {
|
||||||
|
removeStorage(OAUTH_CLIENT_KEY);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function saveOAuthClient(client: OAuthClientRegistration): void {
|
||||||
|
setStorage(OAUTH_CLIENT_KEY, JSON.stringify(client));
|
||||||
|
}
|
||||||
|
|
||||||
|
export function readOAuthSession(): OAuthSession | null {
|
||||||
|
const raw = getStorage(OAUTH_SESSION_KEY);
|
||||||
|
if (!raw) return null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(raw) as OAuthSession;
|
||||||
|
} catch {
|
||||||
|
removeStorage(OAUTH_SESSION_KEY);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function saveOAuthSession(session: OAuthSession): void {
|
||||||
|
setStorage(OAUTH_SESSION_KEY, JSON.stringify(session));
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Forget the in-flight login attempt, if any (e.g. after a successful exchange). */
export function clearOAuthSession(): void {
  removeStorage(OAUTH_SESSION_KEY);
}
|
||||||
|
|
||||||
|
export function setCurrentPath(pathname: string): void {
|
||||||
|
const state = GlobalStateStore.getState();
|
||||||
|
const current = state.navigation.currentPage ?? {};
|
||||||
|
|
||||||
|
state.setCurrentPage({
|
||||||
|
...current,
|
||||||
|
path: pathname
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function createRandomString(length = 48): string {
|
||||||
|
const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~';
|
||||||
|
|
||||||
|
if (typeof crypto !== 'undefined' && typeof crypto.getRandomValues === 'function') {
|
||||||
|
const bytes = new Uint8Array(length);
|
||||||
|
crypto.getRandomValues(bytes);
|
||||||
|
return Array.from(bytes, (byte) => alphabet[byte % alphabet.length]).join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
return Array.from({ length }, () => alphabet[Math.floor(Math.random() * alphabet.length)]).join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
function base64UrlEncode(buffer: ArrayBuffer): string {
|
||||||
|
let binary = '';
|
||||||
|
const bytes = new Uint8Array(buffer);
|
||||||
|
const chunkSize = 0x8000;
|
||||||
|
|
||||||
|
for (let index = 0; index < bytes.length; index += chunkSize) {
|
||||||
|
binary += String.fromCharCode(...bytes.subarray(index, index + chunkSize));
|
||||||
|
}
|
||||||
|
|
||||||
|
return btoa(binary).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/g, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
async function sha256(input: string): Promise<string> {
|
||||||
|
if (typeof crypto === 'undefined' || !crypto.subtle) {
|
||||||
|
throw new Error('Secure browser crypto is required for OAuth login.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = new TextEncoder().encode(input);
|
||||||
|
const digest = await crypto.subtle.digest('SHA-256', data);
|
||||||
|
return base64UrlEncode(digest);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchOAuthMetadata(): Promise<OAuthServerMetadata> {
|
||||||
|
const apiURL = ensureApiURL();
|
||||||
|
const response = await fetch(`${apiURL}/.well-known/oauth-authorization-server`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to load OAuth metadata (${response.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as OAuthServerMetadata;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function ensureOAuthClientRegistration(metadata: OAuthServerMetadata): Promise<OAuthClientRegistration> {
|
||||||
|
const redirectURI = getOAuthRedirectURI();
|
||||||
|
const existing = readOAuthClient();
|
||||||
|
if (existing?.client_id && existing.redirect_uris?.includes(redirectURI)) {
|
||||||
|
return existing;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(metadata.registration_endpoint, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
client_name: 'AMCS Admin UI',
|
||||||
|
redirect_uris: [redirectURI],
|
||||||
|
grant_types: ['authorization_code'],
|
||||||
|
response_types: ['code'],
|
||||||
|
token_endpoint_auth_method: 'none'
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to register OAuth client (${response.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const client = (await response.json()) as OAuthClientRegistration;
|
||||||
|
saveOAuthClient(client);
|
||||||
|
return client;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function buildOAuthAuthorizationURL(): Promise<string> {
|
||||||
|
const metadata = await fetchOAuthMetadata();
|
||||||
|
const client = await ensureOAuthClientRegistration(metadata);
|
||||||
|
const codeVerifier = createRandomString(96);
|
||||||
|
const codeChallenge = await sha256(codeVerifier);
|
||||||
|
const state = createRandomString(40);
|
||||||
|
const redirectURI = getOAuthRedirectURI();
|
||||||
|
|
||||||
|
saveOAuthSession({
|
||||||
|
clientId: client.client_id,
|
||||||
|
redirectURI,
|
||||||
|
codeVerifier,
|
||||||
|
state,
|
||||||
|
createdAt: Date.now()
|
||||||
|
});
|
||||||
|
|
||||||
|
const url = new URL(metadata.authorization_endpoint);
|
||||||
|
url.searchParams.set('client_id', client.client_id);
|
||||||
|
url.searchParams.set('redirect_uri', redirectURI);
|
||||||
|
url.searchParams.set('response_type', 'code');
|
||||||
|
url.searchParams.set('scope', OAUTH_DEFAULT_SCOPE);
|
||||||
|
url.searchParams.set('state', state);
|
||||||
|
url.searchParams.set('code_challenge', codeChallenge);
|
||||||
|
url.searchParams.set('code_challenge_method', 'S256');
|
||||||
|
|
||||||
|
return url.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function exchangeOAuthCode(code: string, returnedState: string): Promise<string> {
|
||||||
|
const session = readOAuthSession();
|
||||||
|
if (!session) {
|
||||||
|
throw new Error('OAuth session is missing. Start login again.');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session.state !== returnedState) {
|
||||||
|
throw new Error('OAuth state mismatch. Start login again.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = await fetchOAuthMetadata();
|
||||||
|
const response = await fetch(metadata.token_endpoint, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded'
|
||||||
|
},
|
||||||
|
body: new URLSearchParams({
|
||||||
|
grant_type: 'authorization_code',
|
||||||
|
code,
|
||||||
|
redirect_uri: session.redirectURI,
|
||||||
|
client_id: session.clientId,
|
||||||
|
code_verifier: session.codeVerifier
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
const payload = (await response.json()) as { access_token?: string; error?: string };
|
||||||
|
if (!response.ok || !payload.access_token) {
|
||||||
|
throw new Error(payload.error || `Token exchange failed (${response.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
clearOAuthSession();
|
||||||
|
return payload.access_token;
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user