Compare commits
23 Commits
feat/dbml-
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| a6165a0f2e | |||
| b6e156011f | |||
| 4d107cb87e | |||
| 1d4dbad33f | |||
| 02bcbdabd8 | |||
| 5f48a197e8 | |||
| 1958eaca01 | |||
| 4aed4105aa | |||
| 8af4956951 | |||
| 5457cbbd21 | |||
| d6488cd4d5 | |||
| a1bf5ceb38 | |||
| 28f7dc199e | |||
|
|
73eb852361 | ||
|
|
a42274a770 | ||
| f0d9c4dc09 | |||
|
|
4bf1c1fe60 | ||
|
|
6c6f4022a0 | ||
| 1328b3cc94 | |||
| 87a62c0d6c | |||
| 3e09dc0ac6 | |||
| b59e02aebe | |||
| 4713110e32 |
41
.gitea/workflows/ci.yml
Normal file
41
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,41 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ['**']
|
||||
tags-ignore: ['v*']
|
||||
pull_request:
|
||||
branches: ['**']
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.26'
|
||||
|
||||
- name: Cache Go modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cache/go-build
|
||||
~/go/pkg/mod
|
||||
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-go-
|
||||
|
||||
- name: Download dependencies
|
||||
run: go mod download
|
||||
|
||||
- name: Run tests
|
||||
run: go test ./...
|
||||
|
||||
- name: Build amcs-server
|
||||
run: go build -o /dev/null ./cmd/amcs-server
|
||||
|
||||
- name: Build amcs-cli
|
||||
run: go build -o /dev/null ./cmd/amcs-cli
|
||||
122
.gitea/workflows/release.yml
Normal file
122
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,122 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: 'Tag to release (e.g. v1.2.3)'
|
||||
required: true
|
||||
|
||||
env:
|
||||
GITEA_SERVER: https://git.warky.dev
|
||||
GITEA_REPO: wdevs/amcs
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.26'
|
||||
|
||||
- name: Cache Go modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cache/go-build
|
||||
~/go/pkg/mod
|
||||
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-go-
|
||||
|
||||
- name: Download dependencies
|
||||
run: go mod download
|
||||
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
|
||||
- name: Install pnpm
|
||||
run: npm install -g pnpm
|
||||
|
||||
- name: Build UI
|
||||
run: |
|
||||
cd ui
|
||||
pnpm install --frozen-lockfile
|
||||
pnpm run build
|
||||
|
||||
- name: Set build vars
|
||||
id: vars
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag || github.ref_name }}"
|
||||
echo "VERSION=${TAG}" >> $GITHUB_OUTPUT
|
||||
echo "COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
||||
echo "BUILD_DATE=$(date -u +%Y-%m-%dT%H:%M:%SZ)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build release binaries
|
||||
run: |
|
||||
VERSION="${{ steps.vars.outputs.VERSION }}"
|
||||
COMMIT="${{ steps.vars.outputs.COMMIT }}"
|
||||
BUILD_DATE="${{ steps.vars.outputs.BUILD_DATE }}"
|
||||
LDFLAGS="-s -w -X git.warky.dev/wdevs/amcs/internal/buildinfo.Version=${VERSION} -X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION} -X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT} -X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}"
|
||||
mkdir -p dist
|
||||
for BINARY in amcs-server amcs-cli; do
|
||||
CMD="./cmd/${BINARY}"
|
||||
for PLATFORM in linux/amd64 linux/arm64 darwin/amd64 darwin/arm64 windows/amd64; do
|
||||
OS="${PLATFORM%/*}"
|
||||
ARCH="${PLATFORM#*/}"
|
||||
EXT=""
|
||||
[ "$OS" = "windows" ] && EXT=".exe"
|
||||
OUTPUT="dist/${BINARY}-${OS}-${ARCH}${EXT}"
|
||||
echo "Building ${OUTPUT}..."
|
||||
GOOS=$OS GOARCH=$ARCH go build -ldflags "${LDFLAGS}" -o "${OUTPUT}" "${CMD}"
|
||||
done
|
||||
done
|
||||
cd dist && sha256sum * > checksums.txt && cd ..
|
||||
|
||||
- name: Create Gitea Release
|
||||
id: create_release
|
||||
run: |
|
||||
export VERSION="${{ steps.vars.outputs.VERSION }}"
|
||||
BODY=$(python3 <<'PY'
|
||||
import json, subprocess, os
|
||||
version = os.environ['VERSION']
|
||||
commit = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], text=True).strip()
|
||||
body = f"## {version}\n\nBuilt from commit {commit}.\n\nSee `checksums.txt` to verify downloads."
|
||||
print(json.dumps({
|
||||
'tag_name': version,
|
||||
'name': version,
|
||||
'body': body,
|
||||
'draft': False,
|
||||
'prerelease': False,
|
||||
}))
|
||||
PY
|
||||
)
|
||||
RESPONSE=$(curl -fsS -X POST "${{ env.GITEA_SERVER }}/api/v1/repos/${{ env.GITEA_REPO }}/releases" \
|
||||
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$BODY")
|
||||
RELEASE_ID=$(printf '%s' "$RESPONSE" | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')
|
||||
echo "RELEASE_ID=${RELEASE_ID}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Upload release assets
|
||||
run: |
|
||||
RELEASE_ID="${{ steps.create_release.outputs.RELEASE_ID }}"
|
||||
for f in dist/*; do
|
||||
name=$(basename "$f")
|
||||
echo "Uploading ${name}..."
|
||||
curl -fsS -X POST \
|
||||
"${{ env.GITEA_SERVER }}/api/v1/repos/${{ env.GITEA_REPO }}/releases/${RELEASE_ID}/assets?name=${name}" \
|
||||
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||
-H "Content-Type: application/octet-stream" \
|
||||
--data-binary @"${f}"
|
||||
done
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -31,3 +31,6 @@ cmd/amcs-server/__debug_*
|
||||
bin/
|
||||
.cache/
|
||||
OB1/
|
||||
ui/node_modules/
|
||||
ui/.svelte-kit/
|
||||
internal/app/ui/dist/
|
||||
|
||||
12
Dockerfile
12
Dockerfile
@@ -1,3 +1,14 @@
|
||||
FROM node:22-bookworm AS ui-builder
|
||||
|
||||
RUN npm install -g pnpm
|
||||
WORKDIR /src/ui
|
||||
|
||||
COPY ui/package.json ui/pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
COPY ui/ ./
|
||||
RUN pnpm run build
|
||||
|
||||
FROM golang:1.26.1-bookworm AS builder
|
||||
|
||||
WORKDIR /src
|
||||
@@ -6,6 +17,7 @@ COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
COPY . .
|
||||
COPY --from=ui-builder /src/internal/app/ui/dist ./internal/app/ui/dist
|
||||
|
||||
RUN set -eu; \
|
||||
VERSION_TAG="$(git describe --tags --exact-match 2>/dev/null || echo dev)"; \
|
||||
|
||||
25
Makefile
25
Makefile
@@ -3,6 +3,7 @@ GO_CACHE_DIR := $(CURDIR)/.cache/go-build
|
||||
SERVER_BIN := $(BIN_DIR)/amcs-server
|
||||
CMD_SERVER := ./cmd/amcs-server
|
||||
BUILDINFO_PKG := git.warky.dev/wdevs/amcs/internal/buildinfo
|
||||
UI_DIR := $(CURDIR)/ui
|
||||
PATCH_INCREMENT ?= 1
|
||||
VERSION_TAG ?= $(shell git describe --tags --exact-match 2>/dev/null || echo dev)
|
||||
COMMIT_SHA ?= $(shell git rev-parse --short HEAD 2>/dev/null || echo unknown)
|
||||
@@ -11,21 +12,34 @@ RELSPEC ?= $(shell command -v relspec 2>/dev/null || echo $(HOME)/go/bin/relspec
|
||||
SCHEMA_FILES := $(sort $(wildcard schema/*.dbml))
|
||||
MERGE_TARGET_TMP := $(CURDIR)/.cache/schema.merge-target.dbml
|
||||
GENERATED_SCHEMA_MIGRATION := migrations/020_generated_schema.sql
|
||||
PNPM ?= pnpm
|
||||
LDFLAGS := -s -w \
|
||||
-X $(BUILDINFO_PKG).Version=$(VERSION_TAG) \
|
||||
-X $(BUILDINFO_PKG).TagName=$(VERSION_TAG) \
|
||||
-X $(BUILDINFO_PKG).Commit=$(COMMIT_SHA) \
|
||||
-X $(BUILDINFO_PKG).BuildDate=$(BUILD_DATE)
|
||||
|
||||
.PHONY: all build clean migrate release-version test generate-migrations check-schema-drift
|
||||
.PHONY: all build clean migrate release-version test generate-migrations check-schema-drift build-cli ui-install ui-build ui-dev ui-check
|
||||
|
||||
all: build
|
||||
|
||||
build:
|
||||
build: ui-build
|
||||
@mkdir -p $(BIN_DIR)
|
||||
go build -ldflags "$(LDFLAGS)" -o $(SERVER_BIN) $(CMD_SERVER)
|
||||
|
||||
test:
|
||||
ui-install:
|
||||
cd $(UI_DIR) && $(PNPM) install --frozen-lockfile
|
||||
|
||||
ui-build: ui-install
|
||||
cd $(UI_DIR) && $(PNPM) run build
|
||||
|
||||
ui-dev: ui-install
|
||||
cd $(UI_DIR) && $(PNPM) run dev
|
||||
|
||||
ui-check: ui-install
|
||||
cd $(UI_DIR) && $(PNPM) run check
|
||||
|
||||
test: ui-check
|
||||
@mkdir -p $(GO_CACHE_DIR)
|
||||
GOCACHE=$(GO_CACHE_DIR) go test ./...
|
||||
|
||||
@@ -47,6 +61,7 @@ release-version:
|
||||
exit 1; \
|
||||
fi; \
|
||||
git tag -a "$$next_tag" -m "Release $$next_tag"; \
|
||||
git push origin "$$next_tag"; \
|
||||
echo "$$next_tag"
|
||||
|
||||
migrate:
|
||||
@@ -78,3 +93,7 @@ check-schema-drift:
|
||||
exit 1; \
|
||||
fi; \
|
||||
rm -f $$tmpfile
|
||||
|
||||
build-cli:
|
||||
@mkdir -p $(BIN_DIR)
|
||||
go build -o $(BIN_DIR)/amcs-cli ./cmd/amcs-cli
|
||||
|
||||
111
README.md
111
README.md
@@ -46,21 +46,29 @@ A Go MCP server for capturing and retrieving thoughts, memory, and project conte
|
||||
| `load_file` | Retrieve a stored file by ID; returns metadata, base64 content, and an embedded MCP binary resource |
|
||||
| `list_files` | Browse stored files by thought, project, or kind |
|
||||
| `backfill_embeddings` | Generate missing embeddings for stored thoughts |
|
||||
| `reparse_thought_metadata` | Re-extract and normalize metadata for stored thoughts |
|
||||
| `retry_failed_metadata` | Retry metadata extraction for thoughts still pending or failed |
|
||||
| `add_skill` | Store a reusable agent skill (behavioural instruction or capability prompt) |
|
||||
| `reparse_thought_metadata` | Re-extract metadata from thought content |
|
||||
| `retry_failed_metadata` | Retry pending/failed metadata extraction |
|
||||
| `add_maintenance_task` | Create a recurring or one-time home maintenance task |
|
||||
| `log_maintenance` | Log completed maintenance; updates next due date |
|
||||
| `get_upcoming_maintenance` | List maintenance tasks due within the next N days |
|
||||
| `search_maintenance_history` | Search the maintenance log by task name, category, or date range |
|
||||
| `save_chat_history` | Save chat messages with optional title, summary, channel, agent, and project |
|
||||
| `get_chat_history` | Fetch chat history by UUID or session_id |
|
||||
| `list_chat_histories` | List chat histories; filter by project, channel, agent_id, session_id, or days |
|
||||
| `delete_chat_history` | Delete a chat history by id |
|
||||
| `add_skill` | Store an agent skill (instruction or capability prompt) |
|
||||
| `remove_skill` | Delete an agent skill by id |
|
||||
| `list_skills` | List all agent skills, optionally filtered by tag |
|
||||
| `add_guardrail` | Store a reusable agent guardrail (constraint or safety rule) |
|
||||
| `add_guardrail` | Store an agent guardrail (constraint or safety rule) |
|
||||
| `remove_guardrail` | Delete an agent guardrail by id |
|
||||
| `list_guardrails` | List all agent guardrails, optionally filtered by tag or severity |
|
||||
| `add_project_skill` | Link an agent skill to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `remove_project_skill` | Unlink an agent skill from a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `list_project_skills` | List all skills linked to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `add_project_guardrail` | Link an agent guardrail to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `remove_project_guardrail` | Unlink an agent guardrail from a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `list_project_guardrails` | List all guardrails linked to a project; pass `project` explicitly if your client does not preserve MCP sessions |
|
||||
| `get_version_info` | Return the server build version information, including version, tag name, commit, and build date |
|
||||
| `add_project_skill` | Link a skill to a project; pass `project` if client is stateless |
|
||||
| `remove_project_skill` | Unlink a skill from a project; pass `project` if client is stateless |
|
||||
| `list_project_skills` | Skills for a project; pass `project` if client is stateless |
|
||||
| `add_project_guardrail` | Link a guardrail to a project; pass `project` if client is stateless |
|
||||
| `remove_project_guardrail` | Unlink a guardrail from a project; pass `project` if client is stateless |
|
||||
| `list_project_guardrails` | Guardrails for a project; pass `project` if client is stateless |
|
||||
| `get_version_info` | Build version, commit, and date |
|
||||
| `describe_tools` | List all available MCP tools with names, descriptions, categories, and model-authored usage notes; call this at the start of a session to orient yourself |
|
||||
| `annotate_tool` | Persist your own usage notes for a specific tool; notes are returned by `describe_tools` in future sessions |
|
||||
|
||||
@@ -74,7 +82,7 @@ AMCS includes a built-in tool directory that models can read and annotate.
|
||||
{ "category": "thoughts" }
|
||||
```
|
||||
|
||||
Available categories: `system`, `thoughts`, `projects`, `files`, `admin`, `household`, `maintenance`, `calendar`, `meals`, `crm`, `skills`, `chat`, `meta`.
|
||||
Available categories: `system`, `thoughts`, `projects`, `files`, `admin`, `maintenance`, `skills`, `chat`, `meta`.
|
||||
|
||||
**`annotate_tool`** lets a model write persistent usage notes against a tool name. Notes survive across sessions and are returned by `describe_tools`:
|
||||
|
||||
@@ -525,13 +533,90 @@ Recommended Apache settings:
|
||||
- `ProxyTimeout 600` and `ProxyPass ... timeout=600` give Apache enough time to wait for the Go backend.
|
||||
- If another proxy or load balancer sits in front of Apache, align its size and timeout settings too.
|
||||
|
||||
## CLI
|
||||
|
||||
`amcs-cli` is a pre-built CLI client for the AMCS MCP server. Download it from https://git.warky.dev/wdevs/amcs/releases
|
||||
|
||||
The primary purpose is to give agents and MCP clients a ready-made bridge to the AMCS server so they do not need to implement their own HTTP MCP client. Configure it once and any stdio-based MCP client can use AMCS immediately.
|
||||
|
||||
### Commands
|
||||
|
||||
| Command | Purpose |
|
||||
|---|---|
|
||||
| `amcs-cli tools` | List all tools available on the remote server |
|
||||
| `amcs-cli call <tool>` | Call a tool by name with `--arg key=value` flags |
|
||||
| `amcs-cli stdio` | Start a stdio MCP bridge backed by the remote server |
|
||||
|
||||
`stdio` is the main integration point. It connects to the remote HTTP MCP server, discovers all its tools, and re-exposes them over stdio. Register it as a stdio MCP server in your agent config and it proxies every tool call through to AMCS.
|
||||
|
||||
### Configuration
|
||||
|
||||
Config file: `~/.config/amcs/config.yaml`
|
||||
|
||||
```yaml
|
||||
server: https://your-amcs-server
|
||||
token: your-bearer-token
|
||||
```
|
||||
|
||||
Env vars override the config file: `AMCS_URL`, `AMCS_TOKEN`. Flags `--server` and `--token` override env vars.
|
||||
|
||||
### stdio MCP client setup
|
||||
|
||||
#### Claude Code
|
||||
|
||||
```bash
|
||||
claude mcp add --transport stdio amcs amcs-cli stdio
|
||||
```
|
||||
|
||||
With inline credentials (no config file):
|
||||
|
||||
```bash
|
||||
claude mcp add --transport stdio amcs amcs-cli stdio \
|
||||
--env AMCS_URL=https://your-amcs-server \
|
||||
--env AMCS_TOKEN=your-bearer-token
|
||||
```
|
||||
|
||||
#### Output format
|
||||
|
||||
`call` outputs JSON by default. Pass `--output yaml` for YAML.
|
||||
|
||||
## Development
|
||||
|
||||
Run the SQL migrations against a local database with:
|
||||
|
||||
`DATABASE_URL=postgres://... make migrate`
|
||||
|
||||
LLM integration instructions are served at `/llm`.
|
||||
### Backend + embedded UI build
|
||||
|
||||
The web UI now lives in the top-level `ui/` module and is embedded into the Go binary at build time with `go:embed`.
|
||||
|
||||
**Use `pnpm` for all UI work in this repo.**
|
||||
|
||||
- `make build` — runs the real UI build first, then compiles the Go server
|
||||
- `make test` — runs `svelte-check` for the frontend and `go test ./...` for the backend
|
||||
- `make ui-install` — installs frontend dependencies with `pnpm install --frozen-lockfile`
|
||||
- `make ui-build` — builds only the frontend bundle
|
||||
- `make ui-dev` — starts the Vite dev server with hot reload on `http://localhost:5173`
|
||||
- `make ui-check` — runs the frontend type and Svelte checks
|
||||
|
||||
### Local UI workflow
|
||||
|
||||
For the normal production-style local flow:
|
||||
|
||||
1. Start the backend: `./scripts/run-local.sh configs/dev.yaml`
|
||||
2. Open `http://localhost:8080`
|
||||
|
||||
For frontend iteration with hot reload and no Go rebuilds:
|
||||
|
||||
1. Start the backend once: `go run ./cmd/amcs-server --config configs/dev.yaml`
|
||||
2. In another shell start the UI dev server: `make ui-dev`
|
||||
3. Open `http://localhost:5173`
|
||||
|
||||
The Vite dev server proxies backend routes such as `/api/status`, `/llm`, `/healthz`, `/readyz`, `/files`, `/mcp`, and the OAuth endpoints back to the Go server on `http://127.0.0.1:8080` by default. Override that target with `AMCS_UI_BACKEND` if needed.
|
||||
|
||||
The root page (`/`) is now the Svelte frontend. It preserves the existing landing-page content and status information by fetching data from `GET /api/status`.
|
||||
|
||||
LLM integration instructions are still served at `/llm`.
|
||||
|
||||
## Containers
|
||||
|
||||
|
||||
98
cmd/amcs-cli/cmd/call.go
Normal file
98
cmd/amcs-cli/cmd/call.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var argFlags []string
|
||||
|
||||
var callCmd = &cobra.Command{
|
||||
Use: "call <tool>",
|
||||
Short: "Call a remote AMCS tool",
|
||||
Args: cobra.ExactArgs(1),
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
toolName := args[0]
|
||||
toolArgs, err := parseArgs(argFlags)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
session, err := connectRemote(cmd.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = session.Close() }()
|
||||
|
||||
res, err := session.CallTool(cmd.Context(), &mcp.CallToolParams{Name: toolName, Arguments: toolArgs})
|
||||
if err != nil {
|
||||
return fmt.Errorf("call tool %q: %w", toolName, err)
|
||||
}
|
||||
return printOutput(res)
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
callCmd.Flags().StringArrayVar(&argFlags, "arg", nil, "Tool argument in key=value format (repeatable)")
|
||||
rootCmd.AddCommand(callCmd)
|
||||
}
|
||||
|
||||
func parseArgs(items []string) (map[string]any, error) {
|
||||
result := make(map[string]any, len(items))
|
||||
for _, item := range items {
|
||||
key, value, ok := strings.Cut(item, "=")
|
||||
if !ok || strings.TrimSpace(key) == "" {
|
||||
return nil, fmt.Errorf("invalid --arg %q: want key=value", item)
|
||||
}
|
||||
result[key] = parseScalar(value)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func parseScalar(s string) any {
|
||||
if s == "true" || s == "false" {
|
||||
b, _ := strconv.ParseBool(s)
|
||||
return b
|
||||
}
|
||||
if i, err := strconv.ParseInt(s, 10, 64); err == nil {
|
||||
return i
|
||||
}
|
||||
if f, err := strconv.ParseFloat(s, 64); err == nil && strings.ContainsAny(s, ".eE") {
|
||||
return f
|
||||
}
|
||||
var v any
|
||||
if err := json.Unmarshal([]byte(s), &v); err == nil {
|
||||
switch v.(type) {
|
||||
case map[string]any, []any, float64, bool, nil:
|
||||
return v
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func printOutput(v any) error {
|
||||
switch outputFlag {
|
||||
case "yaml":
|
||||
data, err := yaml.Marshal(v)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal yaml: %w", err)
|
||||
}
|
||||
_, err = os.Stdout.Write(data)
|
||||
return err
|
||||
default:
|
||||
data, err := json.MarshalIndent(v, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal json: %w", err)
|
||||
}
|
||||
data = append(data, '\n')
|
||||
_, err = os.Stdout.Write(data)
|
||||
return err
|
||||
}
|
||||
}
|
||||
60
cmd/amcs-cli/cmd/config.go
Normal file
60
cmd/amcs-cli/cmd/config.go
Normal file
@@ -0,0 +1,60 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Server string `yaml:"server"`
|
||||
Token string `yaml:"token"`
|
||||
}
|
||||
|
||||
func defaultConfigPath() (string, error) {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("resolve home dir: %w", err)
|
||||
}
|
||||
return filepath.Join(home, ".config", "amcs", "config.yaml"), nil
|
||||
}
|
||||
|
||||
func resolveConfigPath(path string) (string, error) {
|
||||
if strings.TrimSpace(path) != "" {
|
||||
return path, nil
|
||||
}
|
||||
return defaultConfigPath()
|
||||
}
|
||||
|
||||
func loadConfigFile(path string) (Config, error) {
|
||||
var cfg Config
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
return cfg, nil
|
||||
}
|
||||
return cfg, fmt.Errorf("read config: %w", err)
|
||||
}
|
||||
if err := yaml.Unmarshal(data, &cfg); err != nil {
|
||||
return cfg, fmt.Errorf("parse config: %w", err)
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func saveConfigFile(path string, cfg Config) error {
|
||||
if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
|
||||
return fmt.Errorf("create config dir: %w", err)
|
||||
}
|
||||
data, err := yaml.Marshal(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal config: %w", err)
|
||||
}
|
||||
if err := os.WriteFile(path, data, 0o600); err != nil {
|
||||
return fmt.Errorf("write config: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
134
cmd/amcs-cli/cmd/root.go
Normal file
134
cmd/amcs-cli/cmd/root.go
Normal file
@@ -0,0 +1,134 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
cfgFile string
|
||||
serverFlag string
|
||||
tokenFlag string
|
||||
outputFlag string
|
||||
cfg Config
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "amcs-cli",
|
||||
Short: "CLI for connecting to a remote AMCS MCP server",
|
||||
SilenceUsage: true,
|
||||
SilenceErrors: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, _ []string) error {
|
||||
return loadConfig()
|
||||
},
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "Path to config file")
|
||||
rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL")
|
||||
rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token")
|
||||
rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml")
|
||||
}
|
||||
|
||||
func loadConfig() error {
|
||||
path, err := resolveConfigPath(cfgFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
loaded, err := loadConfigFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg = loaded
|
||||
if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" {
|
||||
cfg.Server = v
|
||||
}
|
||||
if v := strings.TrimSpace(os.Getenv("AMCS_TOKEN")); v != "" {
|
||||
cfg.Token = v
|
||||
}
|
||||
if v := strings.TrimSpace(serverFlag); v != "" {
|
||||
cfg.Server = v
|
||||
}
|
||||
if v := strings.TrimSpace(tokenFlag); v != "" {
|
||||
cfg.Token = v
|
||||
}
|
||||
outputFlag = strings.ToLower(strings.TrimSpace(outputFlag))
|
||||
if outputFlag != "json" && outputFlag != "yaml" {
|
||||
return fmt.Errorf("invalid --output %q: must be json or yaml", outputFlag)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func requireServer() error {
|
||||
if strings.TrimSpace(cfg.Server) == "" {
|
||||
return fmt.Errorf("server URL is required; set --server, AMCS_URL, or config server")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func endpointURL() string {
|
||||
base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/")
|
||||
if strings.HasSuffix(base, "/mcp") {
|
||||
return base
|
||||
}
|
||||
return base + "/mcp"
|
||||
}
|
||||
|
||||
func newHTTPClient() *http.Client {
|
||||
return &http.Client{
|
||||
Timeout: 0,
|
||||
Transport: &bearerTransport{
|
||||
base: http.DefaultTransport,
|
||||
token: cfg.Token,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type bearerTransport struct {
|
||||
base http.RoundTripper
|
||||
token string
|
||||
}
|
||||
|
||||
func (t *bearerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
base := t.base
|
||||
if base == nil {
|
||||
base = http.DefaultTransport
|
||||
}
|
||||
clone := req.Clone(req.Context())
|
||||
if strings.TrimSpace(t.token) != "" {
|
||||
clone.Header.Set("Authorization", "Bearer "+t.token)
|
||||
}
|
||||
return base.RoundTrip(clone)
|
||||
}
|
||||
|
||||
func connectRemote(ctx context.Context) (*mcp.ClientSession, error) {
|
||||
if err := requireServer(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
||||
transport := &mcp.StreamableClientTransport{
|
||||
Endpoint: endpointURL(),
|
||||
HTTPClient: newHTTPClient(),
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
||||
defer cancel()
|
||||
session, err := client.Connect(ctx, transport, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("connect to AMCS server: %w", err)
|
||||
}
|
||||
return session, nil
|
||||
}
|
||||
86
cmd/amcs-cli/cmd/sse.go
Normal file
86
cmd/amcs-cli/cmd/sse.go
Normal file
@@ -0,0 +1,86 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sseCmd = &cobra.Command{
|
||||
Use: "sse",
|
||||
Short: "Run a stdio MCP bridge backed by a remote AMCS server using SSE transport (widely supported by hosted MCP clients)",
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if err := requireServer(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil)
|
||||
transport := &mcp.SSEClientTransport{
|
||||
Endpoint: sseEndpointURL(),
|
||||
HTTPClient: newHTTPClient(),
|
||||
}
|
||||
|
||||
connectCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
remote, err := client.Connect(connectCtx, transport, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("connect to AMCS SSE endpoint: %w", err)
|
||||
}
|
||||
defer func() { _ = remote.Close() }()
|
||||
|
||||
tools, err := remote.ListTools(ctx, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("load remote tools: %w", err)
|
||||
}
|
||||
|
||||
server := mcp.NewServer(&mcp.Implementation{
|
||||
Name: "amcs-cli",
|
||||
Title: "AMCS CLI Bridge (SSE)",
|
||||
Version: "0.0.1",
|
||||
}, nil)
|
||||
|
||||
for _, tool := range tools.Tools {
|
||||
remoteTool := tool
|
||||
server.AddTool(&mcp.Tool{
|
||||
Name: remoteTool.Name,
|
||||
Description: remoteTool.Description,
|
||||
InputSchema: remoteTool.InputSchema,
|
||||
OutputSchema: remoteTool.OutputSchema,
|
||||
Annotations: remoteTool.Annotations,
|
||||
}, func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) {
|
||||
return remote.CallTool(ctx, &mcp.CallToolParams{
|
||||
Name: req.Params.Name,
|
||||
Arguments: req.Params.Arguments,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
session, err := server.Connect(ctx, &mcp.StdioTransport{}, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("start stdio bridge: %w", err)
|
||||
}
|
||||
defer func() { _ = session.Close() }()
|
||||
|
||||
<-ctx.Done()
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
func sseEndpointURL() string {
|
||||
base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/")
|
||||
if strings.HasSuffix(base, "/sse") {
|
||||
return base
|
||||
}
|
||||
return base + "/sse"
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(sseCmd)
|
||||
}
|
||||
62
cmd/amcs-cli/cmd/stdio.go
Normal file
62
cmd/amcs-cli/cmd/stdio.go
Normal file
@@ -0,0 +1,62 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var stdioCmd = &cobra.Command{
|
||||
Use: "stdio",
|
||||
Short: "Run a stdio MCP bridge backed by a remote AMCS server",
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
ctx := cmd.Context()
|
||||
remote, err := connectRemote(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = remote.Close() }()
|
||||
|
||||
tools, err := remote.ListTools(ctx, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("load remote tools: %w", err)
|
||||
}
|
||||
|
||||
server := mcp.NewServer(&mcp.Implementation{
|
||||
Name: "amcs-cli",
|
||||
Title: "AMCS CLI Bridge",
|
||||
Version: "0.0.1",
|
||||
}, nil)
|
||||
|
||||
for _, tool := range tools.Tools {
|
||||
remoteTool := tool
|
||||
server.AddTool(&mcp.Tool{
|
||||
Name: remoteTool.Name,
|
||||
Description: remoteTool.Description,
|
||||
InputSchema: remoteTool.InputSchema,
|
||||
OutputSchema: remoteTool.OutputSchema,
|
||||
Annotations: remoteTool.Annotations,
|
||||
}, func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) {
|
||||
return remote.CallTool(ctx, &mcp.CallToolParams{
|
||||
Name: req.Params.Name,
|
||||
Arguments: req.Params.Arguments,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
session, err := server.Connect(ctx, &mcp.StdioTransport{}, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("start stdio bridge: %w", err)
|
||||
}
|
||||
defer func() { _ = session.Close() }()
|
||||
|
||||
<-ctx.Done()
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(stdioCmd)
|
||||
}
|
||||
38
cmd/amcs-cli/cmd/tools.go
Normal file
38
cmd/amcs-cli/cmd/tools.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var toolsCmd = &cobra.Command{
|
||||
Use: "tools",
|
||||
Short: "List tools available on the remote AMCS server",
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
session, err := connectRemote(cmd.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = session.Close() }()
|
||||
|
||||
res, err := session.ListTools(cmd.Context(), nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("list tools: %w", err)
|
||||
}
|
||||
|
||||
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
|
||||
fmt.Fprintln(w, "NAME\tDESCRIPTION")
|
||||
for _, tool := range res.Tools {
|
||||
fmt.Fprintf(w, "%s\t%s\n", tool.Name, strings.TrimSpace(tool.Description))
|
||||
}
|
||||
return w.Flush()
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(toolsCmd)
|
||||
}
|
||||
7
cmd/amcs-cli/main.go
Normal file
7
cmd/amcs-cli/main.go
Normal file
@@ -0,0 +1,7 @@
|
||||
package main
|
||||
|
||||
import "git.warky.dev/wdevs/amcs/cmd/amcs-cli/cmd"
|
||||
|
||||
func main() {
|
||||
cmd.Execute()
|
||||
}
|
||||
@@ -9,6 +9,7 @@ server:
|
||||
|
||||
mcp:
|
||||
path: "/mcp"
|
||||
sse_path: "/sse"
|
||||
server_name: "amcs"
|
||||
transport: "streamable_http"
|
||||
session_timeout: "10m"
|
||||
|
||||
@@ -9,6 +9,7 @@ server:
|
||||
|
||||
mcp:
|
||||
path: "/mcp"
|
||||
sse_path: "/sse"
|
||||
server_name: "amcs"
|
||||
transport: "streamable_http"
|
||||
session_timeout: "10m"
|
||||
|
||||
@@ -9,6 +9,7 @@ server:
|
||||
|
||||
mcp:
|
||||
path: "/mcp"
|
||||
sse_path: "/sse"
|
||||
server_name: "amcs"
|
||||
transport: "streamable_http"
|
||||
session_timeout: "10m"
|
||||
|
||||
3
go.mod
3
go.mod
@@ -8,11 +8,13 @@ require (
|
||||
github.com/jackc/pgx/v5 v5.9.1
|
||||
github.com/modelcontextprotocol/go-sdk v1.4.1
|
||||
github.com/pgvector/pgvector-go v0.3.0
|
||||
github.com/spf13/cobra v1.10.2
|
||||
golang.org/x/sync v0.17.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
@@ -20,6 +22,7 @@ require (
|
||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||
github.com/segmentio/asm v1.1.3 // indirect
|
||||
github.com/segmentio/encoding v0.5.4 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
github.com/x448/float16 v0.8.4 // indirect
|
||||
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
|
||||
golang.org/x/oauth2 v0.34.0 // indirect
|
||||
|
||||
9
go.sum
9
go.sum
@@ -1,5 +1,6 @@
|
||||
entgo.io/ent v0.14.3 h1:wokAV/kIlH9TeklJWGGS7AYJdVckr0DloWjIcO9iIIQ=
|
||||
entgo.io/ent v0.14.3/go.mod h1:aDPE/OziPEu8+OWbzy4UlvWmD2/kbRuWfK2A40hcxJM=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
@@ -16,6 +17,8 @@ github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbc
|
||||
github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
@@ -44,10 +47,15 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/segmentio/asm v1.1.3 h1:WM03sfUOENvvKexOLp+pCqgb/WDjsi7EK8gIsICtzhc=
|
||||
github.com/segmentio/asm v1.1.3/go.mod h1:Ld3L4ZXGNcSLRg4JBsZ3//1+f/TjYl0Mzen/DQy1EJg=
|
||||
github.com/segmentio/encoding v0.5.4 h1:OW1VRern8Nw6ITAtwSZ7Idrl3MXCFwXHPgqESYfvNt0=
|
||||
github.com/segmentio/encoding v0.5.4/go.mod h1:HS1ZKa3kSN32ZHVZ7ZLPLXWvOVIiZtyJnO1gPH1sKt0=
|
||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
@@ -73,6 +81,7 @@ github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
|
||||
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
||||
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
|
||||
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
||||
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
||||
|
||||
@@ -162,10 +162,11 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
||||
oauthEnabled := oauthRegistry != nil && tokenStore != nil
|
||||
authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger)
|
||||
filesTool := tools.NewFilesTool(db, activeProjects)
|
||||
metadataRetryer := tools.NewMetadataRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||
enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger)
|
||||
backfillTool := tools.NewBackfillTool(db, provider, activeProjects, logger)
|
||||
|
||||
toolSet := mcpserver.ToolSet{
|
||||
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, metadataRetryer, logger),
|
||||
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, enrichmentRetryer, backfillTool, logger),
|
||||
Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
|
||||
List: tools.NewListTool(db, cfg.Search, activeProjects),
|
||||
Stats: tools.NewStatsTool(db),
|
||||
@@ -180,24 +181,24 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
||||
Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
|
||||
Links: tools.NewLinksTool(db, provider, cfg.Search),
|
||||
Files: filesTool,
|
||||
Backfill: tools.NewBackfillTool(db, provider, activeProjects, logger),
|
||||
Backfill: backfillTool,
|
||||
Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger),
|
||||
RetryMetadata: tools.NewRetryMetadataTool(metadataRetryer),
|
||||
Household: tools.NewHouseholdTool(db),
|
||||
RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer),
|
||||
Maintenance: tools.NewMaintenanceTool(db),
|
||||
Calendar: tools.NewCalendarTool(db),
|
||||
Meals: tools.NewMealsTool(db),
|
||||
CRM: tools.NewCRMTool(db),
|
||||
Skills: tools.NewSkillsTool(db, activeProjects),
|
||||
ChatHistory: tools.NewChatHistoryTool(db, activeProjects),
|
||||
Describe: tools.NewDescribeTool(db, mcpserver.BuildToolCatalog()),
|
||||
}
|
||||
|
||||
mcpHandler, err := mcpserver.New(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
||||
mcpHandlers, err := mcpserver.NewHandlers(cfg.MCP, logger, toolSet, activeProjects.Clear)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("build mcp handler: %w", err)
|
||||
}
|
||||
mux.Handle(cfg.MCP.Path, authMiddleware(mcpHandler))
|
||||
mux.Handle(cfg.MCP.Path, authMiddleware(mcpHandlers.StreamableHTTP))
|
||||
if mcpHandlers.SSE != nil {
|
||||
mux.Handle(cfg.MCP.SSEPath, authMiddleware(mcpHandlers.SSE))
|
||||
logger.Info("SSE transport enabled", slog.String("sse_path", cfg.MCP.SSEPath))
|
||||
}
|
||||
mux.Handle("/files", authMiddleware(fileHandler(filesTool)))
|
||||
mux.Handle("/files/{id}", authMiddleware(fileHandler(filesTool)))
|
||||
if oauthEnabled {
|
||||
@@ -212,6 +213,7 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st
|
||||
mux.HandleFunc("/images/project.jpg", serveHomeImage)
|
||||
mux.HandleFunc("/images/icon.png", serveIcon)
|
||||
mux.HandleFunc("/llm", serveLLMInstructions)
|
||||
mux.HandleFunc("/api/status", statusAPIHandler(info, accessTracker, oauthEnabled))
|
||||
|
||||
mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -13,131 +15,33 @@ import (
|
||||
|
||||
const connectedWindow = 10 * time.Minute
|
||||
|
||||
type statusPageData struct {
|
||||
Version string
|
||||
BuildDate string
|
||||
Commit string
|
||||
ConnectedCount int
|
||||
TotalKnown int
|
||||
Entries []auth.AccessSnapshot
|
||||
OAuthEnabled bool
|
||||
type statusAPIResponse struct {
|
||||
Title string `json:"title"`
|
||||
Description string `json:"description"`
|
||||
Version string `json:"version"`
|
||||
BuildDate string `json:"build_date"`
|
||||
Commit string `json:"commit"`
|
||||
ConnectedCount int `json:"connected_count"`
|
||||
TotalKnown int `json:"total_known"`
|
||||
ConnectedWindow string `json:"connected_window"`
|
||||
Entries []auth.AccessSnapshot `json:"entries"`
|
||||
OAuthEnabled bool `json:"oauth_enabled"`
|
||||
}
|
||||
|
||||
func renderHomePage(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool, now time.Time) string {
|
||||
func statusSnapshot(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool, now time.Time) statusAPIResponse {
|
||||
entries := tracker.Snapshot()
|
||||
data := statusPageData{
|
||||
Version: fallback(info.Version, "dev"),
|
||||
BuildDate: fallback(info.BuildDate, "unknown"),
|
||||
Commit: fallback(info.Commit, "unknown"),
|
||||
ConnectedCount: tracker.ConnectedCount(now, connectedWindow),
|
||||
TotalKnown: len(entries),
|
||||
Entries: entries,
|
||||
OAuthEnabled: oauthEnabled,
|
||||
return statusAPIResponse{
|
||||
Title: "Avelon Memory Crystal Server (AMCS)",
|
||||
Description: "AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools.",
|
||||
Version: fallback(info.Version, "dev"),
|
||||
BuildDate: fallback(info.BuildDate, "unknown"),
|
||||
Commit: fallback(info.Commit, "unknown"),
|
||||
ConnectedCount: tracker.ConnectedCount(now, connectedWindow),
|
||||
TotalKnown: len(entries),
|
||||
ConnectedWindow: "last 10 minutes",
|
||||
Entries: entries,
|
||||
OAuthEnabled: oauthEnabled,
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString(`<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>AMCS</title>
|
||||
<style>
|
||||
body { margin: 0; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif; background: #f5f7fb; color: #172033; }
|
||||
main { max-width: 980px; margin: 48px auto; background: #fff; border-radius: 12px; box-shadow: 0 10px 28px rgba(23, 32, 51, 0.12); overflow: hidden; }
|
||||
.content { padding: 28px; }
|
||||
h1, h2 { margin: 0 0 12px 0; }
|
||||
p { margin: 0; line-height: 1.5; color: #334155; }
|
||||
.actions { margin-top: 18px; display: flex; flex-wrap: wrap; gap: 10px; }
|
||||
.link { display: inline-block; padding: 10px 14px; border-radius: 8px; background: #172033; color: #fff; text-decoration: none; font-weight: 600; }
|
||||
.link:hover { background: #0f172a; }
|
||||
.stats { display: grid; grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); gap: 12px; margin-top: 24px; }
|
||||
.card { background: #eef2ff; border-radius: 10px; padding: 16px; }
|
||||
.label { display: block; font-size: 0.85rem; text-transform: uppercase; letter-spacing: 0.04em; color: #475569; }
|
||||
.value { display: block; margin-top: 6px; font-size: 1.4rem; font-weight: 700; color: #0f172a; }
|
||||
.meta { margin-top: 28px; color: #475569; font-size: 0.95rem; }
|
||||
table { width: 100%; border-collapse: collapse; margin-top: 16px; }
|
||||
th, td { text-align: left; padding: 10px 8px; border-bottom: 1px solid #e2e8f0; vertical-align: top; }
|
||||
th { font-size: 0.85rem; text-transform: uppercase; letter-spacing: 0.04em; color: #475569; }
|
||||
.empty { margin-top: 16px; color: #64748b; }
|
||||
code { font-family: ui-monospace, SFMono-Regular, Menlo, monospace; }
|
||||
img { display: block; width: 100%; height: auto; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
<img src="/images/project.jpg" alt="Avelon Memory Crystal project image">
|
||||
<div class="content">
|
||||
<h1>Avelon Memory Crystal Server (AMCS)</h1>
|
||||
<p>AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools.</p>
|
||||
<div class="actions">
|
||||
<a class="link" href="/llm">LLM Instructions</a>
|
||||
<a class="link" href="/healthz">Health Check</a>
|
||||
<a class="link" href="/readyz">Readiness Check</a>`)
|
||||
if data.OAuthEnabled {
|
||||
b.WriteString(`
|
||||
<a class="link" href="/oauth-authorization-server">OAuth Authorization Server</a>`)
|
||||
}
|
||||
b.WriteString(`
|
||||
</div>
|
||||
|
||||
<div class="stats">
|
||||
<div class="card">
|
||||
<span class="label">Connected users</span>
|
||||
<span class="value">` + fmt.Sprintf("%d", data.ConnectedCount) + `</span>
|
||||
</div>
|
||||
<div class="card">
|
||||
<span class="label">Known principals</span>
|
||||
<span class="value">` + fmt.Sprintf("%d", data.TotalKnown) + `</span>
|
||||
</div>
|
||||
<div class="card">
|
||||
<span class="label">Version</span>
|
||||
<span class="value">` + html.EscapeString(data.Version) + `</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="meta">
|
||||
<strong>Build date:</strong> ` + html.EscapeString(data.BuildDate) + ` •
|
||||
<strong>Commit:</strong> <code>` + html.EscapeString(data.Commit) + `</code> •
|
||||
<strong>Connected window:</strong> last 10 minutes
|
||||
</div>
|
||||
|
||||
<h2 style="margin-top: 28px;">Recent access</h2>`)
|
||||
if len(data.Entries) == 0 {
|
||||
b.WriteString(`
|
||||
<p class="empty">No authenticated access recorded yet.</p>`)
|
||||
} else {
|
||||
b.WriteString(`
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Principal</th>
|
||||
<th>Last accessed</th>
|
||||
<th>Last path</th>
|
||||
<th>Requests</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>`)
|
||||
for _, entry := range data.Entries {
|
||||
b.WriteString(`
|
||||
<tr>
|
||||
<td><code>` + html.EscapeString(entry.KeyID) + `</code></td>
|
||||
<td>` + html.EscapeString(entry.LastAccessedAt.UTC().Format(time.RFC3339)) + `</td>
|
||||
<td>` + html.EscapeString(entry.LastPath) + `</td>
|
||||
<td>` + fmt.Sprintf("%d", entry.RequestCount) + `</td>
|
||||
</tr>`)
|
||||
}
|
||||
b.WriteString(`
|
||||
</tbody>
|
||||
</table>`)
|
||||
}
|
||||
b.WriteString(`
|
||||
</div>
|
||||
</main>
|
||||
</body>
|
||||
</html>`)
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func fallback(value, defaultValue string) string {
|
||||
@@ -147,25 +51,90 @@ func fallback(value, defaultValue string) string {
|
||||
return value
|
||||
}
|
||||
|
||||
func homeHandler(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool) http.HandlerFunc {
|
||||
func statusAPIHandler(info buildinfo.Info, tracker *auth.AccessTracker, oauthEnabled bool) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path != "/" {
|
||||
if r.URL.Path != "/api/status" {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||
w.Header().Set("Allow", "GET, HEAD")
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
if r.Method == http.MethodHead {
|
||||
return
|
||||
}
|
||||
|
||||
_, _ = w.Write([]byte(renderHomePage(info, tracker, oauthEnabled, time.Now())))
|
||||
_ = json.NewEncoder(w).Encode(statusSnapshot(info, tracker, oauthEnabled, time.Now()))
|
||||
}
|
||||
}
|
||||
|
||||
func homeHandler(_ buildinfo.Info, _ *auth.AccessTracker, _ bool) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet && r.Method != http.MethodHead {
|
||||
w.Header().Set("Allow", "GET, HEAD")
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
requestPath := strings.TrimPrefix(path.Clean(r.URL.Path), "/")
|
||||
if requestPath == "." {
|
||||
requestPath = ""
|
||||
}
|
||||
|
||||
if requestPath != "" {
|
||||
if serveUIAsset(w, r, requestPath) {
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
serveUIIndex(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
func serveUIAsset(w http.ResponseWriter, r *http.Request, name string) bool {
|
||||
if uiDistFS == nil {
|
||||
return false
|
||||
}
|
||||
if strings.Contains(name, "..") {
|
||||
return false
|
||||
}
|
||||
file, err := uiDistFS.Open(name)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
info, err := file.Stat()
|
||||
if err != nil || info.IsDir() {
|
||||
return false
|
||||
}
|
||||
|
||||
data, err := fs.ReadFile(uiDistFS, name)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
http.ServeContent(w, r, info.Name(), info.ModTime(), bytes.NewReader(data))
|
||||
return true
|
||||
}
|
||||
|
||||
func serveUIIndex(w http.ResponseWriter, r *http.Request) {
|
||||
if indexHTML == nil {
|
||||
http.Error(w, "ui assets not built", http.StatusServiceUnavailable)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
if r.Method == http.MethodHead {
|
||||
return
|
||||
}
|
||||
_, _ = w.Write(indexHTML)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
@@ -14,29 +15,62 @@ import (
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
)
|
||||
|
||||
func TestRenderHomePageHidesOAuthLinkWhenDisabled(t *testing.T) {
|
||||
func TestStatusSnapshotHidesOAuthLinkWhenDisabled(t *testing.T) {
|
||||
tracker := auth.NewAccessTracker()
|
||||
page := renderHomePage(buildinfo.Info{Version: "v1.2.3", BuildDate: "2026-04-04", Commit: "abc123"}, tracker, false, time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC))
|
||||
snapshot := statusSnapshot(buildinfo.Info{Version: "v1.2.3", BuildDate: "2026-04-04", Commit: "abc123"}, tracker, false, time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC))
|
||||
|
||||
if strings.Contains(page, "/oauth-authorization-server") {
|
||||
t.Fatal("page unexpectedly contains OAuth link")
|
||||
if snapshot.OAuthEnabled {
|
||||
t.Fatal("OAuthEnabled = true, want false")
|
||||
}
|
||||
if !strings.Contains(page, "Connected users") {
|
||||
t.Fatal("page missing Connected users stat")
|
||||
if snapshot.ConnectedCount != 0 {
|
||||
t.Fatalf("ConnectedCount = %d, want 0", snapshot.ConnectedCount)
|
||||
}
|
||||
if snapshot.Title == "" {
|
||||
t.Fatal("Title = empty, want non-empty")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRenderHomePageShowsTrackedAccess(t *testing.T) {
|
||||
func TestStatusSnapshotShowsTrackedAccess(t *testing.T) {
|
||||
tracker := auth.NewAccessTracker()
|
||||
now := time.Date(2026, 4, 4, 12, 0, 0, 0, time.UTC)
|
||||
tracker.Record("client-a", "/files", "127.0.0.1:1234", "tester", now)
|
||||
|
||||
page := renderHomePage(buildinfo.Info{Version: "v1.2.3"}, tracker, true, now)
|
||||
snapshot := statusSnapshot(buildinfo.Info{Version: "v1.2.3"}, tracker, true, now)
|
||||
|
||||
for _, needle := range []string{"client-a", "/files", "1</span>", "/oauth-authorization-server"} {
|
||||
if !strings.Contains(page, needle) {
|
||||
t.Fatalf("page missing %q", needle)
|
||||
}
|
||||
if !snapshot.OAuthEnabled {
|
||||
t.Fatal("OAuthEnabled = false, want true")
|
||||
}
|
||||
if snapshot.ConnectedCount != 1 {
|
||||
t.Fatalf("ConnectedCount = %d, want 1", snapshot.ConnectedCount)
|
||||
}
|
||||
if len(snapshot.Entries) != 1 {
|
||||
t.Fatalf("len(Entries) = %d, want 1", len(snapshot.Entries))
|
||||
}
|
||||
if snapshot.Entries[0].KeyID != "client-a" || snapshot.Entries[0].LastPath != "/files" {
|
||||
t.Fatalf("entry = %+v, want keyID client-a and path /files", snapshot.Entries[0])
|
||||
}
|
||||
}
|
||||
|
||||
func TestStatusAPIHandlerReturnsJSON(t *testing.T) {
|
||||
handler := statusAPIHandler(buildinfo.Info{Version: "v1"}, auth.NewAccessTracker(), true)
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/status", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("status = %d, want %d", rec.Code, http.StatusOK)
|
||||
}
|
||||
if got := rec.Header().Get("Content-Type"); !strings.Contains(got, "application/json") {
|
||||
t.Fatalf("content-type = %q, want application/json", got)
|
||||
}
|
||||
|
||||
var payload statusAPIResponse
|
||||
if err := json.Unmarshal(rec.Body.Bytes(), &payload); err != nil {
|
||||
t.Fatalf("json.Unmarshal() error = %v", err)
|
||||
}
|
||||
if payload.Version != "v1" {
|
||||
t.Fatalf("version = %q, want %q", payload.Version, "v1")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,6 +89,21 @@ func TestHomeHandlerAllowsHead(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestHomeHandlerServesIndex(t *testing.T) {
|
||||
handler := homeHandler(buildinfo.Info{Version: "v1"}, auth.NewAccessTracker(), false)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("status = %d, want %d", rec.Code, http.StatusOK)
|
||||
}
|
||||
if !strings.Contains(rec.Body.String(), "<div id=\"app\"></div>") {
|
||||
t.Fatalf("body = %q, want embedded UI index", rec.Body.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestMiddlewareRecordsAuthenticatedAccess(t *testing.T) {
|
||||
keyring, err := auth.NewKeyring([]config.APIKey{{ID: "client-a", Value: "secret"}})
|
||||
if err != nil {
|
||||
|
||||
22
internal/app/ui_assets.go
Normal file
22
internal/app/ui_assets.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"io/fs"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed ui/dist
|
||||
uiFiles embed.FS
|
||||
uiDistFS fs.FS
|
||||
indexHTML []byte
|
||||
)
|
||||
|
||||
func init() {
|
||||
dist, err := fs.Sub(uiFiles, "ui/dist")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
uiDistFS = dist
|
||||
indexHTML, _ = fs.ReadFile(uiDistFS, "index.html")
|
||||
}
|
||||
@@ -7,12 +7,12 @@ import (
|
||||
)
|
||||
|
||||
type AccessSnapshot struct {
|
||||
KeyID string
|
||||
LastPath string
|
||||
RemoteAddr string
|
||||
UserAgent string
|
||||
RequestCount int
|
||||
LastAccessedAt time.Time
|
||||
KeyID string `json:"key_id"`
|
||||
LastPath string `json:"last_path"`
|
||||
RemoteAddr string `json:"remote_addr"`
|
||||
UserAgent string `json:"user_agent"`
|
||||
RequestCount int `json:"request_count"`
|
||||
LastAccessedAt time.Time `json:"last_accessed_at"`
|
||||
}
|
||||
|
||||
type AccessTracker struct {
|
||||
|
||||
@@ -32,6 +32,7 @@ type ServerConfig struct {
|
||||
|
||||
type MCPConfig struct {
|
||||
Path string `yaml:"path"`
|
||||
SSEPath string `yaml:"sse_path"`
|
||||
ServerName string `yaml:"server_name"`
|
||||
Version string `yaml:"version"`
|
||||
Transport string `yaml:"transport"`
|
||||
|
||||
@@ -58,6 +58,7 @@ func defaultConfig() Config {
|
||||
},
|
||||
MCP: MCPConfig{
|
||||
Path: "/mcp",
|
||||
SSEPath: "/sse",
|
||||
ServerName: "amcs",
|
||||
Version: info.Version,
|
||||
Transport: "streamable_http",
|
||||
|
||||
@@ -33,6 +33,14 @@ func (c Config) Validate() error {
|
||||
if strings.TrimSpace(c.MCP.Path) == "" {
|
||||
return fmt.Errorf("invalid config: mcp.path is required")
|
||||
}
|
||||
if c.MCP.SSEPath != "" {
|
||||
if strings.TrimSpace(c.MCP.SSEPath) == "" {
|
||||
return fmt.Errorf("invalid config: mcp.sse_path must not be blank whitespace")
|
||||
}
|
||||
if c.MCP.SSEPath == c.MCP.Path {
|
||||
return fmt.Errorf("invalid config: mcp.sse_path %q must differ from mcp.path", c.MCP.SSEPath)
|
||||
}
|
||||
}
|
||||
if c.MCP.SessionTimeout <= 0 {
|
||||
return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero")
|
||||
}
|
||||
|
||||
@@ -221,12 +221,19 @@ func formatLogDuration(d time.Duration) string {
|
||||
return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds)
|
||||
}
|
||||
|
||||
func normalizeObjectSchema(schema *jsonschema.Schema) {
|
||||
if schema != nil && schema.Type == "object" && schema.Properties == nil {
|
||||
schema.Properties = map[string]*jsonschema.Schema{}
|
||||
}
|
||||
}
|
||||
|
||||
func setToolSchemas[In any, Out any](tool *mcp.Tool) error {
|
||||
if tool.InputSchema == nil {
|
||||
inputSchema, err := jsonschema.For[In](toolSchemaOptions)
|
||||
if err != nil {
|
||||
return fmt.Errorf("infer input schema: %w", err)
|
||||
}
|
||||
normalizeObjectSchema(inputSchema)
|
||||
tool.InputSchema = inputSchema
|
||||
}
|
||||
|
||||
|
||||
@@ -13,6 +13,24 @@ import (
|
||||
"git.warky.dev/wdevs/amcs/internal/tools"
|
||||
)
|
||||
|
||||
func TestSetToolSchemasAddsEmptyPropertiesForNoArgInput(t *testing.T) {
|
||||
type noArgInput struct{}
|
||||
type anyOutput struct{}
|
||||
|
||||
tool := &mcp.Tool{Name: "no_args"}
|
||||
if err := setToolSchemas[noArgInput, anyOutput](tool); err != nil {
|
||||
t.Fatalf("set tool schemas: %v", err)
|
||||
}
|
||||
|
||||
schema, ok := tool.InputSchema.(*jsonschema.Schema)
|
||||
if !ok {
|
||||
t.Fatalf("input schema type = %T, want *jsonschema.Schema", tool.InputSchema)
|
||||
}
|
||||
if schema.Properties == nil {
|
||||
t.Fatal("input schema missing properties: strict MCP clients require properties:{} on object schemas")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSetToolSchemasUsesStringUUIDsInListOutput(t *testing.T) {
|
||||
tool := &mcp.Tool{Name: "list_thoughts"}
|
||||
|
||||
|
||||
@@ -36,17 +36,34 @@ type ToolSet struct {
|
||||
Backfill *tools.BackfillTool
|
||||
Reparse *tools.ReparseMetadataTool
|
||||
RetryMetadata *tools.RetryMetadataTool
|
||||
Household *tools.HouseholdTool
|
||||
Maintenance *tools.MaintenanceTool
|
||||
Calendar *tools.CalendarTool
|
||||
Meals *tools.MealsTool
|
||||
CRM *tools.CRMTool
|
||||
Skills *tools.SkillsTool
|
||||
ChatHistory *tools.ChatHistoryTool
|
||||
Describe *tools.DescribeTool
|
||||
}
|
||||
|
||||
// Handlers groups the HTTP handlers produced for an MCP server instance.
|
||||
type Handlers struct {
|
||||
// StreamableHTTP is the primary MCP handler (always non-nil).
|
||||
StreamableHTTP http.Handler
|
||||
// SSE is the SSE transport handler; nil when SSEPath is empty.
|
||||
// SSE is the de facto transport for MCP over the internet and is required by most hosted MCP clients.
|
||||
SSE http.Handler
|
||||
}
|
||||
|
||||
// New builds the StreamableHTTP MCP handler. It is a convenience wrapper
|
||||
// around NewHandlers for callers that only need the primary transport.
|
||||
func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionClosed func(string)) (http.Handler, error) {
|
||||
h, err := NewHandlers(cfg, logger, toolSet, onSessionClosed)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return h.StreamableHTTP, nil
|
||||
}
|
||||
|
||||
// NewHandlers builds MCP HTTP handlers for both transports.
|
||||
// SSE is nil when cfg.SSEPath is empty.
|
||||
func NewHandlers(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionClosed func(string)) (Handlers, error) {
|
||||
instructions := cfg.Instructions
|
||||
if instructions == "" {
|
||||
instructions = string(amcsllm.MemoryInstructions)
|
||||
@@ -68,16 +85,12 @@ func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionCl
|
||||
registerProjectTools,
|
||||
registerFileTools,
|
||||
registerMaintenanceTools,
|
||||
registerHouseholdTools,
|
||||
registerCalendarTools,
|
||||
registerMealTools,
|
||||
registerCRMTools,
|
||||
registerSkillTools,
|
||||
registerChatHistoryTools,
|
||||
registerDescribeTools,
|
||||
} {
|
||||
if err := register(server, logger, toolSet); err != nil {
|
||||
return nil, err
|
||||
return Handlers{}, err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,9 +102,19 @@ func New(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onSessionCl
|
||||
opts.EventStore = newCleanupEventStore(mcp.NewMemoryEventStore(nil), onSessionClosed)
|
||||
}
|
||||
|
||||
return mcp.NewStreamableHTTPHandler(func(*http.Request) *mcp.Server {
|
||||
return server
|
||||
}, opts), nil
|
||||
h := Handlers{
|
||||
StreamableHTTP: mcp.NewStreamableHTTPHandler(func(*http.Request) *mcp.Server {
|
||||
return server
|
||||
}, opts),
|
||||
}
|
||||
|
||||
if strings.TrimSpace(cfg.SSEPath) != "" {
|
||||
h.SSE = mcp.NewSSEHandler(func(*http.Request) *mcp.Server {
|
||||
return server
|
||||
}, nil)
|
||||
}
|
||||
|
||||
return h, nil
|
||||
}
|
||||
|
||||
// buildServerIcons returns icon definitions referencing the server's own /images/icon.png endpoint.
|
||||
@@ -109,7 +132,7 @@ func buildServerIcons(publicURL string) []mcp.Icon {
|
||||
func registerSystemTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_version_info",
|
||||
Description: "Return the server build version information, including version, tag name, commit, and build date.",
|
||||
Description: "Build version, commit, and date.",
|
||||
}, toolSet.Version.GetInfo); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -119,13 +142,13 @@ func registerSystemTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSe
|
||||
func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "capture_thought",
|
||||
Description: "Store a thought with generated embeddings and extracted metadata. The thought is saved immediately even if metadata extraction times out; pending thoughts are retried in the background.",
|
||||
Description: "Store a thought; embeddings and metadata extracted async.",
|
||||
}, toolSet.Capture.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "search_thoughts",
|
||||
Description: "Search stored thoughts by semantic similarity. Falls back to Postgres full-text search automatically when no embeddings exist for the active model.",
|
||||
Description: "Semantic search; falls back to full-text if no embeddings.",
|
||||
}, toolSet.Search.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -137,7 +160,7 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "thought_stats",
|
||||
Description: "Get counts and top metadata buckets across stored thoughts.",
|
||||
Description: "Counts and top metadata buckets for stored thoughts.",
|
||||
}, toolSet.Stats.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -161,19 +184,19 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "archive_thought",
|
||||
Description: "Archive a thought so it is hidden from default search and listing.",
|
||||
Description: "Hide a thought from default search and listing.",
|
||||
}, toolSet.Archive.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "summarize_thoughts",
|
||||
Description: "Produce an LLM prose summary of a filtered or searched set of thoughts.",
|
||||
Description: "LLM summary of a filtered set of thoughts.",
|
||||
}, toolSet.Summarize.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "recall_context",
|
||||
Description: "Recall semantically relevant and recent context for prompt injection. Combines vector similarity with recency. Falls back to full-text search when no embeddings exist.",
|
||||
Description: "Semantic + recency context for prompt injection; falls back to full-text.",
|
||||
}, toolSet.Recall.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -185,7 +208,7 @@ func registerThoughtTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "related_thoughts",
|
||||
Description: "Retrieve explicit links and semantic neighbours for a thought. Falls back to full-text search when no embeddings exist.",
|
||||
Description: "Explicit links and semantic neighbours; falls back to full-text.",
|
||||
}, toolSet.Links.Related); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -207,19 +230,19 @@ func registerProjectTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "set_active_project",
|
||||
Description: "Set the active project for the current MCP session. Requires a stateful MCP client that reuses the same session across calls. If your client does not preserve sessions, pass project explicitly to each tool instead.",
|
||||
Description: "Set session's active project. Pass project per call if client is stateless.",
|
||||
}, toolSet.Projects.SetActive); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_active_project",
|
||||
Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.",
|
||||
Description: "Return session's active project. Pass project per call if client is stateless.",
|
||||
}, toolSet.Projects.GetActive); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_project_context",
|
||||
Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project. Falls back to full-text search when no embeddings exist.",
|
||||
Description: "Recent and semantic context for a project; falls back to full-text.",
|
||||
}, toolSet.Context.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -235,19 +258,19 @@ func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet)
|
||||
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "upload_file",
|
||||
Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.",
|
||||
Description: "Stage a file; returns amcs://files/{id}. content_path for large/binary, content_base64 for ≤10 MB. Link now or pass URI to save_file.",
|
||||
}, toolSet.Files.Upload); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "save_file",
|
||||
Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.",
|
||||
Description: "Store and optionally link a file. content_base64 (≤10 MB) or content_uri from upload_file. >10 MB: use upload_file first.",
|
||||
}, toolSet.Files.Save); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "load_file",
|
||||
Description: "Load a stored file by id. Returns metadata, base64 content, and an embedded MCP binary resource at amcs://files/{id}. Prefer the embedded resource when your client supports it. The id field accepts a bare UUID or full amcs://files/{id} URI.",
|
||||
Description: "Fetch file metadata and content by id (UUID or amcs://files/{id}); includes embedded MCP resource.",
|
||||
}, toolSet.Files.Load); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -263,19 +286,19 @@ func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet)
|
||||
func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "backfill_embeddings",
|
||||
Description: "Generate missing embeddings for stored thoughts using the active embedding model. Run this after switching embedding models or importing thoughts that have no vectors.",
|
||||
Description: "Generate missing embeddings. Run after model switch or bulk import.",
|
||||
}, toolSet.Backfill.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "reparse_thought_metadata",
|
||||
Description: "Re-extract and normalize metadata for stored thoughts from their content.",
|
||||
Description: "Re-extract metadata from thought content.",
|
||||
}, toolSet.Reparse.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "retry_failed_metadata",
|
||||
Description: "Retry metadata extraction for thoughts still marked pending or failed.",
|
||||
Description: "Retry pending/failed metadata extraction.",
|
||||
}, toolSet.RetryMetadata.Handle); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -287,7 +310,7 @@ func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "log_maintenance",
|
||||
Description: "Log completed maintenance work; automatically updates the task's next due date.",
|
||||
Description: "Log completed maintenance; updates next due date.",
|
||||
}, toolSet.Maintenance.LogWork); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -306,176 +329,10 @@ func registerMaintenanceTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
||||
return nil
|
||||
}
|
||||
|
||||
func registerHouseholdTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_household_item",
|
||||
Description: "Store a household fact (paint color, appliance details, measurement, document, etc.).",
|
||||
}, toolSet.Household.AddItem); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "search_household_items",
|
||||
Description: "Search household items by name, category, or location.",
|
||||
}, toolSet.Household.SearchItems); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_household_item",
|
||||
Description: "Retrieve a household item by id.",
|
||||
}, toolSet.Household.GetItem); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_vendor",
|
||||
Description: "Add a service provider (plumber, electrician, landscaper, etc.).",
|
||||
}, toolSet.Household.AddVendor); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "list_vendors",
|
||||
Description: "List household service vendors, optionally filtered by service type.",
|
||||
}, toolSet.Household.ListVendors); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func registerCalendarTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_family_member",
|
||||
Description: "Add a family member to the household.",
|
||||
}, toolSet.Calendar.AddMember); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "list_family_members",
|
||||
Description: "List all family members.",
|
||||
}, toolSet.Calendar.ListMembers); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_activity",
|
||||
Description: "Schedule a one-time or recurring family activity.",
|
||||
}, toolSet.Calendar.AddActivity); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_week_schedule",
|
||||
Description: "Get all activities scheduled for a given week.",
|
||||
}, toolSet.Calendar.GetWeekSchedule); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "search_activities",
|
||||
Description: "Search activities by title, type, or family member.",
|
||||
}, toolSet.Calendar.SearchActivities); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_important_date",
|
||||
Description: "Track a birthday, anniversary, deadline, or other important date.",
|
||||
}, toolSet.Calendar.AddImportantDate); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_upcoming_dates",
|
||||
Description: "Get important dates coming up in the next N days.",
|
||||
}, toolSet.Calendar.GetUpcomingDates); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func registerMealTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_recipe",
|
||||
Description: "Save a recipe with ingredients and instructions.",
|
||||
}, toolSet.Meals.AddRecipe); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "search_recipes",
|
||||
Description: "Search recipes by name, cuisine, tags, or ingredient.",
|
||||
}, toolSet.Meals.SearchRecipes); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "update_recipe",
|
||||
Description: "Update an existing recipe.",
|
||||
}, toolSet.Meals.UpdateRecipe); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "create_meal_plan",
|
||||
Description: "Set the meal plan for a week; replaces any existing plan for that week.",
|
||||
}, toolSet.Meals.CreateMealPlan); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_meal_plan",
|
||||
Description: "Get the meal plan for a given week.",
|
||||
}, toolSet.Meals.GetMealPlan); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "generate_shopping_list",
|
||||
Description: "Auto-generate a shopping list from the meal plan for a given week.",
|
||||
}, toolSet.Meals.GenerateShoppingList); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func registerCRMTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_professional_contact",
|
||||
Description: "Add a professional contact to the CRM.",
|
||||
}, toolSet.CRM.AddContact); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "search_contacts",
|
||||
Description: "Search professional contacts by name, company, title, notes, or tags.",
|
||||
}, toolSet.CRM.SearchContacts); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "log_interaction",
|
||||
Description: "Log an interaction with a professional contact.",
|
||||
}, toolSet.CRM.LogInteraction); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_contact_history",
|
||||
Description: "Get full history (interactions and opportunities) for a contact.",
|
||||
}, toolSet.CRM.GetHistory); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "create_opportunity",
|
||||
Description: "Create a deal, project, or opportunity linked to a contact.",
|
||||
}, toolSet.CRM.CreateOpportunity); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_follow_ups_due",
|
||||
Description: "List contacts with a follow-up date due within the next N days.",
|
||||
}, toolSet.CRM.GetFollowUpsDue); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "link_thought_to_contact",
|
||||
Description: "Append a stored thought to a contact's notes.",
|
||||
}, toolSet.CRM.LinkThought); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_skill",
|
||||
Description: "Store a reusable agent skill (behavioural instruction or capability prompt).",
|
||||
Description: "Store an agent skill (instruction or capability prompt).",
|
||||
}, toolSet.Skills.AddSkill); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -493,7 +350,7 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_guardrail",
|
||||
Description: "Store a reusable agent guardrail (constraint or safety rule).",
|
||||
Description: "Store an agent guardrail (constraint or safety rule).",
|
||||
}, toolSet.Skills.AddGuardrail); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -511,37 +368,37 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_project_skill",
|
||||
Description: "Link an agent skill to a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Link a skill to a project. Pass project if client is stateless.",
|
||||
}, toolSet.Skills.AddProjectSkill); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "remove_project_skill",
|
||||
Description: "Unlink an agent skill from a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Unlink a skill from a project. Pass project if client is stateless.",
|
||||
}, toolSet.Skills.RemoveProjectSkill); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "list_project_skills",
|
||||
Description: "List all skills linked to a project. Call this at the start of every project session to load agent behaviour instructions before generating new ones. Only create new skills if none are returned. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Skills for a project. Load at session start; only add new if none returned. Pass project if stateless.",
|
||||
}, toolSet.Skills.ListProjectSkills); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "add_project_guardrail",
|
||||
Description: "Link an agent guardrail to a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Link a guardrail to a project. Pass project if client is stateless.",
|
||||
}, toolSet.Skills.AddProjectGuardrail); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "remove_project_guardrail",
|
||||
Description: "Unlink an agent guardrail from a project. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Unlink a guardrail from a project. Pass project if client is stateless.",
|
||||
}, toolSet.Skills.RemoveProjectGuardrail); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "list_project_guardrails",
|
||||
Description: "List all guardrails linked to a project. Call this at the start of every project session to load agent constraints before generating new ones. Only create new guardrails if none are returned. Pass project explicitly when your client does not preserve MCP sessions.",
|
||||
Description: "Guardrails for a project. Load at session start; only add new if none returned. Pass project if stateless.",
|
||||
}, toolSet.Skills.ListProjectGuardrails); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -551,25 +408,25 @@ func registerSkillTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet
|
||||
func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "save_chat_history",
|
||||
Description: "Save a chat session's message history for later retrieval. Stores messages with optional title, summary, channel, agent, and project metadata.",
|
||||
Description: "Save chat messages with optional title, summary, channel, agent, and project.",
|
||||
}, toolSet.ChatHistory.SaveChatHistory); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "get_chat_history",
|
||||
Description: "Retrieve a saved chat history by its UUID or session_id. Returns the full message list.",
|
||||
Description: "Fetch chat history by UUID or session_id.",
|
||||
}, toolSet.ChatHistory.GetChatHistory); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "list_chat_histories",
|
||||
Description: "List saved chat histories with optional filters: project, channel, agent_id, session_id, or recent days.",
|
||||
Description: "List chat histories; filter by project, channel, agent_id, session_id, or days.",
|
||||
}, toolSet.ChatHistory.ListChatHistories); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "delete_chat_history",
|
||||
Description: "Permanently delete a saved chat history by id.",
|
||||
Description: "Delete a chat history by id.",
|
||||
}, toolSet.ChatHistory.DeleteChatHistory); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -579,13 +436,13 @@ func registerChatHistoryTools(server *mcp.Server, logger *slog.Logger, toolSet T
|
||||
func registerDescribeTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error {
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "describe_tools",
|
||||
Description: "Call this first in every session. Returns all available MCP tools with names, descriptions, categories, and your accumulated usage notes. Filter by category to narrow results. Available categories: system, thoughts, projects, files, admin, household, maintenance, calendar, meals, crm, skills, chat, meta.",
|
||||
Description: "Call first each session. All tools with categories and usage notes. Categories: system, thoughts, projects, files, admin, maintenance, skills, chat, meta.",
|
||||
}, toolSet.Describe.Describe); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := addTool(server, logger, &mcp.Tool{
|
||||
Name: "annotate_tool",
|
||||
Description: "Persist usage notes, gotchas, or workflow patterns for a specific tool. Notes survive across sessions and are returned by describe_tools. Call this whenever you discover something non-obvious about a tool's behaviour. Pass an empty string to clear notes.",
|
||||
Description: "Save usage notes for a tool; returned by describe_tools. Empty string clears.",
|
||||
}, toolSet.Describe.Annotate); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -631,45 +488,12 @@ func BuildToolCatalog() []tools.ToolEntry {
|
||||
{Name: "reparse_thought_metadata", Description: "Re-extract and normalize metadata for stored thoughts from their content.", Category: "admin"},
|
||||
{Name: "retry_failed_metadata", Description: "Retry metadata extraction for thoughts still marked pending or failed.", Category: "admin"},
|
||||
|
||||
// household
|
||||
{Name: "add_household_item", Description: "Store a household fact (paint color, appliance details, measurement, document, etc.).", Category: "household"},
|
||||
{Name: "search_household_items", Description: "Search household items by name, category, or location.", Category: "household"},
|
||||
{Name: "get_household_item", Description: "Retrieve a household item by id.", Category: "household"},
|
||||
{Name: "add_vendor", Description: "Add a service provider (plumber, electrician, landscaper, etc.).", Category: "household"},
|
||||
{Name: "list_vendors", Description: "List household service vendors, optionally filtered by service type.", Category: "household"},
|
||||
|
||||
// maintenance
|
||||
{Name: "add_maintenance_task", Description: "Create a recurring or one-time home maintenance task.", Category: "maintenance"},
|
||||
{Name: "log_maintenance", Description: "Log completed maintenance work; automatically updates the task's next due date.", Category: "maintenance"},
|
||||
{Name: "get_upcoming_maintenance", Description: "List maintenance tasks due within the next N days.", Category: "maintenance"},
|
||||
{Name: "search_maintenance_history", Description: "Search the maintenance log by task name, category, or date range.", Category: "maintenance"},
|
||||
|
||||
// calendar
|
||||
{Name: "add_family_member", Description: "Add a family member to the household.", Category: "calendar"},
|
||||
{Name: "list_family_members", Description: "List all family members.", Category: "calendar"},
|
||||
{Name: "add_activity", Description: "Schedule a one-time or recurring family activity.", Category: "calendar"},
|
||||
{Name: "get_week_schedule", Description: "Get all activities scheduled for a given week.", Category: "calendar"},
|
||||
{Name: "search_activities", Description: "Search activities by title, type, or family member.", Category: "calendar"},
|
||||
{Name: "add_important_date", Description: "Track a birthday, anniversary, deadline, or other important date.", Category: "calendar"},
|
||||
{Name: "get_upcoming_dates", Description: "Get important dates coming up in the next N days.", Category: "calendar"},
|
||||
|
||||
// meals
|
||||
{Name: "add_recipe", Description: "Save a recipe with ingredients and instructions.", Category: "meals"},
|
||||
{Name: "search_recipes", Description: "Search recipes by name, cuisine, tags, or ingredient.", Category: "meals"},
|
||||
{Name: "update_recipe", Description: "Update an existing recipe.", Category: "meals"},
|
||||
{Name: "create_meal_plan", Description: "Set the meal plan for a week; replaces any existing plan for that week.", Category: "meals"},
|
||||
{Name: "get_meal_plan", Description: "Get the meal plan for a given week.", Category: "meals"},
|
||||
{Name: "generate_shopping_list", Description: "Auto-generate a shopping list from the meal plan for a given week.", Category: "meals"},
|
||||
|
||||
// crm
|
||||
{Name: "add_professional_contact", Description: "Add a professional contact to the CRM.", Category: "crm"},
|
||||
{Name: "search_contacts", Description: "Search professional contacts by name, company, title, notes, or tags.", Category: "crm"},
|
||||
{Name: "log_interaction", Description: "Log an interaction with a professional contact.", Category: "crm"},
|
||||
{Name: "get_contact_history", Description: "Get full history (interactions and opportunities) for a contact.", Category: "crm"},
|
||||
{Name: "create_opportunity", Description: "Create a deal, project, or opportunity linked to a contact.", Category: "crm"},
|
||||
{Name: "get_follow_ups_due", Description: "List contacts with a follow-up date due within the next N days.", Category: "crm"},
|
||||
{Name: "link_thought_to_contact", Description: "Append a stored thought to a contact's notes.", Category: "crm"},
|
||||
|
||||
// skills
|
||||
{Name: "add_skill", Description: "Store a reusable agent skill (behavioural instruction or capability prompt).", Category: "skills"},
|
||||
{Name: "remove_skill", Description: "Delete an agent skill by id.", Category: "skills"},
|
||||
|
||||
@@ -28,45 +28,27 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
||||
sort.Strings(got)
|
||||
|
||||
want := []string{
|
||||
"add_activity",
|
||||
"add_family_member",
|
||||
"add_guardrail",
|
||||
"add_household_item",
|
||||
"add_important_date",
|
||||
"add_maintenance_task",
|
||||
"add_professional_contact",
|
||||
"add_project_guardrail",
|
||||
"add_project_skill",
|
||||
"add_recipe",
|
||||
"add_skill",
|
||||
"add_vendor",
|
||||
"annotate_tool",
|
||||
"archive_thought",
|
||||
"backfill_embeddings",
|
||||
"capture_thought",
|
||||
"create_meal_plan",
|
||||
"create_opportunity",
|
||||
"create_project",
|
||||
"delete_chat_history",
|
||||
"delete_thought",
|
||||
"describe_tools",
|
||||
"generate_shopping_list",
|
||||
"get_active_project",
|
||||
"get_chat_history",
|
||||
"get_contact_history",
|
||||
"get_follow_ups_due",
|
||||
"get_household_item",
|
||||
"get_meal_plan",
|
||||
"get_project_context",
|
||||
"get_thought",
|
||||
"get_upcoming_dates",
|
||||
"get_upcoming_maintenance",
|
||||
"get_version_info",
|
||||
"get_week_schedule",
|
||||
"link_thought_to_contact",
|
||||
"link_thoughts",
|
||||
"list_chat_histories",
|
||||
"list_family_members",
|
||||
"list_files",
|
||||
"list_guardrails",
|
||||
"list_project_guardrails",
|
||||
@@ -74,9 +56,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
||||
"list_projects",
|
||||
"list_skills",
|
||||
"list_thoughts",
|
||||
"list_vendors",
|
||||
"load_file",
|
||||
"log_interaction",
|
||||
"log_maintenance",
|
||||
"recall_context",
|
||||
"related_thoughts",
|
||||
@@ -88,16 +68,11 @@ func TestNewListsAllRegisteredTools(t *testing.T) {
|
||||
"retry_failed_metadata",
|
||||
"save_chat_history",
|
||||
"save_file",
|
||||
"search_activities",
|
||||
"search_contacts",
|
||||
"search_household_items",
|
||||
"search_maintenance_history",
|
||||
"search_recipes",
|
||||
"search_thoughts",
|
||||
"set_active_project",
|
||||
"summarize_thoughts",
|
||||
"thought_stats",
|
||||
"update_recipe",
|
||||
"update_thought",
|
||||
"upload_file",
|
||||
}
|
||||
|
||||
136
internal/mcpserver/sse_test.go
Normal file
136
internal/mcpserver/sse_test.go
Normal file
@@ -0,0 +1,136 @@
|
||||
package mcpserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
)
|
||||
|
||||
func TestNewHandlers_SSEDisabledByDefault(t *testing.T) {
|
||||
h, err := NewHandlers(config.MCPConfig{
|
||||
ServerName: "test",
|
||||
Version: "0.0.1",
|
||||
SessionTimeout: time.Minute,
|
||||
}, nil, streamableTestToolSet(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("NewHandlers() error = %v", err)
|
||||
}
|
||||
if h.StreamableHTTP == nil {
|
||||
t.Fatal("StreamableHTTP handler is nil")
|
||||
}
|
||||
if h.SSE != nil {
|
||||
t.Fatal("SSE handler should be nil when SSEPath is empty")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewHandlers_SSEEnabledWhenPathSet(t *testing.T) {
|
||||
h, err := NewHandlers(config.MCPConfig{
|
||||
ServerName: "test",
|
||||
Version: "0.0.1",
|
||||
SessionTimeout: time.Minute,
|
||||
SSEPath: "/sse",
|
||||
}, nil, streamableTestToolSet(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("NewHandlers() error = %v", err)
|
||||
}
|
||||
if h.StreamableHTTP == nil {
|
||||
t.Fatal("StreamableHTTP handler is nil")
|
||||
}
|
||||
if h.SSE == nil {
|
||||
t.Fatal("SSE handler is nil when SSEPath is set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNew_BackwardCompatibility(t *testing.T) {
|
||||
handler, err := New(config.MCPConfig{
|
||||
ServerName: "test",
|
||||
Version: "0.0.1",
|
||||
SessionTimeout: time.Minute,
|
||||
}, nil, streamableTestToolSet(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("New() error = %v", err)
|
||||
}
|
||||
if handler == nil {
|
||||
t.Fatal("New() returned nil handler")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSSEListTools(t *testing.T) {
|
||||
h, err := NewHandlers(config.MCPConfig{
|
||||
ServerName: "test",
|
||||
Version: "0.0.1",
|
||||
SessionTimeout: time.Minute,
|
||||
SSEPath: "/sse",
|
||||
}, nil, streamableTestToolSet(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("NewHandlers() error = %v", err)
|
||||
}
|
||||
|
||||
srv := httptest.NewServer(h.SSE)
|
||||
t.Cleanup(srv.Close)
|
||||
|
||||
client := mcp.NewClient(&mcp.Implementation{Name: "client", Version: "0.0.1"}, nil)
|
||||
cs, err := client.Connect(context.Background(), &mcp.SSEClientTransport{Endpoint: srv.URL}, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("connect SSE client: %v", err)
|
||||
}
|
||||
t.Cleanup(func() { _ = cs.Close() })
|
||||
|
||||
result, err := cs.ListTools(context.Background(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("ListTools() error = %v", err)
|
||||
}
|
||||
if len(result.Tools) == 0 {
|
||||
t.Fatal("ListTools() returned no tools")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSSEAndStreamableShareTools(t *testing.T) {
|
||||
h, err := NewHandlers(config.MCPConfig{
|
||||
ServerName: "test",
|
||||
Version: "0.0.1",
|
||||
SessionTimeout: time.Minute,
|
||||
SSEPath: "/sse",
|
||||
}, nil, streamableTestToolSet(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("NewHandlers() error = %v", err)
|
||||
}
|
||||
|
||||
sseSrv := httptest.NewServer(h.SSE)
|
||||
t.Cleanup(sseSrv.Close)
|
||||
|
||||
streamSrv := httptest.NewServer(h.StreamableHTTP)
|
||||
t.Cleanup(streamSrv.Close)
|
||||
|
||||
sseClient := mcp.NewClient(&mcp.Implementation{Name: "sse-client", Version: "0.0.1"}, nil)
|
||||
sseSession, err := sseClient.Connect(context.Background(), &mcp.SSEClientTransport{Endpoint: sseSrv.URL}, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("connect SSE client: %v", err)
|
||||
}
|
||||
t.Cleanup(func() { _ = sseSession.Close() })
|
||||
|
||||
streamClient := mcp.NewClient(&mcp.Implementation{Name: "stream-client", Version: "0.0.1"}, nil)
|
||||
streamSession, err := streamClient.Connect(context.Background(), &mcp.StreamableClientTransport{Endpoint: streamSrv.URL}, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("connect StreamableHTTP client: %v", err)
|
||||
}
|
||||
t.Cleanup(func() { _ = streamSession.Close() })
|
||||
|
||||
sseTools, err := sseSession.ListTools(context.Background(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("SSE ListTools() error = %v", err)
|
||||
}
|
||||
streamTools, err := streamSession.ListTools(context.Background(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("StreamableHTTP ListTools() error = %v", err)
|
||||
}
|
||||
|
||||
if len(sseTools.Tools) != len(streamTools.Tools) {
|
||||
t.Fatalf("SSE tool count = %d, StreamableHTTP tool count = %d, want equal", len(sseTools.Tools), len(streamTools.Tools))
|
||||
}
|
||||
}
|
||||
@@ -127,11 +127,7 @@ func streamableTestToolSet() ToolSet {
|
||||
Backfill: new(tools.BackfillTool),
|
||||
Reparse: new(tools.ReparseMetadataTool),
|
||||
RetryMetadata: new(tools.RetryMetadataTool),
|
||||
Household: new(tools.HouseholdTool),
|
||||
Maintenance: new(tools.MaintenanceTool),
|
||||
Calendar: new(tools.CalendarTool),
|
||||
Meals: new(tools.MealsTool),
|
||||
CRM: new(tools.CRMTool),
|
||||
Skills: new(tools.SkillsTool),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,6 +58,12 @@ func (db *DB) InsertThought(ctx context.Context, thought thoughttypes.Thought, e
|
||||
return thoughttypes.Thought{}, fmt.Errorf("commit thought insert: %w", err)
|
||||
}
|
||||
|
||||
if len(thought.Embedding) > 0 {
|
||||
created.EmbeddingStatus = "done"
|
||||
} else {
|
||||
created.EmbeddingStatus = "pending"
|
||||
}
|
||||
|
||||
return created, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -51,6 +51,30 @@ func NewBackfillTool(db *store.DB, provider ai.Provider, sessions *session.Activ
|
||||
return &BackfillTool{store: db, provider: provider, sessions: sessions, logger: logger}
|
||||
}
|
||||
|
||||
// QueueThought queues a single thought for background embedding generation.
|
||||
// It is used by capture when the embedding provider is temporarily unavailable.
|
||||
func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) {
|
||||
go func() {
|
||||
vec, err := t.provider.Embed(ctx, content)
|
||||
if err != nil {
|
||||
t.logger.Warn("background embedding retry failed",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("error", err.Error()),
|
||||
)
|
||||
return
|
||||
}
|
||||
model := t.provider.EmbeddingModel()
|
||||
if err := t.store.UpsertEmbedding(ctx, id, model, vec); err != nil {
|
||||
t.logger.Warn("background embedding upsert failed",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("error", err.Error()),
|
||||
)
|
||||
return
|
||||
}
|
||||
t.logger.Info("background embedding retry succeeded", slog.String("thought_id", id.String()))
|
||||
}()
|
||||
}
|
||||
|
||||
func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in BackfillInput) (*mcp.CallToolResult, BackfillOutput, error) {
|
||||
limit := in.Limit
|
||||
if limit <= 0 {
|
||||
|
||||
@@ -6,8 +6,8 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"golang.org/x/sync/errgroup"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/ai"
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
@@ -17,6 +17,11 @@ import (
|
||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||
)
|
||||
|
||||
// EmbeddingQueuer queues a thought for background embedding generation.
|
||||
type EmbeddingQueuer interface {
|
||||
QueueThought(ctx context.Context, id uuid.UUID, content string)
|
||||
}
|
||||
|
||||
type CaptureTool struct {
|
||||
store *store.DB
|
||||
provider ai.Provider
|
||||
@@ -24,6 +29,7 @@ type CaptureTool struct {
|
||||
sessions *session.ActiveProjects
|
||||
metadataTimeout time.Duration
|
||||
retryer *MetadataRetryer
|
||||
embedRetryer EmbeddingQueuer
|
||||
log *slog.Logger
|
||||
}
|
||||
|
||||
@@ -36,8 +42,8 @@ type CaptureOutput struct {
|
||||
Thought thoughttypes.Thought `json:"thought"`
|
||||
}
|
||||
|
||||
func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, log *slog.Logger) *CaptureTool {
|
||||
return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, log: log}
|
||||
func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool {
|
||||
return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log}
|
||||
}
|
||||
|
||||
func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) {
|
||||
@@ -51,46 +57,10 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
||||
return nil, CaptureOutput{}, err
|
||||
}
|
||||
|
||||
var embedding []float32
|
||||
rawMetadata := metadata.Fallback(t.capture)
|
||||
metadataNeedsRetry := false
|
||||
|
||||
group, groupCtx := errgroup.WithContext(ctx)
|
||||
group.Go(func() error {
|
||||
vector, err := t.provider.Embed(groupCtx, content)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
embedding = vector
|
||||
return nil
|
||||
})
|
||||
group.Go(func() error {
|
||||
metaCtx := groupCtx
|
||||
attemptedAt := time.Now().UTC()
|
||||
if t.metadataTimeout > 0 {
|
||||
var cancel context.CancelFunc
|
||||
metaCtx, cancel = context.WithTimeout(groupCtx, t.metadataTimeout)
|
||||
defer cancel()
|
||||
}
|
||||
extracted, err := t.provider.ExtractMetadata(metaCtx, content)
|
||||
if err != nil {
|
||||
t.log.Warn("metadata extraction failed, using fallback", slog.String("provider", t.provider.Name()), slog.String("error", err.Error()))
|
||||
rawMetadata = metadata.MarkMetadataPending(rawMetadata, t.capture, attemptedAt, err)
|
||||
metadataNeedsRetry = true
|
||||
return nil
|
||||
}
|
||||
rawMetadata = metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt)
|
||||
return nil
|
||||
})
|
||||
|
||||
if err := group.Wait(); err != nil {
|
||||
return nil, CaptureOutput{}, err
|
||||
}
|
||||
|
||||
thought := thoughttypes.Thought{
|
||||
Content: content,
|
||||
Embedding: embedding,
|
||||
Metadata: metadata.Normalize(metadata.SanitizeExtracted(rawMetadata), t.capture),
|
||||
Content: content,
|
||||
Metadata: rawMetadata,
|
||||
}
|
||||
if project != nil {
|
||||
thought.ProjectID = &project.ID
|
||||
@@ -103,9 +73,57 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C
|
||||
if project != nil {
|
||||
_ = t.store.TouchProject(ctx, project.ID)
|
||||
}
|
||||
if metadataNeedsRetry && t.retryer != nil {
|
||||
t.retryer.QueueThought(created.ID)
|
||||
|
||||
if t.retryer != nil || t.embedRetryer != nil {
|
||||
t.launchEnrichment(created.ID, content)
|
||||
}
|
||||
|
||||
return nil, CaptureOutput{Thought: created}, nil
|
||||
}
|
||||
|
||||
func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) {
|
||||
go func() {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
|
||||
defer cancel()
|
||||
|
||||
if t.retryer != nil {
|
||||
attemptedAt := time.Now().UTC()
|
||||
rawMetadata := metadata.Fallback(t.capture)
|
||||
extracted, err := t.provider.ExtractMetadata(ctx, content)
|
||||
if err != nil {
|
||||
failed := metadata.MarkMetadataFailed(rawMetadata, t.capture, attemptedAt, err)
|
||||
if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, failed); updateErr != nil {
|
||||
t.log.Warn("deferred metadata failure could not be persisted",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("error", updateErr.Error()),
|
||||
)
|
||||
}
|
||||
t.log.Warn("deferred metadata extraction failed",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("provider", t.provider.Name()),
|
||||
slog.String("error", err.Error()),
|
||||
)
|
||||
t.retryer.QueueThought(id)
|
||||
} else {
|
||||
completed := metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt)
|
||||
if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, completed); updateErr != nil {
|
||||
t.log.Warn("deferred metadata completion could not be persisted",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("error", updateErr.Error()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if t.embedRetryer != nil {
|
||||
if _, err := t.provider.Embed(ctx, content); err != nil {
|
||||
t.log.Warn("deferred embedding failed",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("provider", t.provider.Name()),
|
||||
slog.String("error", err.Error()),
|
||||
)
|
||||
}
|
||||
t.embedRetryer.QueueThought(ctx, id, content)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
209
internal/tools/enrichment_retry.go
Normal file
209
internal/tools/enrichment_retry.go
Normal file
@@ -0,0 +1,209 @@
|
||||
package tools
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/modelcontextprotocol/go-sdk/mcp"
|
||||
"golang.org/x/sync/semaphore"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/ai"
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
"git.warky.dev/wdevs/amcs/internal/metadata"
|
||||
"git.warky.dev/wdevs/amcs/internal/session"
|
||||
"git.warky.dev/wdevs/amcs/internal/store"
|
||||
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
|
||||
)
|
||||
|
||||
const enrichmentRetryConcurrency = 4
|
||||
const enrichmentRetryMaxAttempts = 5
|
||||
|
||||
var enrichmentRetryBackoff = []time.Duration{
|
||||
30 * time.Second,
|
||||
2 * time.Minute,
|
||||
10 * time.Minute,
|
||||
30 * time.Minute,
|
||||
2 * time.Hour,
|
||||
}
|
||||
|
||||
type EnrichmentRetryer struct {
|
||||
backgroundCtx context.Context
|
||||
store *store.DB
|
||||
provider ai.Provider
|
||||
capture config.CaptureConfig
|
||||
sessions *session.ActiveProjects
|
||||
metadataTimeout time.Duration
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
type RetryEnrichmentTool struct {
|
||||
retryer *EnrichmentRetryer
|
||||
}
|
||||
|
||||
type RetryEnrichmentInput struct {
|
||||
Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"`
|
||||
Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"`
|
||||
IncludeArchived bool `json:"include_archived,omitempty" jsonschema:"whether to include archived thoughts; defaults to false"`
|
||||
OlderThanDays int `json:"older_than_days,omitempty" jsonschema:"only retry thoughts whose last metadata attempt was at least N days ago; 0 means no restriction"`
|
||||
DryRun bool `json:"dry_run,omitempty" jsonschema:"report counts without retrying metadata extraction"`
|
||||
}
|
||||
|
||||
type RetryEnrichmentFailure struct {
|
||||
ID string `json:"id"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
type RetryEnrichmentOutput struct {
|
||||
Scanned int `json:"scanned"`
|
||||
Retried int `json:"retried"`
|
||||
Updated int `json:"updated"`
|
||||
Skipped int `json:"skipped"`
|
||||
Failed int `json:"failed"`
|
||||
DryRun bool `json:"dry_run"`
|
||||
Failures []RetryEnrichmentFailure `json:"failures,omitempty"`
|
||||
}
|
||||
|
||||
func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer {
|
||||
if backgroundCtx == nil {
|
||||
backgroundCtx = context.Background()
|
||||
}
|
||||
return &EnrichmentRetryer{
|
||||
backgroundCtx: backgroundCtx,
|
||||
store: db,
|
||||
provider: provider,
|
||||
capture: capture,
|
||||
sessions: sessions,
|
||||
metadataTimeout: metadataTimeout,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func NewRetryEnrichmentTool(retryer *EnrichmentRetryer) *RetryEnrichmentTool {
|
||||
return &RetryEnrichmentTool{retryer: retryer}
|
||||
}
|
||||
|
||||
func (t *RetryEnrichmentTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) {
|
||||
return t.retryer.Handle(ctx, req, in)
|
||||
}
|
||||
|
||||
func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) {
|
||||
go func() {
|
||||
if _, err := r.retryOne(r.backgroundCtx, id); err != nil {
|
||||
r.logger.Warn("background metadata retry failed",
|
||||
slog.String("thought_id", id.String()),
|
||||
slog.String("error", err.Error()),
|
||||
)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (r *EnrichmentRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) {
|
||||
limit := in.Limit
|
||||
if limit <= 0 {
|
||||
limit = 100
|
||||
}
|
||||
|
||||
project, err := resolveProject(ctx, r.store, r.sessions, req, in.Project, false)
|
||||
if err != nil {
|
||||
return nil, RetryEnrichmentOutput{}, err
|
||||
}
|
||||
|
||||
var projectID *uuid.UUID
|
||||
if project != nil {
|
||||
projectID = &project.ID
|
||||
}
|
||||
|
||||
thoughts, err := r.store.ListThoughtsPendingMetadataRetry(ctx, limit, projectID, in.IncludeArchived, in.OlderThanDays)
|
||||
if err != nil {
|
||||
return nil, RetryEnrichmentOutput{}, err
|
||||
}
|
||||
|
||||
out := RetryEnrichmentOutput{Scanned: len(thoughts), DryRun: in.DryRun}
|
||||
if in.DryRun || len(thoughts) == 0 {
|
||||
return nil, out, nil
|
||||
}
|
||||
|
||||
sem := semaphore.NewWeighted(enrichmentRetryConcurrency)
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for _, thought := range thoughts {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
if err := sem.Acquire(ctx, 1); err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
wg.Add(1)
|
||||
go func(thought thoughttypes.Thought) {
|
||||
defer wg.Done()
|
||||
defer sem.Release(1)
|
||||
|
||||
mu.Lock()
|
||||
out.Retried++
|
||||
mu.Unlock()
|
||||
|
||||
updated, err := r.retryOne(ctx, thought.ID)
|
||||
if err != nil {
|
||||
mu.Lock()
|
||||
out.Failures = append(out.Failures, RetryEnrichmentFailure{ID: thought.ID.String(), Error: err.Error()})
|
||||
mu.Unlock()
|
||||
return
|
||||
}
|
||||
if updated {
|
||||
mu.Lock()
|
||||
out.Updated++
|
||||
mu.Unlock()
|
||||
return
|
||||
}
|
||||
|
||||
mu.Lock()
|
||||
out.Skipped++
|
||||
mu.Unlock()
|
||||
}(thought)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
out.Failed = len(out.Failures)
|
||||
|
||||
return nil, out, nil
|
||||
}
|
||||
|
||||
func (r *EnrichmentRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, error) {
|
||||
thought, err := r.store.GetThought(ctx, id)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if thought.Metadata.MetadataStatus == metadata.MetadataStatusComplete {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
attemptCtx := ctx
|
||||
if r.metadataTimeout > 0 {
|
||||
var cancel context.CancelFunc
|
||||
attemptCtx, cancel = context.WithTimeout(ctx, r.metadataTimeout)
|
||||
defer cancel()
|
||||
}
|
||||
|
||||
attemptedAt := time.Now().UTC()
|
||||
extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content)
|
||||
if extractErr != nil {
|
||||
failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr)
|
||||
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil {
|
||||
return false, updateErr
|
||||
}
|
||||
return false, extractErr
|
||||
}
|
||||
|
||||
completedMetadata := metadata.MarkMetadataComplete(metadata.SanitizeExtracted(extracted), r.capture, attemptedAt)
|
||||
completedMetadata.Attachments = thought.Metadata.Attachments
|
||||
if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, completedMetadata); updateErr != nil {
|
||||
return false, updateErr
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
@@ -28,12 +28,42 @@ type MetadataRetryer struct {
|
||||
sessions *session.ActiveProjects
|
||||
metadataTimeout time.Duration
|
||||
logger *slog.Logger
|
||||
lock *RetryLocker
|
||||
}
|
||||
|
||||
type RetryMetadataTool struct {
|
||||
retryer *MetadataRetryer
|
||||
}
|
||||
|
||||
type RetryLocker struct {
|
||||
mu sync.Mutex
|
||||
locks map[uuid.UUID]time.Time
|
||||
}
|
||||
|
||||
func NewRetryLocker() *RetryLocker {
|
||||
return &RetryLocker{locks: map[uuid.UUID]time.Time{}}
|
||||
}
|
||||
|
||||
func (l *RetryLocker) Acquire(id uuid.UUID, ttl time.Duration) bool {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
if l.locks == nil {
|
||||
l.locks = map[uuid.UUID]time.Time{}
|
||||
}
|
||||
now := time.Now()
|
||||
if exp, ok := l.locks[id]; ok && exp.After(now) {
|
||||
return false
|
||||
}
|
||||
l.locks[id] = now.Add(ttl)
|
||||
return true
|
||||
}
|
||||
|
||||
func (l *RetryLocker) Release(id uuid.UUID) {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
delete(l.locks, id)
|
||||
}
|
||||
|
||||
type RetryMetadataInput struct {
|
||||
Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"`
|
||||
Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"`
|
||||
@@ -69,6 +99,7 @@ func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, provider ai
|
||||
sessions: sessions,
|
||||
metadataTimeout: metadataTimeout,
|
||||
logger: logger,
|
||||
lock: NewRetryLocker(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,6 +113,10 @@ func (t *RetryMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest
|
||||
|
||||
func (r *MetadataRetryer) QueueThought(id uuid.UUID) {
|
||||
go func() {
|
||||
if !r.lock.Acquire(id, 15*time.Minute) {
|
||||
return
|
||||
}
|
||||
defer r.lock.Release(id)
|
||||
if _, err := r.retryOne(r.backgroundCtx, id); err != nil {
|
||||
r.logger.Warn("background metadata retry failed", slog.String("thought_id", id.String()), slog.String("error", err.Error()))
|
||||
}
|
||||
@@ -138,7 +173,14 @@ func (r *MetadataRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest,
|
||||
out.Retried++
|
||||
mu.Unlock()
|
||||
|
||||
if !r.lock.Acquire(thought.ID, 15*time.Minute) {
|
||||
mu.Lock()
|
||||
out.Skipped++
|
||||
mu.Unlock()
|
||||
return
|
||||
}
|
||||
updated, err := r.retryOne(ctx, thought.ID)
|
||||
r.lock.Release(thought.ID)
|
||||
if err != nil {
|
||||
mu.Lock()
|
||||
out.Failures = append(out.Failures, RetryMetadataFailure{ID: thought.ID.String(), Error: err.Error()})
|
||||
|
||||
@@ -52,14 +52,15 @@ type StoredFileFilter struct {
|
||||
}
|
||||
|
||||
type Thought struct {
|
||||
ID uuid.UUID `json:"id"`
|
||||
Content string `json:"content"`
|
||||
Embedding []float32 `json:"embedding,omitempty"`
|
||||
Metadata ThoughtMetadata `json:"metadata"`
|
||||
ProjectID *uuid.UUID `json:"project_id,omitempty"`
|
||||
ArchivedAt *time.Time `json:"archived_at,omitempty"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
ID uuid.UUID `json:"id"`
|
||||
Content string `json:"content"`
|
||||
Embedding []float32 `json:"embedding,omitempty"`
|
||||
EmbeddingStatus string `json:"embedding_status,omitempty"`
|
||||
Metadata ThoughtMetadata `json:"metadata"`
|
||||
ProjectID *uuid.UUID `json:"project_id,omitempty"`
|
||||
ArchivedAt *time.Time `json:"archived_at,omitempty"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
type SearchResult struct {
|
||||
|
||||
@@ -2,6 +2,12 @@
|
||||
|
||||
AMCS (Avalon Memory Crystal Server) is an MCP server for capturing and retrieving thoughts, memory, and project context. It is backed by Postgres with pgvector for semantic search.
|
||||
|
||||
`amcs-cli` is a pre-built CLI that connects to the AMCS MCP server so agents do not need to implement their own HTTP MCP client. Download it from https://git.warky.dev/wdevs/amcs/releases
|
||||
|
||||
The key command is `amcs-cli stdio`, which bridges the remote HTTP MCP server to a local stdio MCP transport. Register it as a stdio MCP server in your agent config and all AMCS tools are available immediately without any custom client code.
|
||||
|
||||
Configure with `~/.config/amcs/config.yaml` (`server`, `token`), env vars `AMCS_URL` / `AMCS_TOKEN`, or `--server` / `--token` flags.
|
||||
|
||||
You have access to an MCP memory server named AMCS.
|
||||
|
||||
Use AMCS as memory with two scopes:
|
||||
|
||||
826
llm/options_to_openclaw.md
Normal file
826
llm/options_to_openclaw.md
Normal file
@@ -0,0 +1,826 @@
|
||||
# AMCS → OpenClaw Alternative: Gap Analysis & Roadmap
|
||||
|
||||
## Context
|
||||
|
||||
AMCS is a **passive** MCP memory server. OpenClaw's key differentiator is that it's an **always-on autonomous agent** — it proactively acts, monitors, and learns without human prompting. AMCS has the data model and search foundation; it's missing the execution engine and channel integrations that make OpenClaw compelling.
|
||||
|
||||
OpenClaw's 3 pillars AMCS lacks:
|
||||
1. **Autonomous heartbeat** — scheduled jobs that run without user prompts
|
||||
2. **Channel integrations** — 25+ messaging platforms (Telegram, Slack, Discord, email, etc.)
|
||||
3. **Self-improving memory** — knowledge graph distillation, daily notes, living summary (MEMORY.md)
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Autonomous Heartbeat Engine (Critical — unlocks everything else)
|
||||
|
||||
### 1a. Add `Complete()` to AI Provider
|
||||
|
||||
The current `Provider` interface in `internal/ai/provider.go` only supports `Summarize(ctx, systemPrompt, userPrompt)`. An autonomous agent needs a stateful multi-turn call with tool awareness.
|
||||
|
||||
**Extend the interface:**
|
||||
|
||||
```go
|
||||
// internal/ai/provider.go
|
||||
|
||||
type CompletionRole string
|
||||
|
||||
const (
|
||||
RoleSystem CompletionRole = "system"
|
||||
RoleUser CompletionRole = "user"
|
||||
RoleAssistant CompletionRole = "assistant"
|
||||
)
|
||||
|
||||
type CompletionMessage struct {
|
||||
Role CompletionRole `json:"role"`
|
||||
Content string `json:"content"`
|
||||
}
|
||||
|
||||
type CompletionResult struct {
|
||||
Content string `json:"content"`
|
||||
StopReason string `json:"stop_reason"` // "stop" | "length" | "error"
|
||||
Model string `json:"model"`
|
||||
}
|
||||
|
||||
type Provider interface {
|
||||
Embed(ctx context.Context, input string) ([]float32, error)
|
||||
ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error)
|
||||
Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error)
|
||||
Complete(ctx context.Context, messages []CompletionMessage) (CompletionResult, error)
|
||||
Name() string
|
||||
EmbeddingModel() string
|
||||
}
|
||||
```
|
||||
|
||||
**Implement in `internal/ai/compat/client.go`:**
|
||||
|
||||
`Complete` is a simplification of the existing `extractMetadataWithModel` path — same OpenAI-compatible `/chat/completions` endpoint, same auth headers, no JSON schema coercion. Add a `chatCompletionsRequest` type (reuse or extend the existing unexported struct) and a `Complete` method on `*Client` that:
|
||||
1. Builds the request body from `[]CompletionMessage`
|
||||
2. POSTs to `c.baseURL + "/chat/completions"` with `c.metadataModel`
|
||||
3. Reads the first choice's `message.content`
|
||||
4. Returns `CompletionResult{Content, StopReason, Model}`
|
||||
|
||||
Error handling mirrors the metadata path: on HTTP 429/503 mark the model unhealthy (`c.modelHealth`), return a wrapped error. No fallback model chain needed for agent calls — callers should retry on next heartbeat tick.
|
||||
|
||||
---
|
||||
|
||||
### 1b. Heartbeat Engine Package
|
||||
|
||||
**New package: `internal/agent/`**
|
||||
|
||||
#### `internal/agent/job.go`
|
||||
|
||||
```go
|
||||
package agent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Job is a single scheduled unit of autonomous work.
|
||||
type Job interface {
|
||||
Name() string
|
||||
Interval() time.Duration
|
||||
Run(ctx context.Context) error
|
||||
}
|
||||
```
|
||||
|
||||
#### `internal/agent/engine.go`
|
||||
|
||||
The engine manages a set of jobs and fires each on its own ticker. It mirrors the pattern already used for `runBackfillPass` and `runMetadataRetryPass` in `internal/app/app.go`, but generalises it.
|
||||
|
||||
```go
|
||||
package agent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Engine struct {
|
||||
jobs []Job
|
||||
store JobStore // persists agent_job_runs rows
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func NewEngine(store JobStore, logger *slog.Logger, jobs ...Job) *Engine {
|
||||
return &Engine{jobs: jobs, store: store, logger: logger}
|
||||
}
|
||||
|
||||
// Run starts all job tickers and blocks until ctx is cancelled.
|
||||
func (e *Engine) Run(ctx context.Context) {
|
||||
var wg sync.WaitGroup
|
||||
for _, job := range e.jobs {
|
||||
wg.Add(1)
|
||||
go func(j Job) {
|
||||
defer wg.Done()
|
||||
e.runLoop(ctx, j)
|
||||
}(job)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
func (e *Engine) runLoop(ctx context.Context, j Job) {
|
||||
ticker := time.NewTicker(j.Interval())
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
e.runOnce(ctx, j)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Engine) runOnce(ctx context.Context, j Job) {
|
||||
runID, err := e.store.StartRun(ctx, j.Name())
|
||||
if err != nil {
|
||||
e.logger.Error("agent: failed to start job run record",
|
||||
slog.String("job", j.Name()), slog.String("error", err.Error()))
|
||||
return
|
||||
}
|
||||
if err := j.Run(ctx); err != nil {
|
||||
e.logger.Error("agent: job failed",
|
||||
slog.String("job", j.Name()), slog.String("error", err.Error()))
|
||||
_ = e.store.FinishRun(ctx, runID, "failed", "", err.Error())
|
||||
return
|
||||
}
|
||||
_ = e.store.FinishRun(ctx, runID, "ok", "", "")
|
||||
e.logger.Info("agent: job complete", slog.String("job", j.Name()))
|
||||
}
|
||||
```
|
||||
|
||||
**Deduplication / double-run prevention:** `StartRun` should check for an existing `running` row younger than `2 * j.Interval()` and return `ErrAlreadyRunning` — the caller skips that tick.
|
||||
|
||||
#### `internal/agent/distill.go`
|
||||
|
||||
```go
|
||||
// DistillJob clusters semantically related thoughts and promotes
|
||||
// durable insights into knowledge nodes.
|
||||
type DistillJob struct {
|
||||
store store.ThoughtQuerier
|
||||
provider ai.Provider
|
||||
cfg AgentDistillConfig
|
||||
projectID *uuid.UUID // nil = all projects
|
||||
}
|
||||
|
||||
func (j *DistillJob) Name() string { return "distill" }
|
||||
func (j *DistillJob) Interval() time.Duration { return j.cfg.Interval }
|
||||
|
||||
func (j *DistillJob) Run(ctx context.Context) error {
|
||||
// 1. Fetch recent thoughts not yet distilled (metadata.distilled != true)
|
||||
// using store.ListThoughts with filter Days = cfg.MinAgeHours/24
|
||||
// 2. Group into semantic clusters via SearchSimilarThoughts
|
||||
// 3. For each cluster > MinClusterSize:
|
||||
// a. Call provider.Summarize with insight extraction prompt
|
||||
// b. InsertThought with type="insight", metadata.knowledge_node=true
|
||||
// c. InsertLink from each cluster member to the insight, relation="distilled_from"
|
||||
// d. UpdateThought on each source to set metadata.distilled=true
|
||||
// 4. Return nil; partial failures are logged but do not abort the run
|
||||
}
|
||||
```
|
||||
|
||||
Prompt used in step 3a:
|
||||
```
|
||||
System: You extract durable knowledge from a cluster of related notes.
|
||||
Return a single paragraph (2-5 sentences) capturing the core insight.
|
||||
Do not reference the notes themselves. Write in third person.
|
||||
User: [concatenated thought content, newest first, max 4000 tokens]
|
||||
```
|
||||
|
||||
#### `internal/agent/daily_notes.go`
|
||||
|
||||
Runs at a configured hour each day (checked by comparing `time.Now().Hour()` against `cfg.Hour` inside the loop — skip if already ran today by querying `agent_job_runs` for a successful `daily_notes` run with `started_at >= today midnight`).
|
||||
|
||||
Collects:
|
||||
- Thoughts created today (`store.ListThoughts` with `Days=1`)
|
||||
- CRM interactions logged today
|
||||
- Calendar activities for today
|
||||
- Maintenance logs from today
|
||||
|
||||
Formats into a structured markdown string and calls `store.InsertThought` with `type=daily_note`.
|
||||
|
||||
#### `internal/agent/living_summary.go`
|
||||
|
||||
Regenerates `MEMORY.md` from the last N daily notes + all knowledge nodes. Calls `provider.Summarize` and upserts the result via `store.UpsertFile` using a fixed name `MEMORY.md` scoped to the project (or global if no project).
|
||||
|
||||
---
|
||||
|
||||
### 1c. Config Structs
|
||||
|
||||
Add to `internal/config/config.go`:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
// ... existing fields ...
|
||||
Agent AgentConfig `yaml:"agent"`
|
||||
Channels ChannelsConfig `yaml:"channels"`
|
||||
Shell ShellConfig `yaml:"shell"`
|
||||
}
|
||||
|
||||
type AgentConfig struct {
|
||||
Enabled bool `yaml:"enabled"`
|
||||
Distill AgentDistillConfig `yaml:"distill"`
|
||||
DailyNotes AgentDailyNotesConfig `yaml:"daily_notes"`
|
||||
LivingSummary AgentLivingSummary `yaml:"living_summary"`
|
||||
Archival AgentArchivalConfig `yaml:"archival"`
|
||||
Model string `yaml:"model"` // override for agent calls; falls back to AI.Metadata.Model
|
||||
}
|
||||
|
||||
type AgentDistillConfig struct {
|
||||
Enabled bool `yaml:"enabled"`
|
||||
Interval time.Duration `yaml:"interval"` // default: 24h
|
||||
BatchSize int `yaml:"batch_size"` // thoughts per run; default: 50
|
||||
MinClusterSize int `yaml:"min_cluster_size"` // default: 3
|
||||
MinAgeHours int `yaml:"min_age_hours"` // ignore thoughts younger than this; default: 6
|
||||
}
|
||||
|
||||
type AgentDailyNotesConfig struct {
|
||||
Enabled bool `yaml:"enabled"`
|
||||
Hour int `yaml:"hour"` // 0-23 UTC; default: 23
|
||||
}
|
||||
|
||||
type AgentLivingSummary struct {
|
||||
Enabled bool `yaml:"enabled"`
|
||||
Interval time.Duration `yaml:"interval"` // default: 24h
|
||||
MaxDays int `yaml:"max_days"` // daily notes lookback; default: 30
|
||||
}
|
||||
|
||||
type AgentArchivalConfig struct {
|
||||
Enabled bool `yaml:"enabled"`
|
||||
Interval time.Duration `yaml:"interval"` // default: 168h (weekly)
|
||||
ArchiveOlderThan int `yaml:"archive_older_than_days"` // default: 90
|
||||
}
|
||||
```
|
||||
|
||||
**Full YAML reference (`configs/dev.yaml` additions):**
|
||||
|
||||
```yaml
|
||||
agent:
|
||||
enabled: false
|
||||
model: "" # leave blank to reuse ai.metadata.model
|
||||
distill:
|
||||
enabled: false
|
||||
interval: 24h
|
||||
batch_size: 50
|
||||
min_cluster_size: 3
|
||||
min_age_hours: 6
|
||||
daily_notes:
|
||||
enabled: false
|
||||
hour: 23 # UTC hour to generate (0–23)
|
||||
living_summary:
|
||||
enabled: false
|
||||
interval: 24h
|
||||
max_days: 30
|
||||
archival:
|
||||
enabled: false
|
||||
interval: 168h
|
||||
archive_older_than_days: 90
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 1d. Wire into `internal/app/app.go`
|
||||
|
||||
After the existing `MetadataRetry` goroutine block:
|
||||
|
||||
```go
|
||||
if cfg.Agent.Enabled {
|
||||
jobStore := store.NewJobStore(db)
|
||||
var jobs []agent.Job
|
||||
if cfg.Agent.Distill.Enabled {
|
||||
jobs = append(jobs, agent.NewDistillJob(db, provider, cfg.Agent.Distill, nil))
|
||||
}
|
||||
if cfg.Agent.DailyNotes.Enabled {
|
||||
jobs = append(jobs, agent.NewDailyNotesJob(db, provider, cfg.Agent.DailyNotes))
|
||||
}
|
||||
if cfg.Agent.LivingSummary.Enabled {
|
||||
jobs = append(jobs, agent.NewLivingSummaryJob(db, provider, cfg.Agent.LivingSummary))
|
||||
}
|
||||
if cfg.Agent.Archival.Enabled {
|
||||
jobs = append(jobs, agent.NewArchivalJob(db, cfg.Agent.Archival))
|
||||
}
|
||||
engine := agent.NewEngine(jobStore, logger, jobs...)
|
||||
go engine.Run(ctx)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 1e. New MCP Tools — `internal/tools/agent.go`
|
||||
|
||||
```go
|
||||
// list_agent_jobs
|
||||
// Returns all registered jobs with: name, interval, last_run (status, started_at, finished_at), next_run estimate.
|
||||
|
||||
// trigger_agent_job
|
||||
// Input: { "job": "distill" }
|
||||
// Fires the job immediately in a goroutine; returns a run_id for polling.
|
||||
|
||||
// get_agent_job_history
|
||||
// Input: { "job": "distill", "limit": 20 }
|
||||
// Returns rows from agent_job_runs ordered by started_at DESC.
|
||||
```
|
||||
|
||||
Register in `internal/app/app.go` routes by adding `Agent tools.AgentTool` to `mcpserver.ToolSet` and wiring `tools.NewAgentTool(engine)`.
|
||||
|
||||
---
|
||||
|
||||
### 1f. Migration — `migrations/021_agent_jobs.sql`
|
||||
|
||||
```sql
|
||||
CREATE TABLE agent_job_runs (
|
||||
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
job_name text NOT NULL,
|
||||
started_at timestamptz NOT NULL DEFAULT now(),
|
||||
finished_at timestamptz,
|
||||
status text NOT NULL DEFAULT 'running', -- running | ok | failed | skipped
|
||||
output text,
|
||||
error text,
|
||||
metadata jsonb NOT NULL DEFAULT '{}'
|
||||
);
|
||||
|
||||
CREATE INDEX idx_agent_job_runs_lookup
|
||||
ON agent_job_runs (job_name, started_at DESC);
|
||||
```
|
||||
|
||||
**`JobStore` interface (`internal/store/agent.go`):**
|
||||
|
||||
```go
|
||||
type JobStore interface {
|
||||
StartRun(ctx context.Context, jobName string) (uuid.UUID, error)
|
||||
FinishRun(ctx context.Context, id uuid.UUID, status, output, errMsg string) error
|
||||
LastRun(ctx context.Context, jobName string) (*AgentJobRun, error)
|
||||
ListRuns(ctx context.Context, jobName string, limit int) ([]AgentJobRun, error)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Knowledge Graph Distillation
|
||||
|
||||
Builds on Phase 1's distillation job. `thought_links` already exists with typed `relation` — the missing piece is a way to mark and query promoted knowledge nodes.
|
||||
|
||||
### 2a. Extend `ThoughtMetadata`
|
||||
|
||||
In `internal/types/thought.go`, add two fields to `ThoughtMetadata`:
|
||||
|
||||
```go
|
||||
type ThoughtMetadata struct {
|
||||
// ... existing fields ...
|
||||
KnowledgeNode bool `json:"knowledge_node,omitempty"` // true = promoted insight
|
||||
KnowledgeWeight int `json:"knowledge_weight,omitempty"` // number of source thoughts that fed this node
|
||||
Distilled bool `json:"distilled,omitempty"` // true = this thought has been processed by distill job
|
||||
}
|
||||
```
|
||||
|
||||
These are stored in the existing `metadata jsonb` column — no schema migration needed.
|
||||
|
||||
### 2b. Store Addition
|
||||
|
||||
In `internal/store/thoughts.go` add:
|
||||
|
||||
```go
|
||||
// ListKnowledgeNodes returns thoughts where metadata->>'knowledge_node' = 'true',
|
||||
// ordered by knowledge_weight DESC, then created_at DESC.
|
||||
func (db *DB) ListKnowledgeNodes(ctx context.Context, projectID *uuid.UUID, limit int) ([]types.Thought, error)
|
||||
```
|
||||
|
||||
SQL:
|
||||
```sql
|
||||
SELECT id, content, metadata, project_id, archived_at, created_at, updated_at
|
||||
FROM thoughts
|
||||
WHERE (metadata->>'knowledge_node')::boolean = true
|
||||
AND ($1::uuid IS NULL OR project_id = $1)
|
||||
AND archived_at IS NULL
|
||||
ORDER BY (metadata->>'knowledge_weight')::int DESC, created_at DESC
|
||||
LIMIT $2
|
||||
```
|
||||
|
||||
### 2c. New MCP Tools — `internal/tools/knowledge.go`
|
||||
|
||||
```go
|
||||
// get_knowledge_graph
|
||||
// Input: { "project_id": "uuid|null", "limit": 50 }
|
||||
// Returns: { nodes: [Thought], edges: [ThoughtLink] }
|
||||
// Fetches ListKnowledgeNodes + their outgoing/incoming links via store.GetThoughtLinks.
|
||||
|
||||
// distill_now
|
||||
// Input: { "project_id": "uuid|null", "batch_size": 20 }
|
||||
// Triggers the distillation job synchronously (for on-demand use); returns { insights_created: N }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Channel Integrations — Telegram First
|
||||
|
||||
### 3a. Channel Adapter Interface — `internal/channels/channel.go`
|
||||
|
||||
```go
|
||||
package channels
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Attachment struct {
|
||||
Name string
|
||||
MediaType string
|
||||
Data []byte
|
||||
}
|
||||
|
||||
type InboundMessage struct {
|
||||
ChannelID string // e.g. telegram chat ID as string
|
||||
SenderID string // e.g. telegram user ID as string
|
||||
SenderName string // display name
|
||||
Text string
|
||||
Attachments []Attachment
|
||||
Timestamp time.Time
|
||||
Raw any // original platform message for debug/logging
|
||||
}
|
||||
|
||||
type Channel interface {
|
||||
Name() string
|
||||
Start(ctx context.Context, handler func(InboundMessage)) error
|
||||
Send(ctx context.Context, channelID string, text string) error
|
||||
}
|
||||
```
|
||||
|
||||
### 3b. Telegram Implementation — `internal/channels/telegram/bot.go`
|
||||
|
||||
Uses `net/http` only (no external Telegram SDK). Long-polling loop:
|
||||
|
||||
```go
|
||||
type Bot struct {
|
||||
token string
|
||||
allowedIDs map[int64]struct{} // empty = all allowed
|
||||
baseURL string // https://api.telegram.org/bot{token}
|
||||
client *http.Client
|
||||
offset int64
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func (b *Bot) Name() string { return "telegram" }
|
||||
|
||||
func (b *Bot) Start(ctx context.Context, handler func(channels.InboundMessage)) error {
|
||||
for {
|
||||
updates, err := b.getUpdates(ctx, b.offset, 30 /*timeout seconds*/)
|
||||
if err != nil {
|
||||
if ctx.Err() != nil { return nil }
|
||||
// transient error: log and back off 5s
|
||||
time.Sleep(5 * time.Second)
|
||||
continue
|
||||
}
|
||||
for _, u := range updates {
|
||||
b.offset = u.UpdateID + 1
|
||||
if u.Message == nil { continue }
|
||||
if !b.isAllowed(u.Message.Chat.ID) { continue }
|
||||
handler(b.toInbound(u.Message))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *Bot) Send(ctx context.Context, channelID string, text string) error {
|
||||
// POST /sendMessage with chat_id and text
|
||||
// Splits messages > 4096 chars automatically
|
||||
}
|
||||
```
|
||||
|
||||
**Error handling:**
|
||||
- HTTP 401 (bad token): return fatal error, engine stops channel
|
||||
- HTTP 429 (rate limit): respect `retry_after` from response body, sleep, retry
|
||||
- HTTP 5xx: exponential backoff (5s → 10s → 30s → 60s), max 3 retries then sleep 5 min
|
||||
|
||||
### 3c. Channel Router — `internal/channels/router.go`
|
||||
|
||||
```go
|
||||
type Router struct {
|
||||
store store.ContactQuerier
|
||||
thoughts store.ThoughtInserter
|
||||
provider ai.Provider
|
||||
channels map[string]channels.Channel
|
||||
cfg config.ChannelsConfig
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func (r *Router) Handle(msg channels.InboundMessage) {
|
||||
// 1. Resolve sender → CRM contact (by channel_identifiers->>'telegram' = senderID)
|
||||
// If not found: create a new professional_contact with the sender name + channel identifier
|
||||
// 2. Capture message as thought:
|
||||
// content = msg.Text
|
||||
// metadata.source = "telegram"
|
||||
// metadata.type = "observation"
|
||||
// metadata.people = [senderName]
|
||||
// metadata (extra, stored in JSONB): channel="telegram", channel_id=msg.ChannelID, sender_id=msg.SenderID
|
||||
// 3. If cfg.Telegram.Respond:
|
||||
// a. Load recent context via store.SearchSimilarThoughts(msg.Text, limit=10)
|
||||
// b. Build []CompletionMessage with system context + recent thoughts + user message
|
||||
// c. Call provider.Complete(ctx, messages)
|
||||
// d. Capture response as thought (type="assistant_response", source="telegram")
|
||||
// e. Send reply via channel.Send(ctx, msg.ChannelID, result.Content)
|
||||
// f. Save chat history via store.InsertChatHistory
|
||||
}
|
||||
```
|
||||
|
||||
**Agent response system prompt (step 3b):**
|
||||
```
|
||||
You are a personal assistant with access to the user's memory.
|
||||
Relevant context from memory:
|
||||
{joined recent thought content}
|
||||
|
||||
Respond concisely. If you cannot answer from memory, say so.
|
||||
```
|
||||
|
||||
### 3d. Config — full YAML reference
|
||||
|
||||
```yaml
|
||||
channels:
|
||||
telegram:
|
||||
enabled: false
|
||||
bot_token: ""
|
||||
allowed_chat_ids: [] # empty = all chats allowed
|
||||
capture_all: true # save every inbound message as a thought
|
||||
respond: true # send LLM reply back to sender
|
||||
response_model: "" # blank = uses agent.model or ai.metadata.model
|
||||
poll_timeout_seconds: 30 # Telegram long-poll timeout (max 60)
|
||||
max_message_length: 4096 # split replies longer than this
|
||||
discord:
|
||||
enabled: false
|
||||
bot_token: ""
|
||||
guild_ids: [] # empty = all guilds
|
||||
capture_all: true
|
||||
respond: true
|
||||
slack:
|
||||
enabled: false
|
||||
bot_token: ""
|
||||
app_token: "" # for socket mode
|
||||
capture_all: true
|
||||
respond: true
|
||||
email:
|
||||
enabled: false
|
||||
imap_host: ""
|
||||
imap_port: 993
|
||||
smtp_host: ""
|
||||
smtp_port: 587
|
||||
username: ""
|
||||
password: ""
|
||||
poll_interval: 5m
|
||||
capture_all: true
|
||||
folders: ["INBOX"]
|
||||
```
|
||||
|
||||
### 3e. Schema Migration — `migrations/022_channel_contacts.sql`
|
||||
|
||||
```sql
|
||||
-- Store per-channel identity handles on CRM contacts
|
||||
ALTER TABLE professional_contacts
|
||||
ADD COLUMN IF NOT EXISTS channel_identifiers jsonb NOT NULL DEFAULT '{}';
|
||||
|
||||
-- e.g. {"telegram": "123456789", "discord": "user#1234", "slack": "U01234567"}
|
||||
CREATE INDEX idx_contacts_telegram_id
|
||||
ON professional_contacts ((channel_identifiers->>'telegram'))
|
||||
WHERE channel_identifiers->>'telegram' IS NOT NULL;
|
||||
```
|
||||
|
||||
### 3f. New MCP Tools — `internal/tools/channels.go`
|
||||
|
||||
```go
|
||||
// send_channel_message
|
||||
// Input: { "channel": "telegram", "channel_id": "123456789", "text": "Hello" }
|
||||
// Sends a message on the named channel. Returns { sent: true, channel: "telegram" }
|
||||
|
||||
// list_channel_conversations
|
||||
// Input: { "channel": "telegram", "limit": 20, "days": 7 }
|
||||
// Lists chat histories filtered by channel metadata. Wraps store.ListChatHistories.
|
||||
|
||||
// get_channel_status
|
||||
// Returns: [{ channel: "telegram", connected: true, uptime_seconds: 3600 }, ...]
|
||||
```
|
||||
|
||||
### 3g. Future Channel Adapters
|
||||
|
||||
Each is a new subdirectory implementing `channels.Channel`. No router or MCP tool changes needed.
|
||||
|
||||
| Channel | Package | Approach |
|
||||
|---------|---------|----------|
|
||||
| Discord | `internal/channels/discord/` | Gateway WebSocket (discord.com/api/gateway); or use `discordgo` lib |
|
||||
| Slack | `internal/channels/slack/` | Socket Mode WebSocket (no public URL needed) |
|
||||
| Email (IMAP) | `internal/channels/email/` | IMAP IDLE or poll; SMTP for send |
|
||||
| Signal | `internal/channels/signal/` | Wrap `signal-cli` JSON-RPC subprocess |
|
||||
| WhatsApp | `internal/channels/whatsapp/` | Meta Cloud API webhook (requires public URL) |
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Shell / Computer Access
|
||||
|
||||
### 4a. Shell Tool — `internal/tools/shell.go`
|
||||
|
||||
```go
|
||||
type ShellInput struct {
|
||||
Command string `json:"command"`
|
||||
WorkingDir string `json:"working_dir,omitempty"` // override default; must be within allowed prefix
|
||||
Timeout string `json:"timeout,omitempty"` // e.g. "30s"; overrides config default
|
||||
CaptureAs string `json:"capture_as,omitempty"` // thought type for stored output; default "shell_output"
|
||||
SaveOutput bool `json:"save_output"` // store stdout/stderr as a thought
|
||||
}
|
||||
|
||||
type ShellOutput struct {
|
||||
Stdout string `json:"stdout"`
|
||||
Stderr string `json:"stderr"`
|
||||
ExitCode int `json:"exit_code"`
|
||||
ThoughtID *uuid.UUID `json:"thought_id,omitempty"` // set if save_output=true
|
||||
}
|
||||
```
|
||||
|
||||
**Execution model:**
|
||||
1. Validate `command` against `cfg.Shell.AllowedCommands` (if non-empty) and `cfg.Shell.BlockedCommands`
|
||||
2. `exec.CommandContext(ctx, "sh", "-c", command)` with `Dir` set to working dir
|
||||
3. Capture stdout + stderr into `bytes.Buffer`
|
||||
4. On timeout: kill process group (`syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL)`), return exit code -1
|
||||
5. If `SaveOutput`: call `store.InsertThought` with content = truncated stdout (max 8KB) + stderr summary
|
||||
|
||||
**Security controls:**
|
||||
|
||||
```yaml
|
||||
shell:
|
||||
enabled: false
|
||||
working_dir: "/tmp/amcs-agent" # all commands run here unless overridden
|
||||
allowed_working_dirs: # if set, working_dir overrides must be within one of these
|
||||
- "/tmp/amcs-agent"
|
||||
- "/home/user/projects"
|
||||
timeout: 30s
|
||||
max_output_bytes: 65536 # truncate captured output beyond this
|
||||
allowed_commands: [] # empty = all; non-empty = exact binary name allowlist
|
||||
blocked_commands: # checked before allowed_commands
|
||||
- "rm"
|
||||
- "sudo"
|
||||
- "su"
|
||||
- "curl"
|
||||
- "wget"
|
||||
save_output_by_default: false
|
||||
```
|
||||
|
||||
The tool is registered with `mcp.Tool.Annotations` `Destructive: true` so MCP clients prompt for confirmation.
|
||||
|
||||
### 4b. File Bridge Tools
|
||||
|
||||
Also in `internal/tools/shell.go`:
|
||||
|
||||
```go
|
||||
// read_file_from_path
|
||||
// Input: { "path": "/abs/path/file.txt", "link_to_thought": "uuid|null" }
|
||||
// Reads file from server filesystem → stores as AMCS file via store.InsertFile
|
||||
// Returns: { file_id: "uuid", size_bytes: N, media_type: "text/plain" }
|
||||
|
||||
// write_file_to_path
|
||||
// Input: { "file_id": "uuid", "path": "/abs/path/output.txt" }
|
||||
// Loads AMCS file → writes to filesystem path
|
||||
// Path must be within cfg.Shell.AllowedWorkingDirs if set
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Self-Improving Memory
|
||||
|
||||
### 5a. Skill Discovery Job — `internal/agent/skill_discovery.go`
|
||||
|
||||
Runs weekly. Algorithm:
|
||||
|
||||
1. Load last 30 days of `chat_histories` via `store.ListChatHistories(days=30)`
|
||||
2. Extract assistant message patterns with `provider.Complete`:
|
||||
```
|
||||
System: Identify reusable behavioural patterns or preferences visible in these conversations.
|
||||
Return a JSON array of { "name": "...", "description": "...", "tags": [...] }.
|
||||
Only include patterns that would be useful across future sessions.
|
||||
User: [last N assistant + user messages, newest first]
|
||||
```
|
||||
3. For each discovered pattern, call `store.InsertSkill` with tag `auto-discovered` and the current date
|
||||
4. Link to all projects via `store.LinkSkillToProject`
|
||||
|
||||
Deduplication: before inserting, call `store.SearchSkills(pattern.name)` — if similarity > 0.9, skip.
|
||||
|
||||
### 5b. Thought Archival Job — `internal/agent/archival.go`
|
||||
|
||||
```go
|
||||
func (j *ArchivalJob) Run(ctx context.Context) error {
|
||||
// 1. ListThoughts older than cfg.ArchiveOlderThan (days) with no knowledge_node link
|
||||
// SQL: thoughts where created_at < now() - interval '$N days'
|
||||
// AND metadata->>'knowledge_node' IS DISTINCT FROM 'true'
|
||||
// AND archived_at IS NULL
|
||||
// AND id NOT IN (SELECT thought_id FROM thought_links WHERE relation = 'distilled_from')
|
||||
// 2. For each batch: store.ArchiveThought(ctx, id)
|
||||
// 3. Log count
|
||||
}
|
||||
```
|
||||
|
||||
Uses the existing `ArchiveThought` store method — no new SQL needed.
|
||||
|
||||
---
|
||||
|
||||
## End-to-End Agent Loop Flow
|
||||
|
||||
```
|
||||
Telegram message arrives
|
||||
│
|
||||
▼
|
||||
channels/telegram/bot.go (long-poll goroutine)
|
||||
│ InboundMessage{}
|
||||
▼
|
||||
channels/router.go Handle()
|
||||
├── Resolve sender → CRM contact (store.SearchContacts by channel_identifiers)
|
||||
├── store.InsertThought (source="telegram", type="observation")
|
||||
├── store.SearchSimilarThoughts (semantic context retrieval)
|
||||
├── ai.Provider.Complete (build messages → LLM call)
|
||||
├── store.InsertThought (source="telegram", type="assistant_response")
|
||||
├── store.InsertChatHistory (full turn saved)
|
||||
└── channels.Channel.Send (reply dispatched to Telegram)
|
||||
|
||||
Meanwhile, every 24h:
|
||||
agent/engine.go ticker fires DistillJob
|
||||
├── store.ListThoughts (recent, not yet distilled)
|
||||
├── store.SearchSimilarThoughts (cluster by semantic similarity)
|
||||
├── ai.Provider.Summarize (insight extraction prompt)
|
||||
├── store.InsertThought (type="insight", knowledge_node=true)
|
||||
└── store.InsertLink (relation="distilled_from" for each source)
|
||||
|
||||
After distill:
|
||||
agent/living_summary.go
|
||||
├── store.ListKnowledgeNodes
|
||||
├── store.ListThoughts (type="daily_note", last 30 days)
|
||||
├── ai.Provider.Summarize (MEMORY.md regeneration)
|
||||
└── store.UpsertFile (name="MEMORY.md", linked to project)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Error Handling & Retry Strategy
|
||||
|
||||
| Scenario | Handling |
|
||||
|----------|----------|
|
||||
| LLM returns 429 | Mark model unhealthy in `modelHealth` map (existing pattern), return error, engine logs and skips tick |
|
||||
| LLM returns 5xx | Same as 429 |
|
||||
| Telegram 429 | Read `retry_after` from response, sleep exact duration, retry immediately |
|
||||
| Telegram 5xx | Exponential backoff: 5s → 10s → 30s → 60s, reset after success |
|
||||
| Telegram disconnects | Long-poll timeout naturally retries; context cancel exits cleanly |
|
||||
| Agent job panics | `engine.runOnce` wraps in `recover()`, logs stack trace, marks run `failed` |
|
||||
| Agent double-run | `store.StartRun` checks for `running` row < `2 * interval` old → returns `ErrAlreadyRunning`, tick skipped silently |
|
||||
| Shell command timeout | `exec.CommandContext` kills process group via SIGKILL, returns exit_code=-1 and partial output |
|
||||
| Distillation partial failure | Each cluster processed independently; failure of one cluster logged and skipped, others continue |
|
||||
|
||||
---
|
||||
|
||||
## Critical Files
|
||||
|
||||
| File | Change |
|
||||
|------|--------|
|
||||
| `internal/ai/provider.go` | Add `Complete()`, `CompletionMessage`, `CompletionResult` |
|
||||
| `internal/ai/compat/client.go` | Implement `Complete()` on `*Client` |
|
||||
| `internal/config/config.go` | Add `AgentConfig`, `ChannelsConfig`, `ShellConfig` |
|
||||
| `internal/types/thought.go` | Add `KnowledgeNode`, `KnowledgeWeight`, `Distilled` to `ThoughtMetadata` |
|
||||
| `internal/store/thoughts.go` | Add `ListKnowledgeNodes()` |
|
||||
| `internal/store/agent.go` | New: `JobStore` interface + implementation |
|
||||
| `internal/app/app.go` | Wire agent engine + channel router goroutines |
|
||||
| `internal/mcpserver/server.go` | Add `Agent`, `Knowledge`, `Channels`, `Shell` to `ToolSet` |
|
||||
| `internal/agent/` | New package: engine, job, distill, daily_notes, living_summary, archival, skill_discovery |
|
||||
| `internal/channels/` | New package: channel interface, router, telegram/ |
|
||||
| `internal/tools/agent.go` | New: list_agent_jobs, trigger_agent_job, get_agent_job_history |
|
||||
| `internal/tools/knowledge.go` | New: get_knowledge_graph, distill_now |
|
||||
| `internal/tools/channels.go` | New: send_channel_message, list_channel_conversations, get_channel_status |
|
||||
| `internal/tools/shell.go` | New: run_shell_command, read_file_from_path, write_file_to_path |
|
||||
| `migrations/021_agent_jobs.sql` | New table: agent_job_runs |
|
||||
| `migrations/022_channel_contacts.sql` | ALTER professional_contacts: add channel_identifiers jsonb |
|
||||
|
||||
---
|
||||
|
||||
## Sequence / Parallelism
|
||||
|
||||
```
|
||||
Phase 1 (Heartbeat Engine) ──► Phase 2 (Knowledge Graph)
|
||||
└──► Phase 5 (Self-Improving)
|
||||
|
||||
Phase 3 (Telegram) ──► Phase 3g (Discord / Slack / Email)
|
||||
|
||||
Phase 4 (Shell) [fully independent — no dependencies on other phases]
|
||||
```
|
||||
|
||||
**Minimum viable OpenClaw competitor = Phase 1 + Phase 3** (autonomous scheduling + Telegram channel).
|
||||
|
||||
---
|
||||
|
||||
## Verification
|
||||
|
||||
| Phase | Test |
|
||||
|-------|------|
|
||||
| 1 — Heartbeat | Set `distill.interval: 1m` in dev config. Capture 5+ related thoughts. Wait 1 min. Query `thought_links` for `relation=distilled_from` rows. Check `agent_job_runs` has a `status=ok` row. |
|
||||
| 1 — Daily notes | Set `daily_notes.hour` to current UTC hour. Restart server. Within 1 min, `list_thoughts` should return a `type=daily_note` entry for today. |
|
||||
| 2 — Knowledge graph | Call `get_knowledge_graph` MCP tool. Verify `nodes` array contains `type=insight` thoughts with `knowledge_node=true`. Verify edges list `distilled_from` links. |
|
||||
| 3 — Telegram inbound | Send a message to the configured bot. Call `search_thoughts` with the message text — should appear with `source=telegram`. |
|
||||
| 3 — Telegram response | Send a question to the bot. Verify a reply arrives in Telegram. Call `list_chat_histories` — should contain the turn. |
|
||||
| 4 — Shell | Call `run_shell_command` with `{"command": "echo hello", "save_output": true}`. Verify `stdout=hello\n`, `exit_code=0`, and a new thought with `type=shell_output`. |
|
||||
| 4 — Blocked command | Call `run_shell_command` with `{"command": "sudo whoami"}`. Verify error returned without execution. |
|
||||
| 5 — Skill discovery | Run `trigger_agent_job` with `{"job": "skill_discovery"}`. Verify new rows in `agent_skills` with tag `auto-discovered`. |
|
||||
| Full loop | Send Telegram message → agent responds → distill job runs → knowledge node created from conversation → MEMORY.md regenerated with new insight. |
|
||||
@@ -33,7 +33,8 @@ In practice, the project has also grown beyond the original v1 scope with additi
|
||||
- stored files and binary resources
|
||||
- agent skills and guardrails
|
||||
- chat history tools
|
||||
- household / maintenance / calendar / meal / CRM tools
|
||||
- maintenance tools
|
||||
- household / calendar / meal / CRM tools (moved to future plugin; see `llm/todo.md`)
|
||||
- OAuth client-credentials support
|
||||
- Ollama support
|
||||
- tool discovery and persistent tool annotations
|
||||
|
||||
46
llm/todo.md
46
llm/todo.md
@@ -1,4 +1,50 @@
|
||||
# AMCS TODO
|
||||
|
||||
## Future Plugin: Lifestyle Tools (calendar, meals, household, CRM)
|
||||
|
||||
The following tool groups have been removed from the core server and are candidates for a separate optional plugin or extension server. The store/tool implementations remain in the codebase but are no longer registered.
|
||||
|
||||
### calendar
|
||||
- `add_family_member` — Add a family member to the household.
|
||||
- `list_family_members` — List all family members.
|
||||
- `add_activity` — Schedule a one-time or recurring family activity.
|
||||
- `get_week_schedule` — Get all activities scheduled for a given week.
|
||||
- `search_activities` — Search activities by title, type, or family member.
|
||||
- `add_important_date` — Track a birthday, anniversary, deadline, or other important date.
|
||||
- `get_upcoming_dates` — Get important dates coming up in the next N days.
|
||||
|
||||
### meals
|
||||
- `add_recipe` — Save a recipe with ingredients and instructions.
|
||||
- `search_recipes` — Search recipes by name, cuisine, tags, or ingredient.
|
||||
- `update_recipe` — Update an existing recipe.
|
||||
- `create_meal_plan` — Set the weekly meal plan; replaces existing.
|
||||
- `get_meal_plan` — Get the meal plan for a given week.
|
||||
- `generate_shopping_list` — Generate shopping list from the weekly meal plan.
|
||||
|
||||
### household
|
||||
- `add_household_item` — Store a household fact (paint, appliance, measurement, etc.).
|
||||
- `search_household_items` — Search household items by name, category, or location.
|
||||
- `get_household_item` — Retrieve a household item by id.
|
||||
- `add_vendor` — Add a service provider (plumber, electrician, landscaper, etc.).
|
||||
- `list_vendors` — List household service vendors, optionally filtered by service type.
|
||||
|
||||
### crm
|
||||
- `add_professional_contact` — Add a professional contact to the CRM.
|
||||
- `search_contacts` — Search professional contacts by name, company, title, notes, or tags.
|
||||
- `log_interaction` — Log an interaction with a professional contact.
|
||||
- `get_contact_history` — Get full history (interactions and opportunities) for a contact.
|
||||
- `create_opportunity` — Create a deal, project, or opportunity linked to a contact.
|
||||
- `get_follow_ups_due` — List contacts with a follow-up date due within the next N days.
|
||||
- `link_thought_to_contact` — Append a stored thought to a contact's notes.
|
||||
|
||||
**Implementation notes:**
|
||||
- Store implementations: `internal/tools/calendar.go`, `internal/tools/meals.go`, `internal/tools/household.go`, `internal/tools/crm.go`
|
||||
- DB store layers: `internal/store/calendar.go`, `internal/store/meals.go`, `internal/store/household.go`, `internal/store/crm.go`
|
||||
- Re-register via `mcpserver.ToolSet` fields: `Household`, `Calendar`, `Meals`, `CRM`
|
||||
- Re-add `registerHouseholdTools`, `registerCalendarTools`, `registerMealTools`, `registerCRMTools` to the register slice in `NewHandlers`
|
||||
- Add catalog entries back in `BuildToolCatalog`
|
||||
|
||||
---
|
||||
## Embedding Backfill and Text-Search Fallback Audit
|
||||
|
||||
This file originally described the planned `backfill_embeddings` work and semantic-to-text fallback behavior. Most of that work is now implemented. This document now tracks what landed, what still needs verification, and what follow-up work remains.
|
||||
|
||||
@@ -2,4 +2,11 @@
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
go run ./cmd/amcs-server --config "${1:-configs/dev.yaml}"
|
||||
CONFIG_PATH="${1:-configs/dev.yaml}"
|
||||
|
||||
if [[ ! -f internal/app/ui/dist/index.html ]]; then
|
||||
echo "UI build not found; building frontend first..."
|
||||
make ui-build
|
||||
fi
|
||||
|
||||
go run ./cmd/amcs-server --config "$CONFIG_PATH"
|
||||
|
||||
16
ui/index.html
Normal file
16
ui/index.html
Normal file
@@ -0,0 +1,16 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>AMCS</title>
|
||||
<meta
|
||||
name="description"
|
||||
content="AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools."
|
||||
/>
|
||||
</head>
|
||||
<body class="bg-slate-950">
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
23
ui/package.json
Normal file
23
ui/package.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "amcs-ui",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"packageManager": "pnpm@10.33.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"check": "svelte-check --tsconfig ./tsconfig.json",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^5.0.3",
|
||||
"@tailwindcss/vite": "^4.1.4",
|
||||
"@types/node": "^24.5.2",
|
||||
"svelte": "^5.28.2",
|
||||
"svelte-check": "^4.1.6",
|
||||
"tailwindcss": "^4.1.4",
|
||||
"typescript": "^5.8.3",
|
||||
"vite": "^6.3.2"
|
||||
}
|
||||
}
|
||||
1291
ui/pnpm-lock.yaml
generated
Normal file
1291
ui/pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
262
ui/src/App.svelte
Normal file
262
ui/src/App.svelte
Normal file
@@ -0,0 +1,262 @@
|
||||
<script lang="ts">
  import { onMount } from "svelte";

  /** One row of the recent-access table, as reported by GET /api/status. */
  type AccessEntry = {
    key_id: string;
    last_accessed_at: string;
    last_path: string;
    user_agent: string;
    request_count: number;
  };

  /** Shape of the JSON payload returned by GET /api/status. */
  type StatusResponse = {
    title: string;
    description: string;
    version: string;
    build_date: string;
    commit: string;
    connected_count: number;
    total_known: number;
    connected_window: string;
    oauth_enabled: boolean;
    entries: AccessEntry[];
  };

  // View state. Top-level `let` bindings drive the template
  // (Svelte legacy reactivity: reassignment triggers a re-render).
  let data: StatusResponse | null = null;
  let loading = true;
  let error = "";

  // Static navigation shortcuts rendered under the page header.
  const quickLinks = [
    { href: "/llm", label: "LLM Instructions" },
    { href: "/healthz", label: "Health Check" },
    { href: "/readyz", label: "Readiness Check" },
  ];

  /**
   * Fetch the status snapshot from the backend and store it in `data`.
   * Failures are surfaced through `error`; `loading` brackets the call
   * so the template can show a spinner state.
   */
  async function loadStatus() {
    loading = true;
    error = "";

    try {
      const res = await fetch("/api/status");
      if (!res.ok) {
        throw new Error(`Status request failed with ${res.status}`);
      }
      data = (await res.json()) as StatusResponse;
    } catch (cause) {
      error = cause instanceof Error ? cause.message : "Failed to load status";
    } finally {
      loading = false;
    }
  }

  /** Render a backend timestamp string in the viewer's locale. */
  const formatDate = (value: string) => new Date(value).toLocaleString();

  // Load the snapshot once the component is on screen.
  onMount(loadStatus);
</script>
|
||||
|
||||
<svelte:head>
  <title>AMCS</title>
</svelte:head>

<!-- Page shell: full-height dark background. -->
<div class="min-h-screen bg-slate-950 text-slate-100">
  <main
    class="mx-auto flex min-h-screen max-w-7xl flex-col px-4 py-6 sm:px-6 lg:px-8"
  >
    <!-- Hero card: banner image, description, quick links and headline stats. -->
    <section
      class="overflow-hidden rounded-3xl border border-white/10 bg-slate-900 shadow-2xl shadow-slate-950/40"
    >
      <img
        src="/images/project.jpg"
        alt="Avelon Memory Crystal"
        class="h-64 w-full object-cover object-center sm:h-80"
      />

      <div class="grid gap-8 p-6 sm:p-8 lg:grid-cols-[1.6fr_1fr] lg:p-10">
        <div class="space-y-6">
          <div class="space-y-4">
            <div
              class="inline-flex items-center gap-2 rounded-full border border-cyan-400/20 bg-cyan-400/10 px-3 py-1 text-sm font-medium text-cyan-200"
            >
              <span class="h-2 w-2 rounded-full bg-emerald-400"></span>
              <!-- NOTE(review): "Avalon" here vs "Avelon" in the <h1> and the
                   <img alt> above — confirm the canonical product spelling. -->
              Avalon Memory Crystal Server
            </div>
            <div>
              <h1
                class="text-3xl font-semibold tracking-tight text-white sm:text-4xl"
              >
                Avelon Memory Crystal Server (AMCS)
              </h1>
              <!-- Falls back to a static blurb until /api/status responds. -->
              <p
                class="mt-3 max-w-3xl text-base leading-7 text-slate-300 sm:text-lg"
              >
                {data?.description ??
                  "AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools."}
              </p>
            </div>
          </div>

          <!-- Quick links; the OAuth link renders only when the backend reports oauth_enabled. -->
          <div class="flex flex-wrap gap-3">
            {#each quickLinks as link}
              <a
                class="inline-flex items-center justify-center rounded-xl border border-cyan-300/20 bg-cyan-400/10 px-4 py-2 text-sm font-semibold text-cyan-100 transition hover:border-cyan-300/40 hover:bg-cyan-400/20"
                href={link.href}>{link.label}</a
              >
            {/each}
            {#if data?.oauth_enabled}
              <a
                class="inline-flex items-center justify-center rounded-xl border border-violet-300/20 bg-violet-400/10 px-4 py-2 text-sm font-semibold text-violet-100 transition hover:border-violet-300/40 hover:bg-violet-400/20"
                href="/oauth-authorization-server">OAuth Authorization Server</a
              >
            {/if}
          </div>

          <!-- Headline stats; "—" placeholders show until the status payload arrives. -->
          <div class="grid gap-4 sm:grid-cols-3">
            <div class="rounded-2xl border border-white/10 bg-white/5 p-5">
              <p class="text-sm uppercase tracking-[0.2em] text-slate-400">
                Connected users
              </p>
              <p class="mt-2 text-3xl font-semibold text-white">
                {data?.connected_count ?? "—"}
              </p>
            </div>
            <div class="rounded-2xl border border-white/10 bg-white/5 p-5">
              <p class="text-sm uppercase tracking-[0.2em] text-slate-400">
                Known principals
              </p>
              <p class="mt-2 text-3xl font-semibold text-white">
                {data?.total_known ?? "—"}
              </p>
            </div>
            <div class="rounded-2xl border border-white/10 bg-white/5 p-5">
              <p class="text-sm uppercase tracking-[0.2em] text-slate-400">
                Version
              </p>
              <p class="mt-2 break-all text-2xl font-semibold text-white">
                {data?.version ?? "—"}
              </p>
            </div>
          </div>
        </div>

        <!-- Build metadata sidebar. -->
        <aside
          class="space-y-4 rounded-2xl border border-white/10 bg-slate-950/50 p-5"
        >
          <div>
            <h2 class="text-lg font-semibold text-white">Build details</h2>
            <p class="mt-1 text-sm text-slate-400">The same status info.</p>
          </div>
          <dl class="space-y-3 text-sm text-slate-300">
            <div>
              <dt class="text-slate-500">Build date</dt>
              <dd class="mt-1 font-medium text-white">
                {data?.build_date ?? "unknown"}
              </dd>
            </div>
            <div>
              <dt class="text-slate-500">Commit</dt>
              <dd
                class="mt-1 break-all rounded-lg bg-white/5 px-3 py-2 font-mono text-xs text-cyan-100"
              >
                {data?.commit ?? "unknown"}
              </dd>
            </div>
            <div>
              <dt class="text-slate-500">Connected window</dt>
              <dd class="mt-1 font-medium text-white">
                {data?.connected_window ?? "last 10 minutes"}
              </dd>
            </div>
          </dl>
        </aside>
      </div>
    </section>

    <!-- Recent-access table with loading / error / empty states. -->
    <section
      class="mt-6 rounded-3xl border border-white/10 bg-slate-900/80 p-6 shadow-xl shadow-slate-950/20 sm:p-8"
    >
      <div
        class="flex flex-col gap-3 sm:flex-row sm:items-end sm:justify-between"
      >
        <div>
          <h2 class="text-2xl font-semibold text-white">Recent access</h2>
          <p class="mt-1 text-sm text-slate-400">
            Authenticated principals AMCS has seen recently.
          </p>
        </div>
        <!-- Re-fetch the status snapshot on demand. -->
        <button
          class="inline-flex items-center justify-center rounded-xl border border-white/10 bg-white/5 px-4 py-2 text-sm font-medium text-slate-200 transition hover:bg-white/10"
          on:click={loadStatus}
        >
          Refresh
        </button>
      </div>

      {#if loading}
        <div
          class="mt-6 rounded-2xl border border-dashed border-white/10 bg-slate-950/40 px-4 py-10 text-center text-slate-400"
        >
          Loading status…
        </div>
      {:else if error}
        <div
          class="mt-6 rounded-2xl border border-rose-400/30 bg-rose-400/10 px-4 py-6 text-sm text-rose-100"
        >
          <p class="font-semibold">Couldn’t load the status snapshot.</p>
          <p class="mt-1 text-rose-100/80">{error}</p>
        </div>
      {:else if data && data.entries.length === 0}
        <div
          class="mt-6 rounded-2xl border border-dashed border-white/10 bg-slate-950/40 px-4 py-10 text-center text-slate-400"
        >
          No authenticated access recorded yet.
        </div>
      {:else if data}
        <div class="mt-6 overflow-hidden rounded-2xl border border-white/10">
          <div class="overflow-x-auto">
            <table
              class="min-w-full divide-y divide-white/10 text-left text-sm text-slate-300"
            >
              <thead
                class="bg-white/5 text-xs uppercase tracking-[0.2em] text-slate-500"
              >
                <tr>
                  <th class="px-4 py-3 font-medium">Principal</th>
                  <th class="px-4 py-3 font-medium">Last accessed</th>
                  <th class="px-4 py-3 font-medium">Last path</th>
                  <th class="px-4 py-3 font-medium">Agent</th>
                  <th class="px-4 py-3 font-medium">Requests</th>
                </tr>
              </thead>
              <tbody class="divide-y divide-white/5 bg-slate-950/30">
                {#each data.entries as entry}
                  <tr class="hover:bg-white/[0.03]">
                    <td class="px-4 py-3 align-top"
                      ><code
                        class="rounded bg-white/5 px-2 py-1 font-mono text-xs text-cyan-100"
                        >{entry.key_id}</code
                      ></td
                    >
                    <td class="px-4 py-3 align-top text-slate-200"
                      >{formatDate(entry.last_accessed_at)}</td
                    >
                    <td class="px-4 py-3 align-top"
                      ><code class="text-slate-100">{entry.last_path}</code></td
                    >
                    <!-- user_agent is typed as string; the "—" fallback only fires if the backend sends null. -->
                    <td class="px-4 py-3 align-top text-slate-400 text-xs max-w-[16rem] truncate"
                      >{entry.user_agent ?? "—"}</td
                    >
                    <td class="px-4 py-3 align-top font-semibold text-white"
                      >{entry.request_count}</td
                    >
                  </tr>
                {/each}
              </tbody>
            </table>
          </div>
        </div>
      {/if}
    </section>
  </main>
</div>
|
||||
16
ui/src/app.css
Normal file
16
ui/src/app.css
Normal file
@@ -0,0 +1,16 @@
|
||||
/* Tailwind CSS v4 entry point: pulls in the whole utility layer. */
@import 'tailwindcss';

:root {
  /* Ask the browser to render built-in UI (scrollbars, form controls)
     with its dark palette so it matches the app theme. */
  color-scheme: dark;
  font-family: Inter, system-ui, sans-serif;
}

/* Let the #app mount point stretch to the full viewport height. */
html,
body,
#app {
  min-height: 100%;
}

/* Drop the browser's default page margin. */
body {
  margin: 0;
}
|
||||
9
ui/src/main.ts
Normal file
9
ui/src/main.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import './app.css';
|
||||
import App from './App.svelte';
|
||||
import { mount } from 'svelte';
|
||||
|
||||
const app = mount(App, {
|
||||
target: document.getElementById('app')!
|
||||
});
|
||||
|
||||
export default app;
|
||||
5
ui/svelte.config.js
Normal file
5
ui/svelte.config.js
Normal file
@@ -0,0 +1,5 @@
|
||||
// Svelte compiler configuration for the UI.
// Dev-mode compilation (extra runtime checks and HMR metadata) is enabled
// for every environment except production builds.
const isProduction = process.env.NODE_ENV === 'production';

export default {
  compilerOptions: {
    dev: !isProduction
  }
};
|
||||
15
ui/tsconfig.json
Normal file
15
ui/tsconfig.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"extends": "./tsconfig.node.json",
|
||||
"compilerOptions": {
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"target": "ES2022",
|
||||
"lib": ["ESNext", "DOM"],
|
||||
"verbatimModuleSyntax": true,
|
||||
"strict": true,
|
||||
"allowJs": true,
|
||||
"checkJs": false,
|
||||
"types": ["svelte", "node"]
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.js", "src/**/*.svelte", "vite.config.ts"]
|
||||
}
|
||||
8
ui/tsconfig.node.json
Normal file
8
ui/tsconfig.node.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"allowSyntheticDefaultImports": true
|
||||
}
|
||||
}
|
||||
31
ui/vite.config.ts
Normal file
31
ui/vite.config.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { defineConfig } from 'vite';
|
||||
import { svelte } from '@sveltejs/vite-plugin-svelte';
|
||||
import tailwindcss from '@tailwindcss/vite';
|
||||
|
||||
const backendTarget = process.env.AMCS_UI_BACKEND ?? 'http://127.0.0.1:8080';
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [svelte(), tailwindcss()],
|
||||
server: {
|
||||
host: '0.0.0.0',
|
||||
port: 5173,
|
||||
proxy: {
|
||||
'/api': backendTarget,
|
||||
'/healthz': backendTarget,
|
||||
'/readyz': backendTarget,
|
||||
'/llm': backendTarget,
|
||||
'/images': backendTarget,
|
||||
'/favicon.ico': backendTarget,
|
||||
'/mcp': backendTarget,
|
||||
'/files': backendTarget,
|
||||
'/oauth-authorization-server': backendTarget,
|
||||
'/authorize': backendTarget,
|
||||
'/oauth': backendTarget,
|
||||
'/.well-known': backendTarget
|
||||
}
|
||||
},
|
||||
build: {
|
||||
outDir: '../internal/app/ui/dist',
|
||||
emptyOutDir: true
|
||||
}
|
||||
});
|
||||
Reference in New Issue
Block a user