# amcs/configs/dev.yaml — development configuration
# Commit: 5f48a197e8 (Hein) — feat(mcp): add SSE transport support and related configuration options
# Date: 2026-04-05 15:57:34 +02:00
# NOTE(review): CI status line in the original paste ("Failing after -30m37s") showed a
# negative duration — UI artifact, not meaningful; check the pipeline directly.
# HTTP server settings.
server:
  host: "0.0.0.0"        # bind on all interfaces — acceptable for dev only
  port: 8080
  read_timeout: "10m"    # long timeouts — presumably to keep streaming connections alive; confirm
  write_timeout: "10m"
  idle_timeout: "60s"
  allowed_origins:
    - "*"                # CORS wide open — dev config; tighten for production
# MCP endpoint configuration.
# NOTE(review): the pasted source had lost all indentation — this stanza may
# originally have been nested under `server:`; confirm against the config loader.
mcp:
  path: "/mcp"                  # streamable-HTTP endpoint path
  sse_path: "/sse"              # SSE transport endpoint path
  server_name: "amcs"
  transport: "streamable_http"  # presumably one of streamable_http | sse — confirm allowed values
  session_timeout: "10m"
# API-key authentication for incoming requests.
auth:
  header_name: "x-brain-key"  # request header carrying the API key
  query_param: "key"          # query-string parameter name (only used if enabled below)
  allow_query_param: false    # disabled by default — keeps keys out of URLs and access logs
  keys:
    - id: "local-client"
      value: "replace-me"     # placeholder — set a real secret; never commit real keys
      description: "main local client key"
# OAuth client-credentials clients.
oauth:
  clients:
    - id: "oauth-client"
      client_id: ""      # empty in dev; required when auth.mode=oauth_client_credentials
      client_secret: ""  # placeholder — inject from a secret store, not VCS
      description: "used when auth.mode=oauth_client_credentials"
# PostgreSQL connection pool.
database:
  # Dev-only credentials and sslmode=disable — do not reuse in production.
  url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable"
  max_conns: 10
  min_conns: 2
  max_conn_lifetime: "30m"
  max_conn_idle_time: "10m"
# AI provider configuration. `provider` presumably selects one of the nested
# provider sections below (litellm | ollama | openrouter) — confirm against loader.
ai:
  provider: "litellm"
  embeddings:
    model: "openai/text-embedding-3-small"
    dimensions: 1536
  metadata:
    model: "gpt-4o-mini"
    temperature: 0.1          # low temperature — favors deterministic metadata extraction
    log_conversations: false  # keep conversation contents out of logs by default
  litellm:
    base_url: "http://localhost:4000/v1"
    api_key: "replace-me"     # placeholder — set a real key outside VCS
    use_responses_api: false
    request_headers: {}       # explicit empty map (bare `key:` would be null)
    embedding_model: "openrouter/openai/text-embedding-3-small"
    metadata_model: "gpt-4o-mini"
  ollama:
    base_url: "http://localhost:11434/v1"
    api_key: "ollama"         # NOTE(review): looks like a dummy value the local daemon ignores — confirm
    request_headers: {}
  openrouter:
    base_url: "https://openrouter.ai/api/v1"
    api_key: ""
    app_name: "amcs"
    site_url: ""
    extra_headers: {}
# Defaults applied to captured items.
capture:
  source: "mcp"
  metadata_defaults:
    type: "observation"
    topic_fallback: "uncategorized"  # presumably used when no topic can be determined — confirm
# Search defaults and limits.
search:
  default_limit: 10
  default_threshold: 0.5  # similarity threshold — metric (cosine/etc.) not visible here; confirm
  max_limit: 50           # hard cap on client-requested result counts
# Logging output.
logging:
  level: "info"
  format: "json"
# Observability endpoints.
observability:
  metrics_enabled: true
  pprof_enabled: false  # keep profiling endpoints off unless actively debugging
# Periodic retry job for metadata processing (disabled in dev).
metadata_retry:
  enabled: false
  run_on_startup: false
  interval: "24h"
  max_per_run: 100         # batch cap per run
  include_archived: false