# mirror of https://github.com/Warky-Devs/vecna.git
# synced 2026-05-05 01:26:58 +00:00
#
# * set default config path to /config
# * update docker-compose example for config usage
# * modify config resolution to include /config directory
services:
  # ── vecna proxy ─────────────────────────────────────────────────────────────
  # Build from the local checkout by default; switch to the published image by
  # swapping the `build:` line for the commented `image:` line below.
  vecna:
    build: .
    # image: ghcr.io/warky-devs/vecna:latest
    ports:
      - "8080:8080"
    volumes:
      # Persists vecna.json (written by `vecna onboard`) across rebuilds.
      - vecna_config:/config
    environment:
      # Quoted: compose env values are strings; bare 8080 would parse as int.
      VECNA_SERVER_PORT: "8080"
      # VECNA_CONFIG: /config/vecna.json  # default; override to use a different path
      # VECNA_SERVER_API_KEYS: sk-vecna-abc123,sk-vecna-def456
    restart: unless-stopped
    depends_on:
      # Wait for the embedding backend to pass its healthcheck before starting.
      ollama:
        condition: service_healthy
# ── ollama (local embedding model) ──────────────────────────────────────────
|
|
ollama:
|
|
image: ollama/ollama:latest
|
|
ports:
|
|
- "11434:11434"
|
|
volumes:
|
|
- ollama_data:/root/.ollama
|
|
healthcheck:
|
|
test: ["CMD", "ollama", "list"]
|
|
interval: 10s
|
|
timeout: 5s
|
|
retries: 6
|
|
start_period: 20s
|
|
restart: unless-stopped
|
|
|
|
# ── pull the embedding model on first start ──────────────────────────────────
|
|
# Remove this service after the model has been pulled once.
|
|
ollama-pull:
|
|
image: ollama/ollama:latest
|
|
depends_on:
|
|
ollama:
|
|
condition: service_healthy
|
|
environment:
|
|
OLLAMA_HOST: http://ollama:11434
|
|
entrypoint: ["ollama", "pull", "nomic-embed-text"]
|
|
restart: "no"
|
|
|
|
# ── prometheus (optional, for metrics scraping) ──────────────────────────────
|
|
# Requires metrics.enabled: true in /config/vecna.json
|
|
prometheus:
|
|
image: prom/prometheus:latest
|
|
ports:
|
|
- "9090:9090"
|
|
volumes:
|
|
- ./prometheus.example.yml:/etc/prometheus/prometheus.yml:ro
|
|
restart: unless-stopped
|
|
profiles:
|
|
- metrics
|
|
|
|
volumes:
  ollama_data:
  vecna_config:  # persists vecna.json across container rebuilds
# ── one-off commands ──────────────────────────────────────────────────────────
#
# Run the interactive onboard wizard (writes config into the vecna_config volume):
#
#   docker compose run --rm -it vecna onboard
#
# The wizard will discover the ollama service on the Docker network at
# http://ollama:11434 (select it from the list or enter the URL manually).
#
# Test all configured endpoints after onboarding:
#
#   docker compose run --rm vecna test
#
# Remove broken endpoints automatically:
#
#   docker compose run --rm vecna test --remove-broken
#
# Open the config in a shell editor (requires the alpine image):
#
#   docker compose run --rm -it vecna sh -c "vi /config/vecna.json"