mirror of
https://github.com/Warky-Devs/vecna.git
synced 2026-05-05 01:26:58 +00:00
feat: 🎉 Vectors na Vectors, the beginning
Translate 1536 <-> 768, 3072 <-> 2048
This commit is contained in:
82
docker-compose.example.yml
Normal file
82
docker-compose.example.yml
Normal file
@@ -0,0 +1,82 @@
|
||||
services:
|
||||
|
||||
# ── vecna proxy ─────────────────────────────────────────────────────────────
|
||||
vecna:
|
||||
build: .
|
||||
# image: ghcr.io/warky-devs/vecna:latest
|
||||
ports:
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- vecna_config:/config
|
||||
environment:
|
||||
VECNA_SERVER_PORT: 8080
|
||||
# VECNA_SERVER_API_KEYS: sk-vecna-abc123,sk-vecna-def456
|
||||
command: ["serve", "--config", "/config/vecna.json"]
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
ollama:
|
||||
condition: service_healthy
|
||||
|
||||
# ── ollama (local embedding model) ──────────────────────────────────────────
|
||||
ollama:
|
||||
image: ollama/ollama:latest
|
||||
ports:
|
||||
- "11434:11434"
|
||||
volumes:
|
||||
- ollama_data:/root/.ollama
|
||||
healthcheck:
|
||||
test: ["CMD", "ollama", "list"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 6
|
||||
start_period: 20s
|
||||
restart: unless-stopped
|
||||
|
||||
# ── pull the embedding model on first start ──────────────────────────────────
|
||||
# Remove this service after the model has been pulled once.
|
||||
ollama-pull:
|
||||
image: ollama/ollama:latest
|
||||
depends_on:
|
||||
ollama:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
OLLAMA_HOST: http://ollama:11434
|
||||
entrypoint: ["ollama", "pull", "nomic-embed-text"]
|
||||
restart: "no"
|
||||
|
||||
# ── prometheus (optional, for metrics scraping) ──────────────────────────────
|
||||
# Requires metrics.enabled: true in /config/vecna.json
|
||||
prometheus:
|
||||
image: prom/prometheus:latest
|
||||
ports:
|
||||
- "9090:9090"
|
||||
volumes:
|
||||
- ./prometheus.example.yml:/etc/prometheus/prometheus.yml:ro
|
||||
restart: unless-stopped
|
||||
profiles:
|
||||
- metrics
|
||||
|
||||
volumes:
|
||||
ollama_data:
|
||||
vecna_config: # persists vecna.json across container rebuilds
|
||||
|
||||
# ── one-off commands ──────────────────────────────────────────────────────────
|
||||
#
|
||||
# Run the interactive onboard wizard (writes config into the vecna_config volume):
|
||||
#
|
||||
# docker compose run --rm -it vecna onboard --config /config/vecna.json
|
||||
#
|
||||
# The wizard will discover the ollama service on the Docker network at
|
||||
# http://ollama:11434 (select it from the list or enter the URL manually).
|
||||
#
|
||||
# Test all configured endpoints after onboarding:
|
||||
#
|
||||
# docker compose run --rm vecna test --config /config/vecna.json
|
||||
#
|
||||
# Remove broken endpoints automatically:
|
||||
#
|
||||
# docker compose run --rm vecna test --config /config/vecna.json --remove-broken
|
||||
#
|
||||
# Open the config in a shell editor (requires the alpine image):
|
||||
#
|
||||
# docker compose run --rm -it vecna sh -c "vi /config/vecna.json"
|
||||
Reference in New Issue
Block a user