feat(ai): add support for Ollama AI provider configuration
* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
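The exact shape of the Ollama settings is not shown in this commit; below is a minimal sketch of what the enhanced config validation might look like. Only the `Provider` field and the "ollama" value are confirmed by the diff; `BaseURL` and `Model` are assumed field names for illustration, and http://localhost:11434 is simply Ollama's well-known default endpoint.

	// Hypothetical sketch of Ollama-related config validation.
	// Field names other than Provider are assumptions.
	package config

	import "fmt"

	type AIConfig struct {
		Provider string `yaml:"provider"` // "litellm", "ollama", or "openrouter"
		BaseURL  string `yaml:"base_url"` // assumed: e.g. http://localhost:11434 for Ollama
		Model    string `yaml:"model"`    // assumed: name of the model the provider should use
	}

	// Validate checks that an Ollama configuration carries the fields
	// the provider would need before the factory constructs it.
	func (c AIConfig) Validate() error {
		if c.Provider == "ollama" {
			if c.BaseURL == "" {
				return fmt.Errorf("ai: ollama provider requires base_url")
			}
			if c.Model == "" {
				return fmt.Errorf("ai: ollama provider requires model")
			}
		}
		return nil
	}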
@@ -6,6 +6,7 @@ import (
 	"net/http"
 
 	"git.warky.dev/wdevs/amcs/internal/ai/litellm"
+	"git.warky.dev/wdevs/amcs/internal/ai/ollama"
 	"git.warky.dev/wdevs/amcs/internal/ai/openrouter"
 	"git.warky.dev/wdevs/amcs/internal/config"
 )
@@ -14,6 +15,8 @@ func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger)
 	switch cfg.Provider {
 	case "litellm":
 		return litellm.New(cfg, httpClient, log)
+	case "ollama":
+		return ollama.New(cfg, httpClient, log)
 	case "openrouter":
 		return openrouter.New(cfg, httpClient, log)
 	default:
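The ollama package itself is not part of this excerpt; the sketch below shows what its constructor might look like, given only the New(cfg, httpClient, log) call visible in the factory switch above. The Provider struct name and the (Provider, error) return shape are assumptions for illustration.

	// Hypothetical sketch of the ollama package's constructor; only the
	// New(cfg, httpClient, log) signature is confirmed by the diff above.
	package ollama

	import (
		"log/slog"
		"net/http"

		"git.warky.dev/wdevs/amcs/internal/config"
	)

	// Provider is an assumed struct name; the real implementation may differ.
	type Provider struct {
		cfg    config.AIConfig
		client *http.Client
		log    *slog.Logger
	}

	// New mirrors the constructor signature used by the factory's switch,
	// storing the shared HTTP client and logger for later requests.
	func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*Provider, error) {
		return &Provider{cfg: cfg, client: httpClient, log: log}, nil
	}

Keeping every backend behind the same constructor signature lets the factory dispatch with one case per provider, which is presumably why the Ollama support slots into NewProvider with only two added lines.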