* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
26 lines
639 B
Go
26 lines
639 B
Go
package ai
|
|
|
|
import (
|
|
"fmt"
|
|
"log/slog"
|
|
"net/http"
|
|
|
|
"git.warky.dev/wdevs/amcs/internal/ai/litellm"
|
|
"git.warky.dev/wdevs/amcs/internal/ai/ollama"
|
|
"git.warky.dev/wdevs/amcs/internal/ai/openrouter"
|
|
"git.warky.dev/wdevs/amcs/internal/config"
|
|
)
|
|
|
|
func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (Provider, error) {
|
|
switch cfg.Provider {
|
|
case "litellm":
|
|
return litellm.New(cfg, httpClient, log)
|
|
case "ollama":
|
|
return ollama.New(cfg, httpClient, log)
|
|
case "openrouter":
|
|
return openrouter.New(cfg, httpClient, log)
|
|
default:
|
|
return nil, fmt.Errorf("unsupported ai.provider: %s", cfg.Provider)
|
|
}
|
|
}
|