feat(ai): add support for Ollama AI provider configuration
* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
This commit is contained in:
@@ -6,6 +6,7 @@ import (
|
||||
"net/http"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/ai/litellm"
|
||||
"git.warky.dev/wdevs/amcs/internal/ai/ollama"
|
||||
"git.warky.dev/wdevs/amcs/internal/ai/openrouter"
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
)
|
||||
@@ -14,6 +15,8 @@ func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger)
|
||||
switch cfg.Provider {
|
||||
case "litellm":
|
||||
return litellm.New(cfg, httpClient, log)
|
||||
case "ollama":
|
||||
return ollama.New(cfg, httpClient, log)
|
||||
case "openrouter":
|
||||
return openrouter.New(cfg, httpClient, log)
|
||||
default:
|
||||
|
||||
33
internal/ai/factory_test.go
Normal file
33
internal/ai/factory_test.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package ai
|
||||
|
||||
import (
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
)
|
||||
|
||||
func TestNewProviderSupportsOllama(t *testing.T) {
|
||||
provider, err := NewProvider(config.AIConfig{
|
||||
Provider: "ollama",
|
||||
Embeddings: config.AIEmbeddingConfig{
|
||||
Model: "nomic-embed-text",
|
||||
Dimensions: 768,
|
||||
},
|
||||
Metadata: config.AIMetadataConfig{
|
||||
Model: "llama3.2",
|
||||
},
|
||||
Ollama: config.OllamaConfig{
|
||||
BaseURL: "http://localhost:11434/v1",
|
||||
APIKey: "ollama",
|
||||
},
|
||||
}, &http.Client{}, slog.New(slog.NewTextHandler(io.Discard, nil)))
|
||||
if err != nil {
|
||||
t.Fatalf("NewProvider() error = %v", err)
|
||||
}
|
||||
if provider.Name() != "ollama" {
|
||||
t.Fatalf("provider name = %q, want ollama", provider.Name())
|
||||
}
|
||||
}
|
||||
24
internal/ai/ollama/client.go
Normal file
24
internal/ai/ollama/client.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package ollama
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"git.warky.dev/wdevs/amcs/internal/ai/compat"
|
||||
"git.warky.dev/wdevs/amcs/internal/config"
|
||||
)
|
||||
|
||||
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
|
||||
return compat.New(compat.Config{
|
||||
Name: "ollama",
|
||||
BaseURL: cfg.Ollama.BaseURL,
|
||||
APIKey: cfg.Ollama.APIKey,
|
||||
EmbeddingModel: cfg.Embeddings.Model,
|
||||
MetadataModel: cfg.Metadata.Model,
|
||||
Temperature: cfg.Metadata.Temperature,
|
||||
Headers: cfg.Ollama.RequestHeaders,
|
||||
HTTPClient: httpClient,
|
||||
Log: log,
|
||||
Dimensions: cfg.Embeddings.Dimensions,
|
||||
}), nil
|
||||
}
|
||||
Reference in New Issue
Block a user