feat(ai): add support for Ollama AI provider configuration
* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
This commit is contained in:
@@ -28,7 +28,7 @@ func (c Config) Validate() error {
 	}

 	switch c.AI.Provider {
-	case "litellm", "openrouter":
+	case "litellm", "ollama", "openrouter":
 	default:
 		return fmt.Errorf("invalid config: unsupported ai.provider %q", c.AI.Provider)
 	}
@@ -45,6 +45,13 @@ func (c Config) Validate() error {
 		if strings.TrimSpace(c.AI.LiteLLM.APIKey) == "" {
 			return fmt.Errorf("invalid config: ai.litellm.api_key is required when ai.provider=litellm")
 		}
+	case "ollama":
+		if strings.TrimSpace(c.AI.Ollama.BaseURL) == "" {
+			return fmt.Errorf("invalid config: ai.ollama.base_url is required when ai.provider=ollama")
+		}
+		if strings.TrimSpace(c.AI.Ollama.APIKey) == "" {
+			return fmt.Errorf("invalid config: ai.ollama.api_key is required when ai.provider=ollama")
+		}
 	case "openrouter":
 		if strings.TrimSpace(c.AI.OpenRouter.BaseURL) == "" {
 			return fmt.Errorf("invalid config: ai.openrouter.base_url is required when ai.provider=openrouter")
||||
Reference in New Issue
Block a user