feat(ai): add support for Ollama AI provider configuration
* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
This commit is contained in:
@@ -73,6 +73,10 @@ func defaultConfig() Config {
 			Model:       "gpt-4o-mini",
 			Temperature: 0.1,
 		},
+		Ollama: OllamaConfig{
+			BaseURL: "http://localhost:11434/v1",
+			APIKey:  "ollama",
+		},
 	},
 	Capture: CaptureConfig{
 		Source: DefaultSource,
@@ -97,6 +101,8 @@ func applyEnvOverrides(cfg *Config) {
 	overrideString(&cfg.Database.URL, "OB1_DATABASE_URL")
 	overrideString(&cfg.AI.LiteLLM.BaseURL, "OB1_LITELLM_BASE_URL")
 	overrideString(&cfg.AI.LiteLLM.APIKey, "OB1_LITELLM_API_KEY")
+	overrideString(&cfg.AI.Ollama.BaseURL, "OB1_OLLAMA_BASE_URL")
+	overrideString(&cfg.AI.Ollama.APIKey, "OB1_OLLAMA_API_KEY")
 	overrideString(&cfg.AI.OpenRouter.APIKey, "OB1_OPENROUTER_API_KEY")

 	if value, ok := os.LookupEnv("OB1_SERVER_PORT"); ok {
Reference in New Issue
Block a user