Files
amcs/internal/config/loader.go
Hein c8ca272b03 feat(ai): add support for Ollama AI provider configuration
* Update README with Ollama integration details
* Add Ollama configuration to example YAML files
* Implement Ollama provider in AI factory
* Add tests for Ollama provider functionality
* Enhance config validation for Ollama settings
2026-03-25 12:26:31 +02:00

120 lines
2.6 KiB
Go

package config
import (
"fmt"
"os"
"strconv"
"strings"
"time"
"gopkg.in/yaml.v3"
)
// Load reads the YAML configuration file at the resolved path and
// returns the parsed configuration together with the path actually
// used. Values are layered in order: compiled-in defaults, then the
// YAML file, then OB1_* environment overrides, then validation.
func Load(explicitPath string) (*Config, string, error) {
	path := ResolvePath(explicitPath)

	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, path, fmt.Errorf("read config %q: %w", path, err)
	}

	// Start from the defaults so fields absent from the YAML keep
	// sensible values rather than Go zero values.
	cfg := defaultConfig()
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		return nil, path, fmt.Errorf("decode config %q: %w", path, err)
	}

	// Environment variables win over both defaults and file contents.
	applyEnvOverrides(&cfg)

	if err := cfg.Validate(); err != nil {
		return nil, path, err
	}
	return &cfg, path, nil
}
// ResolvePath decides which configuration file to load. Precedence:
// a non-blank explicit path, then the OB1_CONFIG environment variable,
// then the package-level DefaultConfigPath.
func ResolvePath(explicitPath string) string {
	// An explicit path is honored verbatim (not trimmed) as long as it
	// is not purely whitespace.
	if strings.TrimSpace(explicitPath) != "" {
		return explicitPath
	}
	// The environment value, by contrast, is returned trimmed.
	envPath := strings.TrimSpace(os.Getenv("OB1_CONFIG"))
	if envPath != "" {
		return envPath
	}
	return DefaultConfigPath
}
// defaultConfig returns the baseline configuration that YAML decoding
// and environment overrides are layered on top of. Every section is
// populated so a missing YAML key falls back to a usable value.
func defaultConfig() Config {
	var cfg Config

	cfg.Server = ServerConfig{
		Host:         "0.0.0.0",
		Port:         8080,
		ReadTimeout:  15 * time.Second,
		WriteTimeout: 30 * time.Second,
		IdleTimeout:  60 * time.Second,
	}

	cfg.MCP = MCPConfig{
		Path:       "/mcp",
		ServerName: "amcs",
		Version:    "0.1.0",
		Transport:  "streamable_http",
	}

	cfg.Auth = AuthConfig{
		Mode:       "api_keys",
		HeaderName: "x-brain-key",
		QueryParam: "key",
	}

	cfg.AI = AIConfig{
		Provider: "litellm",
		Embeddings: AIEmbeddingConfig{
			Model:      "openai/text-embedding-3-small",
			Dimensions: 1536,
		},
		Metadata: AIMetadataConfig{
			Model:       "gpt-4o-mini",
			Temperature: 0.1,
		},
		// Ollama's OpenAI-compatible endpoint ignores the API key but
		// clients require a non-empty value; "ollama" is the convention.
		Ollama: OllamaConfig{
			BaseURL: "http://localhost:11434/v1",
			APIKey:  "ollama",
		},
	}

	cfg.Capture = CaptureConfig{
		Source: DefaultSource,
		MetadataDefaults: CaptureMetadataDefault{
			Type:          "observation",
			TopicFallback: "uncategorized",
		},
	}

	cfg.Search = SearchConfig{
		DefaultLimit:     10,
		DefaultThreshold: 0.5,
		MaxLimit:         50,
	}

	cfg.Logging = LoggingConfig{
		Level:  "info",
		Format: "json",
	}

	return cfg
}
// applyEnvOverrides layers OB1_* environment variables on top of the
// configuration loaded from disk. String variables replace the target
// verbatim (whitespace-trimmed) whenever they are set, even when set
// to empty. OB1_SERVER_PORT is applied only when it parses as a valid
// TCP port (1-65535); malformed or out-of-range values are silently
// ignored, matching the existing best-effort behavior for bad input.
func applyEnvOverrides(cfg *Config) {
	overrideString(&cfg.Database.URL, "OB1_DATABASE_URL")
	overrideString(&cfg.AI.LiteLLM.BaseURL, "OB1_LITELLM_BASE_URL")
	overrideString(&cfg.AI.LiteLLM.APIKey, "OB1_LITELLM_API_KEY")
	overrideString(&cfg.AI.Ollama.BaseURL, "OB1_OLLAMA_BASE_URL")
	overrideString(&cfg.AI.Ollama.APIKey, "OB1_OLLAMA_API_KEY")
	overrideString(&cfg.AI.OpenRouter.APIKey, "OB1_OPENROUTER_API_KEY")
	if value, ok := os.LookupEnv("OB1_SERVER_PORT"); ok {
		// Guard the range so e.g. "0" or "99999" cannot overwrite the
		// configured port with a value that can never be bound.
		if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil && port >= 1 && port <= 65535 {
			cfg.Server.Port = port
		}
	}
}
func overrideString(target *string, envKey string) {
if value, ok := os.LookupEnv(envKey); ok {
*target = strings.TrimSpace(value)
}
}