package litellm

import (
	"log/slog"
	"net/http"

	"git.warky.dev/wdevs/amcs/internal/ai/compat"
	"git.warky.dev/wdevs/amcs/internal/config"
)

// New builds a compat.Client wired up for a LiteLLM backend from the
// supplied AI configuration, HTTP client, and logger.
//
// The fallback metadata model list is taken from the LiteLLM section of
// the configuration when it is non-empty; otherwise the shared Metadata
// section's fallback list is used.
func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) {
	// Prefer LiteLLM-specific fallbacks; only consult the shared
	// metadata fallbacks when none are configured for LiteLLM.
	fallbackModels := cfg.LiteLLM.EffectiveFallbackMetadataModels()
	if len(fallbackModels) == 0 {
		fallbackModels = cfg.Metadata.EffectiveFallbackModels()
	}

	client := compat.New(compat.Config{
		Name:                   "litellm",
		BaseURL:                cfg.LiteLLM.BaseURL,
		APIKey:                 cfg.LiteLLM.APIKey,
		EmbeddingModel:         cfg.LiteLLM.EmbeddingModel,
		MetadataModel:          cfg.LiteLLM.MetadataModel,
		FallbackMetadataModels: fallbackModels,
		Temperature:            cfg.Metadata.Temperature,
		Headers:                cfg.LiteLLM.RequestHeaders,
		HTTPClient:             httpClient,
		Log:                    log,
		Dimensions:             cfg.Embeddings.Dimensions,
		LogConversations:       cfg.Metadata.LogConversations,
	})
	return client, nil
}