feat(config): update fallback model handling to support multiple models
This commit is contained in:
@@ -84,20 +84,22 @@ type AIEmbeddingConfig struct {
|
||||
}
|
||||
|
||||
// AIMetadataConfig configures the AI model used for metadata generation.
type AIMetadataConfig struct {
	// Model is the primary model identifier.
	Model string `yaml:"model"`
	// FallbackModels lists models to try, in order, when the primary
	// model fails.
	FallbackModels []string `yaml:"fallback_models"`
	// FallbackModel is the legacy single-fallback setting, kept for
	// backward compatibility; EffectiveFallbackModels appends it after
	// FallbackModels.
	FallbackModel string `yaml:"fallback_model"` // legacy single fallback
	// Temperature is the sampling temperature passed to the model.
	Temperature float64 `yaml:"temperature"`
	// LogConversations enables logging of model conversations.
	LogConversations bool `yaml:"log_conversations"`
}
|
||||
|
||||
// LiteLLMConfig configures the connection to a LiteLLM proxy and the
// models it should route to.
type LiteLLMConfig struct {
	// BaseURL is the root URL of the LiteLLM endpoint.
	BaseURL string `yaml:"base_url"`
	// APIKey authenticates requests to the endpoint.
	APIKey string `yaml:"api_key"`
	// UseResponsesAPI switches requests to the Responses API.
	UseResponsesAPI bool `yaml:"use_responses_api"`
	// RequestHeaders are extra headers attached to every request.
	RequestHeaders map[string]string `yaml:"request_headers"`
	// EmbeddingModel is the model used for embeddings.
	EmbeddingModel string `yaml:"embedding_model"`
	// MetadataModel is the primary model for metadata generation.
	MetadataModel string `yaml:"metadata_model"`
	// FallbackMetadataModels lists models to try, in order, when the
	// primary metadata model fails.
	FallbackMetadataModels []string `yaml:"fallback_metadata_models"`
	// FallbackMetadataModel is the legacy single-fallback setting, kept
	// for backward compatibility; EffectiveFallbackMetadataModels
	// appends it after FallbackMetadataModels.
	FallbackMetadataModel string `yaml:"fallback_metadata_model"` // legacy single fallback
}
|
||||
|
||||
type OllamaConfig struct {
|
||||
@@ -148,3 +150,45 @@ type BackfillConfig struct {
|
||||
MaxPerRun int `yaml:"max_per_run"`
|
||||
IncludeArchived bool `yaml:"include_archived"`
|
||||
}
|
||||
|
||||
func (c AIMetadataConfig) EffectiveFallbackModels() []string {
|
||||
models := make([]string, 0, len(c.FallbackModels)+1)
|
||||
for _, model := range c.FallbackModels {
|
||||
if model != "" {
|
||||
models = append(models, model)
|
||||
}
|
||||
}
|
||||
if c.FallbackModel != "" {
|
||||
models = append(models, c.FallbackModel)
|
||||
}
|
||||
return dedupeNonEmpty(models)
|
||||
}
|
||||
|
||||
func (c LiteLLMConfig) EffectiveFallbackMetadataModels() []string {
|
||||
models := make([]string, 0, len(c.FallbackMetadataModels)+1)
|
||||
for _, model := range c.FallbackMetadataModels {
|
||||
if model != "" {
|
||||
models = append(models, model)
|
||||
}
|
||||
}
|
||||
if c.FallbackMetadataModel != "" {
|
||||
models = append(models, c.FallbackMetadataModel)
|
||||
}
|
||||
return dedupeNonEmpty(models)
|
||||
}
|
||||
|
||||
// dedupeNonEmpty returns values with empty strings removed and only the
// first occurrence of each remaining string kept, preserving input order.
// The result is always non-nil.
func dedupeNonEmpty(values []string) []string {
	seen := make(map[string]bool, len(values))
	result := make([]string, 0, len(values))
	for _, v := range values {
		if v != "" && !seen[v] {
			seen[v] = true
			result = append(result, v)
		}
	}
	return result
}
|
||||
|
||||
Reference in New Issue
Block a user