From 74e67526d1f49846e29fec84f2826d9fb5b3d6ab Mon Sep 17 00:00:00 2001
From: Hein
Date: Fri, 27 Mar 2026 00:12:33 +0200
Subject: [PATCH] feat(config): add log_conversations option to AI metadata
 configuration

---
 configs/config.example.yaml      |  1 +
 configs/dev.yaml                 |  1 +
 internal/ai/compat/client.go     | 24 +++++++++++++++++++++++-
 internal/ai/litellm/client.go    |  1 +
 internal/ai/ollama/client.go     |  1 +
 internal/ai/openrouter/client.go |  1 +
 internal/config/config.go        |  7 ++++---
 7 files changed, 32 insertions(+), 4 deletions(-)

diff --git a/configs/config.example.yaml b/configs/config.example.yaml
index 3cf4960..e29ce8b 100644
--- a/configs/config.example.yaml
+++ b/configs/config.example.yaml
@@ -44,6 +44,7 @@ ai:
     model: "gpt-4o-mini"
     fallback_model: ""
    temperature: 0.1
+    log_conversations: false
   litellm:
     base_url: "http://localhost:4000/v1"
     api_key: "replace-me"
diff --git a/configs/dev.yaml b/configs/dev.yaml
index 78f3879..f665bf4 100644
--- a/configs/dev.yaml
+++ b/configs/dev.yaml
@@ -43,6 +43,7 @@ ai:
   metadata:
     model: "gpt-4o-mini"
     temperature: 0.1
+    log_conversations: false
   litellm:
     base_url: "http://localhost:4000/v1"
     api_key: "replace-me"
diff --git a/internal/ai/compat/client.go b/internal/ai/compat/client.go
index c6671db..bbf257d 100644
--- a/internal/ai/compat/client.go
+++ b/internal/ai/compat/client.go
@@ -44,6 +44,7 @@ type Client struct {
 	httpClient *http.Client
 	log        *slog.Logger
 	dimensions int
+	logConversations bool
 }
 
 type Config struct {
@@ -58,6 +59,7 @@ type Config struct {
 	HTTPClient *http.Client
 	Log        *slog.Logger
 	Dimensions int
+	LogConversations bool
 }
 
 type embeddingsRequest struct {
@@ -113,6 +115,7 @@ func New(cfg Config) *Client {
 		httpClient: cfg.HTTPClient,
 		log:        cfg.Log,
 		dimensions: cfg.Dimensions,
+		logConversations: cfg.LogConversations,
 	}
 }
 
@@ -177,6 +180,15 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
 		},
 	}
 
+	if c.logConversations && c.log != nil {
+		c.log.Info("metadata conversation request",
+			slog.String("provider", c.name),
+			slog.String("model", model),
+			slog.String("system", metadataSystemPrompt),
+			slog.String("input", input),
+		)
+	}
+
 	var resp chatCompletionsResponse
 	if err := c.doJSON(ctx, "/chat/completions", req, &resp); err != nil {
 		return thoughttypes.ThoughtMetadata{}, err
@@ -188,7 +200,17 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
 		return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: no choices returned", c.name)
 	}
 
-	metadataText := strings.TrimSpace(resp.Choices[0].Message.Content)
+	rawResponse := resp.Choices[0].Message.Content
+
+	if c.logConversations && c.log != nil {
+		c.log.Info("metadata conversation response",
+			slog.String("provider", c.name),
+			slog.String("model", model),
+			slog.String("response", rawResponse),
+		)
+	}
+
+	metadataText := strings.TrimSpace(rawResponse)
 	metadataText = stripThinkingBlocks(metadataText)
 	metadataText = stripCodeFence(metadataText)
 	metadataText = extractJSONObject(metadataText)
diff --git a/internal/ai/litellm/client.go b/internal/ai/litellm/client.go
index 88bff34..afd48a8 100644
--- a/internal/ai/litellm/client.go
+++ b/internal/ai/litellm/client.go
@@ -25,5 +25,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 		HTTPClient: httpClient,
 		Log:        log,
 		Dimensions: cfg.Embeddings.Dimensions,
+		LogConversations: cfg.Metadata.LogConversations,
 	}), nil
 }
diff --git a/internal/ai/ollama/client.go b/internal/ai/ollama/client.go
index 71443f4..c5f692a 100644
--- a/internal/ai/ollama/client.go
+++ b/internal/ai/ollama/client.go
@@ -21,5 +21,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 		HTTPClient: httpClient,
 		Log:        log,
 		Dimensions: cfg.Embeddings.Dimensions,
+		LogConversations: cfg.Metadata.LogConversations,
 	}), nil
 }
diff --git a/internal/ai/openrouter/client.go b/internal/ai/openrouter/client.go
index 0195edc..e5e94c6 100644
--- a/internal/ai/openrouter/client.go
+++ b/internal/ai/openrouter/client.go
@@ -32,5 +32,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 		HTTPClient: httpClient,
 		Log:        log,
 		Dimensions: cfg.Embeddings.Dimensions,
+		LogConversations: cfg.Metadata.LogConversations,
 	}), nil
 }
diff --git a/internal/config/config.go b/internal/config/config.go
index 345712f..d588ce1 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -84,9 +84,10 @@ type AIEmbeddingConfig struct {
 }
 
 type AIMetadataConfig struct {
-	Model         string  `yaml:"model"`
-	FallbackModel string  `yaml:"fallback_model"`
-	Temperature   float64 `yaml:"temperature"`
+	Model            string  `yaml:"model"`
+	FallbackModel    string  `yaml:"fallback_model"`
+	Temperature      float64 `yaml:"temperature"`
+	LogConversations bool    `yaml:"log_conversations"`
 }
 
 type LiteLLMConfig struct {