feat(config): add log_conversations option to AI metadata configuration

This commit is contained in:
2026-03-27 00:12:33 +02:00
parent 6af02a2ba1
commit 74e67526d1
7 changed files with 32 additions and 4 deletions

View File

@@ -44,6 +44,7 @@ ai:
 model: "gpt-4o-mini"
 fallback_model: ""
 temperature: 0.1
+log_conversations: false
 litellm:
 base_url: "http://localhost:4000/v1"
 api_key: "replace-me"

View File

@@ -43,6 +43,7 @@ ai:
 metadata:
 model: "gpt-4o-mini"
 temperature: 0.1
+log_conversations: false
 litellm:
 base_url: "http://localhost:4000/v1"
 api_key: "replace-me"

View File

@@ -44,6 +44,7 @@ type Client struct {
 httpClient *http.Client
 log *slog.Logger
 dimensions int
+logConversations bool
 }
 type Config struct {
@@ -58,6 +59,7 @@ type Config struct {
 HTTPClient *http.Client
 Log *slog.Logger
 Dimensions int
+LogConversations bool
 }
 type embeddingsRequest struct {
@@ -113,6 +115,7 @@ func New(cfg Config) *Client {
 httpClient: cfg.HTTPClient,
 log: cfg.Log,
 dimensions: cfg.Dimensions,
+logConversations: cfg.LogConversations,
 }
 }
@@ -177,6 +180,15 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
 },
 }
+if c.logConversations && c.log != nil {
+c.log.Info("metadata conversation request",
+slog.String("provider", c.name),
+slog.String("model", model),
+slog.String("system", metadataSystemPrompt),
+slog.String("input", input),
+)
+}
 var resp chatCompletionsResponse
 if err := c.doJSON(ctx, "/chat/completions", req, &resp); err != nil {
 return thoughttypes.ThoughtMetadata{}, err
@@ -188,7 +200,17 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri
 return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: no choices returned", c.name)
 }
-metadataText := strings.TrimSpace(resp.Choices[0].Message.Content)
+rawResponse := resp.Choices[0].Message.Content
+if c.logConversations && c.log != nil {
+c.log.Info("metadata conversation response",
+slog.String("provider", c.name),
+slog.String("model", model),
+slog.String("response", rawResponse),
+)
+}
+metadataText := strings.TrimSpace(rawResponse)
 metadataText = stripThinkingBlocks(metadataText)
 metadataText = stripCodeFence(metadataText)
 metadataText = extractJSONObject(metadataText)

View File

@@ -25,5 +25,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 HTTPClient: httpClient,
 Log: log,
 Dimensions: cfg.Embeddings.Dimensions,
+LogConversations: cfg.Metadata.LogConversations,
 }), nil
 }

View File

@@ -21,5 +21,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 HTTPClient: httpClient,
 Log: log,
 Dimensions: cfg.Embeddings.Dimensions,
+LogConversations: cfg.Metadata.LogConversations,
 }), nil
 }

View File

@@ -32,5 +32,6 @@ func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compa
 HTTPClient: httpClient,
 Log: log,
 Dimensions: cfg.Embeddings.Dimensions,
+LogConversations: cfg.Metadata.LogConversations,
 }), nil
 }

View File

@@ -84,9 +84,10 @@ type AIEmbeddingConfig struct {
 }
 type AIMetadataConfig struct {
 Model string `yaml:"model"`
 FallbackModel string `yaml:"fallback_model"`
 Temperature float64 `yaml:"temperature"`
+LogConversations bool `yaml:"log_conversations"`
 }
 type LiteLLMConfig struct {